Python – multi-input, single-output regression using scikit-learn's MLPRegressor neural network

If I understand correctly, you need a recurrent neural network (RNN) or a long short-term memory (LSTM) model; you can solve your problem with TensorFlow as shown below:

import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
import pandas as pd


# The two known curves, one per C value: column 'M' is the index variable,
# the other two columns are the measured series for C=2.1 and C=2.5.
data = pd.DataFrame([[1,1,1],[2,4.28,5.65],
                     [3,10.04,15.59],[4,18.38,32],
                     [5,29.36,55.90],[6,43.06,88.18],
                     [7,59.52,129.64],[8,78.79,181.02],
                     [9,100.90,243],[10,125.89,316.22]], 
                    columns=['M','C=2.1','C=2.5']) 

# Build supervised pairs: prepend each curve's C parameter to its series,
# then slide a length-3 window so two consecutive values predict the next.
series = data.to_numpy()[:, 1:]                       # (10, 2): the two C columns
curves = np.vstack(([[2.1, 2.5]], series)).T           # (2, 11): C value, then its series
windows = np.lib.stride_tricks.sliding_window_view(curves, 3, axis=1)  # (2, 9, 3)

X_train = windows[..., :-1].reshape(-1, 2)             # (18, 2): two-step history
y_train = windows[..., -1].reshape(-1)                 # (18,): the value that follows

# A small recurrent model: two history steps in, one next value out.
# SimpleRNN expects 3-D input of shape (batch, timesteps, features), i.e.
# (None, 2, 1) here — so the flat (n, 2) training matrix must gain a
# trailing feature axis before fitting (feeding (n, 2) directly raises an
# ndim-mismatch error in TF 2.x).
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.SimpleRNN(200, input_shape=(2, 1), activation='tanh'))
model.add(tf.keras.layers.Dense(1))
model.compile(loss="mean_absolute_error", optimizer="adam")

model.fit(X_train.reshape(-1, 2, 1), y_train, epochs=1000, batch_size=16)

# Autoregressive rollout for an unseen C=2.3: seed with [C, M(1)] and keep
# feeding the newest prediction back in as the latest history value.
X_test = np.array([[2.3, 1]], dtype=np.float64)
res = []
for _ in range(10):
    next_val = model.predict(X_test)[0][0]
    res.append(next_val)
    # slide the window: drop the oldest entry, append the new prediction
    X_test = np.array([[X_test[0, 1], next_val]], dtype=np.float64)

# predict res for 2.3 => [4.725103, 10.564905, 19.61012, 30.361404, 43.3811, 60.815994, 86.90681, 118.34455, 154.1196, 181.1613]


# Overlay the two known curves (C=2.1 and C=2.5) with the model's
# predicted rollout for C=2.3.
masses = data['M'].to_numpy().reshape(-1, 1)
known = np.column_stack((data['C=2.1'].to_numpy(), data['C=2.5'].to_numpy()))

for col in range(known.shape[1]):
    plt.plot(masses, known[:, col])
plt.plot(res)
plt.show()

Output:

...
Epoch 999/1000
2/2 [==============================] - 0s 8ms/step - loss: 13.2078
Epoch 1000/1000
2/2 [==============================] - 0s 10ms/step - loss: 13.0156

(plot showing the two known curves for C=2.1 and C=2.5 alongside the predicted curve for C=2.3)

Read more here: Source link