Hello. This post fits a function with a multilayer perceptron built in Keras. The fit is performed on function values with Gaussian noise added, and I also plot the state of convergence.
mlp_sinusoidal_keras.py
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import Dense, Activation
from sklearn.metrics import mean_squared_error
param = {'num_hidden_layers': 2, 'num_nodes': 100, 'num_epochs': 8000, 'activation': 'relu'}
# 'activation' can be 'tanh', 'sigmoid', or 'relu'; a comparison sketch follows the script
# generate noisy samples of the target function
num_training, num_testing, sigma, seed = 400, 100, 0.3, 0

def sin(x):
    return np.sin(x) * 3 + 1  # target: scaled and shifted sine

N = num_training + num_testing
tau = 4 * np.pi
np.random.seed(seed)
X = np.random.random((N, 1)) * tau               # inputs drawn uniformly from [0, 4*pi)
Y = sin(X) + np.random.normal(0, sigma, (N, 1))  # targets with Gaussian noise
I = np.arange(N)
np.random.shuffle(I)                             # random train/test split
training, testing = I[:num_training], I[num_training:]
# multilayer perceptron: stacked Dense + activation hidden layers, linear output for regression
model = Sequential()
for i in range(param['num_hidden_layers']):
    if i == 0:
        model.add(Dense(param['num_nodes'], input_dim=1))  # first layer declares the input shape
    else:
        model.add(Dense(param['num_nodes']))
    model.add(Activation(param['activation']))
model.add(Dense(1))
model.compile(optimizer='sgd', loss='mse')
history = model.fit(X[training], Y[training], validation_split=0.1,
                    epochs=param['num_epochs'])
pred = model.predict(X[testing])
print("error =", mean_squared_error(Y[testing], pred))
# plotting
X0 = np.linspace(0, tau)
plt.plot(X0, sin(X0), 'r', alpha=0.5)  # noise-free target function
plt.plot(X, Y, 'b.', alpha=0.3)        # noisy samples
plt.plot(X[testing], pred, 'yo')       # MLP predictions at the test inputs
plt.axis([-0.2, tau + 0.2, -5, 6])
plt.show()
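
To see the state of convergence, the History object returned by model.fit (captured above as history) records the per-epoch losses. The following is a minimal sketch that plots the training and validation curves; val_loss is available because validation_split was set.

# plot per-epoch training/validation loss from the History returned by model.fit
plt.plot(history.history['loss'], label='training loss')
plt.plot(history.history['val_loss'], label='validation loss')
plt.xlabel('epoch')
plt.ylabel('mse')
plt.yscale('log')  # log scale makes the long decay over 8000 epochs easier to inspect
plt.legend()
plt.show()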
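
The activation comment above lists 'tanh', 'sigmoid', and 'relu' as options. As a rough sketch of how one could compare them, wrap the model construction in a helper and loop over the choices; build_model is a hypothetical helper, not part of the original script, and it reuses X, Y, param, and the train/test split defined above.

# sketch: compare the listed activations on the same data
def build_model(activation):
    m = Sequential()
    m.add(Dense(param['num_nodes'], input_dim=1))
    m.add(Activation(activation))
    for _ in range(param['num_hidden_layers'] - 1):
        m.add(Dense(param['num_nodes']))
        m.add(Activation(activation))
    m.add(Dense(1))
    m.compile(optimizer='sgd', loss='mse')
    return m

for act in ['tanh', 'sigmoid', 'relu']:
    m = build_model(act)
    m.fit(X[training], Y[training], epochs=param['num_epochs'], verbose=0)
    print(act, mean_squared_error(Y[testing], m.predict(X[testing])))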