Demo entry 6864281

regressor_keras

Submitted by anonymous on Aug 13, 2019 at 13:27
Language: Python. Code size: 1.3 kB.

import numpy as np
from keras.models import Model
from keras.layers import Input, Dense, Concatenate, Multiply
import matplotlib.pyplot as plt

# create noisy samples of y = 10*x^2 + 2*x + 2
data_num = 2000
X = np.linspace(-1, 1, data_num)
np.random.shuffle(X)    # randomize the data
Y = 10*X**2 + 2*X + 2 + np.random.normal(0, 0.05, (data_num, ))
X = X.reshape(len(X),1)


X_train, Y_train = X[:-40], Y[:-40]     # first 1960 data points
X_test, Y_test = X[-40:], Y[-40:]       # last 40 data points


# build the network from the input layer to the output layer
x_tensor = Input(shape=(1,))
x_square = Multiply()([x_tensor, x_tensor])
x_all = Concatenate()([x_square, x_tensor])
y_tensor = Dense(1)(x_all)
model = Model(inputs=x_tensor, outputs=y_tensor)
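# note: Multiply and Concatenate only build the feature vector [x^2, x],
# so the Dense layer fits y = w1*x^2 + w2*x + b; after training, w1, w2 and b
# should approach the true coefficients 10, 2 and 2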

# choose loss function and optimizing method
model.compile(loss='mse', optimizer='sgd')    # mse: mean squared error

# training
print('Training -----------')
model.fit(X_train, Y_train, epochs=300, batch_size=100, verbose=1)

# test
print('\nTesting ------------')
cost = model.evaluate(X_test, Y_test, batch_size=40)
print('test cost:', cost)
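
# as a cross-check, a plain least-squares quadratic fit on the training data
# should recover roughly the same coefficients (highest degree first)
coef = np.polyfit(X_train.reshape(-1), Y_train, deg=2)
print('polyfit coefficients (x^2, x, 1):', coef)    # roughly [10, 2, 2]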

# plotting the prediction
Y_pred = model.predict(X_test)
test = X_test.reshape(-1)
plt.scatter(test, Y_test)        # ground truth
plt.plot(test, Y_pred, 'ro')     # predictions as red dots
plt.show()
# print the weights of each layer; only the Dense layer has trainable weights
for i in range(4):
    print('Layer', i, '=', model.layers[i].get_weights(), '\n')
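
A minimal sanity check, assuming the Dense layer is the last layer of the model: its kernel should end up close to [10, 2] and its bias close to 2, matching the polynomial used to generate the data.

# recover the fitted coefficients from the Dense layer (assumes it is model.layers[-1])
w, b = model.layers[-1].get_weights()
print('learned coefficients for (x^2, x):', w.reshape(-1))    # roughly [10, 2]
print('learned intercept:', b)                                # roughly [2]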
