1. Linear regression

# Import packages
import keras
import numpy as np
import matplotlib.pyplot as plt
# Sequential: a model built as an ordered stack of layers
from keras.models import Sequential
# Dense: a fully connected layer
from keras.layers import Dense
# Use numpy to generate 100 random points
x_data = np.random.rand(100)
noise = np.random.normal(0, 0.01, x_data.shape)
y_data = x_data * 0.1 + 0.2 + noise

# Display the random points
plt.scatter(x_data, y_data)
plt.show()

# Build a sequential model
model = Sequential()
# Add a fully connected layer to the model
model.add(Dense(units=1, input_dim=1))
# sgd: Stochastic Gradient Descent
# mse: Mean Squared Error
model.compile(optimizer='sgd', loss='mse')
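# Note (not in the original post): passing the string 'sgd' uses Keras'
# default learning rate; to tune it, an optimizer object can be passed
# instead, e.g.
# model.compile(optimizer=keras.optimizers.SGD(lr=0.1), loss='mse')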

# Train for 3001 batches
for step in range(3001):
    # Train on one batch per iteration (here the whole dataset is one batch)
    cost = model.train_on_batch(x_data, y_data)
    # Print the cost every 500 batches
    if step % 500 == 0:
        print('cost:', cost)
        
# Print the weight and bias values
W, b = model.layers[0].get_weights()
print('W:', W, 'b:', b)

# Feed x_data into the network to get the predictions y_pred
y_pred = model.predict(x_data)

# Display the random points
plt.scatter(x_data, y_data)
# Display the fitted line
plt.plot(x_data, y_pred, 'r-', lw=3)
plt.show()
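
For comparison, the same model can also be trained with model.fit instead of the manual train_on_batch loop, and the learned parameters can be cross-checked against an ordinary least-squares fit from numpy. This is a minimal sketch, not part of the original post; it assumes the same x_data and y_data generated above and the same Keras version.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense

def fit_line(x_data, y_data, epochs=3001):
    # Same single-neuron linear model as above, trained with fit()
    # instead of a manual train_on_batch loop
    model = Sequential()
    model.add(Dense(units=1, input_dim=1))
    model.compile(optimizer='sgd', loss='mse')
    # batch_size=len(x_data) makes each epoch one full-batch update,
    # matching the loop above; verbose=0 hides the progress bar
    model.fit(x_data, y_data, batch_size=len(x_data), epochs=epochs, verbose=0)
    return model.layers[0].get_weights()

# Usage with the data generated above:
# W, b = fit_line(x_data, y_data)
# Closed-form least-squares check of the learned slope and intercept:
# slope, intercept = np.polyfit(x_data, y_data, 1)

Both approaches should recover approximately the same slope and intercept.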

 
