简单的全连接神经网络(用于学习异或/XOR 问题),网络结构为 2-2-1。
代码如下:
# encoding='utf-8'
"""
Minimal fully connected network (2-2-1) that learns XOR, using TensorFlow 1.x.

created on 2018-08-10
@author wt
"""
import tensorflow as tf
import numpy as np

# Hyperparameters and layer sizes.
learning_rate = 0.01
n_input = 2    # two binary inputs per sample
n_label = 1    # single scalar output
n_hidden = 2   # minimal hidden width able to represent XOR

# Placeholders for a batch of samples and their labels.
x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, n_label])

# Small random init for weights, zeros for biases.
weights = {
    'h1': tf.Variable(tf.truncated_normal([n_input, n_hidden], stddev=0.1)),
    'h2': tf.Variable(tf.truncated_normal([n_hidden, n_label], stddev=0.1)),
}
biases = {
    'h1': tf.Variable(tf.zeros([n_hidden])),
    'h2': tf.Variable(tf.zeros([n_label])),
}

# Hidden layer: ReLU.
# NOTE(review): with only 2 ReLU units, training can occasionally stall on a
# "dead" unit; rerun if the outputs do not separate after 1000 steps.
layer1 = tf.nn.relu(tf.add(tf.matmul(x, weights['h1']), biases['h1']))
# FIX: the original used tanh here, whose range is (-1, 1) and can never reach
# the target value 1. Sigmoid maps into (0, 1), matching the 0/1 labels below.
y_pred = tf.nn.sigmoid(tf.add(tf.matmul(layer1, weights['h2']), biases['h2']))

# Mean squared error against the labels, minimized with Adam.
loss = tf.reduce_mean((y - y_pred) ** 2)
train_step = tf.train.AdamOptimizer(learning_rate).minimize(loss)

# XOR truth table as training data.
x1 = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype='float32')
# FIX: the labels were cast to int16 while the y placeholder is float32,
# forcing an implicit conversion at feed time; build them as float32 directly.
y1 = np.array([[0], [1], [1], [0]], dtype='float32')

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Full-batch training over the 4 XOR samples.
    for i in range(1000):
        sess.run(train_step, feed_dict={x: x1, y: y1})
    print(sess.run(y_pred, feed_dict={x: x1}))   # predictions for the 4 cases
    print(sess.run(layer1, feed_dict={x: x1}))   # hidden-layer activations