TensorFlow Test1

import tensorflow as tf  
from numpy.random import RandomState

def test():
    print("test start")
    # Define the batch size of the training data
    batch_size = 8
    # Define the neural network parameters (weights)
    w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
    w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))


    x = tf.placeholder(tf.float32, shape=(None, 2), name="x-input")
    y_ = tf.placeholder(tf.float32, shape=(None, 1), name="y-input")


    # Define the forward propagation of the network
    a = tf.matmul(x, w1)
    y = tf.matmul(a, w2)


    # Define the loss function (I didn't fully understand this part)
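    # tf.clip_by_value limits y to the range [1e-10, 1.0], so tf.log never
    # receives zero or a negative value. Only the y_ * log(y) term of binary
    # cross-entropy is used here; the full form would also include a
    # (1 - y_) * log(1 - y) term.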
    cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))
    train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)


    rdm = RandomState(1)
    dataset_size = 128
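    # Generate a synthetic dataset: each sample has two features drawn
    # uniformly from [0, 1), and the label is 1 when x1 + x2 < 1, else 0.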
    X = rdm.rand(dataset_size, 2)
    print(X)
    Y = [[int(x1 + x2 < 1)] for (x1, x2) in X]
    print(Y)

    # Create a session
    with tf.Session() as sess:
        # Initialize the variables
        init_op = tf.global_variables_initializer()
        sess.run(init_op)
        print(sess.run(w1))
        print(sess.run(w2))
        # Set the number of training steps
        step = 5000
        for i in range(step):
            # Pick a batch of samples, cycling through the dataset
            start = (i * batch_size) % dataset_size
            end = min(start + batch_size, dataset_size)
            sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})
            if i % 1000 == 0:
                # Evaluate the loss on the full dataset every 1000 steps
                total_cross_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})
                print("After %d training step(s), cross entropy on all data is %g" % (i, total_cross_entropy))
        print(sess.run(w1))
        print(sess.run(w2))

if __name__ == "__main__":
    test()
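
The clip-and-log expression above is a simplified cross-entropy written by hand. TensorFlow 1.x also provides a numerically stable built-in, tf.nn.sigmoid_cross_entropy_with_logits, which applies the sigmoid internally and needs no clipping. The snippet below is only a minimal sketch, assuming the same placeholders and weights (x, y_, w1, w2) defined in test():

# Sketch: a more standard binary cross-entropy using TensorFlow's built-in op.
# Assumes x, y_, w1, w2 from test(); the network output is treated as raw logits.
a = tf.matmul(x, w1)
logits = tf.matmul(a, w2)  # raw scores, no activation applied here
loss = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=y_, logits=logits))
train_step = tf.train.AdamOptimizer(0.001).minimize(loss)

Because this op works on logits, the manual tf.clip_by_value and tf.log calls are unnecessary, which is why most examples prefer this form.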

Reprinted from blog.csdn.net/d710055071/article/details/80470380