TensorFlow: 实战Google深度学习框架 (Practical Google Deep Learning Framework) — notes
Code copy: hello world
import tensorflow as tf

# Weights of a 2-3-1 network, drawn from a normal distribution (stddev=1, seed fixed for reproducibility)
w1 = tf.random_normal([2, 3], stddev=1, seed=1)
w2 = tf.random_normal([3, 1], stddev=1, seed=1)

# One input sample with two features
x = tf.constant([[0.7, 0.9]])

# Forward pass: hidden layer, then output layer
a = tf.matmul(x, w1)
y = tf.matmul(a, w2)

sess = tf.Session()
print(sess.run(y))
sess.close()
Output: [[ 3.95757794]]
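For reference, the same forward pass can also be written with the weights wrapped in tf.Variable and initialized explicitly, which is how the training example further below declares them. A minimal sketch of that variant, assuming the same seed:

import tensorflow as tf

# Same 2-3-1 forward pass, but with the weights declared as variables
w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))
x = tf.constant([[0.7, 0.9]])

a = tf.matmul(x, w1)
y = tf.matmul(a, w2)

with tf.Session() as sess:
    # Variables must be initialized before they can be read
    sess.run(w1.initializer)
    sess.run(w2.initializer)
    print(sess.run(y))  # with seed=1 this should print the same [[ 3.95757794]]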
With placeholder:
import tensorflow as tf

w1 = tf.random_normal([2, 3], stddev=1, seed=1)
w2 = tf.random_normal([3, 1], stddev=1, seed=1)

# Placeholder for the input: 3 samples with 2 features each, fed in at run time
x = tf.placeholder(tf.float32, shape=(3, 2), name="input")

a = tf.matmul(x, w1)
y = tf.matmul(a, w2)

sess = tf.Session()
# The fed array must match the placeholder's (3, 2) shape
print(sess.run(y, feed_dict={x: [[0.7, 0.9], [0.1, 0.4], [0.5, 0.8]]}))
sess.close()
Output:
[[ 3.95757794]
 [ 1.15376544]
 [ 3.16749191]]
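The placeholder above is pinned to shape (3, 2), so exactly three samples must be fed. Leaving the first dimension as None, as the training code below does, lets the same graph accept any batch size. A minimal sketch:

import tensorflow as tf

w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))

# First dimension left as None: the batch size is decided at feed time
x = tf.placeholder(tf.float32, shape=(None, 2), name="input")
a = tf.matmul(x, w1)
y = tf.matmul(a, w2)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(y, feed_dict={x: [[0.7, 0.9]]}))              # batch of 1
    print(sess.run(y, feed_dict={x: [[0.7, 0.9], [0.1, 0.4]]}))  # batch of 2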
Full code to train the network:
import tensorflow as tf
from numpy.random import RandomState

batch_size = 8

# Trainable weights of a 2-3-1 network
w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))

# None in the first dimension lets the batch size vary between training and evaluation
x = tf.placeholder(tf.float32, shape=(None, 2), name="x-input")
y_ = tf.placeholder(tf.float32, shape=(None, 1), name="y-input")

# Forward pass
a = tf.matmul(x, w1)
y = tf.matmul(a, w2)

# Cross-entropy loss; clip_by_value keeps log() away from 0
cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))
train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)

# Synthetic dataset: label 1 when x1 + x2 < 1, otherwise 0
rdm = RandomState(1)
dataset_size = 128
X = rdm.rand(dataset_size, 2)
Y = [[int(x1 + x2 < 1)] for (x1, x2) in X]

with tf.Session() as sess:
    init_op = tf.global_variables_initializer()
    sess.run(init_op)

    # Weights before training
    print(sess.run(w1))
    print(sess.run(w2))

    STEPS = 5000
    for i in range(STEPS):
        # Pick the next mini-batch
        start = (i * batch_size) % dataset_size
        end = min(start + batch_size, dataset_size)
        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})

        if i % 1000 == 0:
            total_cross_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})
            print("After %d training steps,cross entropy on all data is %g" % (i, total_cross_entropy))

    # Weights after training
    print(sess.run(w1))
    print(sess.run(w2))
Output:
[[-0.81131822 1.48459876 0.06532937]
[-2.44270396 0.0992484 0.59122431]]
[[-0.81131822]
[ 1.48459876]
[ 0.06532937]]
After 0 training steps,cross entropy on all data is 0.0674925
After 1000 training steps,cross entropy on all data is 0.0163385
After 2000 training steps,cross entropy on all data is 0.00907547
After 3000 training steps,cross entropy on all data is 0.00714436
After 4000 training steps,cross entropy on all data is 0.00578471
[[-1.9618274 2.58235407 1.68203783]
[-3.4681716 1.06982327 2.11788988]]
[[-1.8247149 ]
[ 2.68546653]
[ 1.41819501]]
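To see what the trained network actually predicts, y can be run on a few inputs inside the same session, after the training loop. A small sketch (the exact numbers depend on the trained weights):

    # Still inside the `with tf.Session() as sess:` block, after the training loop:
    # y is a raw linear output here (no sigmoid), so the values are not probabilities
    print(sess.run(y, feed_dict={x: [[0.7, 0.9], [0.1, 0.4], [0.5, 0.8]]}))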