Python: measuring how long a program takes to run

At the start of the program, add:

import time

start = time.perf_counter()  # time.clock() was removed in Python 3.8; perf_counter() is the replacement

At the end of the program:

end = time.perf_counter()
print('Running time: %s Seconds' % (end - start))  # elapsed time, in seconds
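If several sections need to be timed, a small context manager around time.perf_counter() keeps the boilerplate in one place. The sketch below is only an illustration; the timer name and the dummy workload are mine, not from the original post:

import time
from contextlib import contextmanager

@contextmanager
def timer(label='Running time'):
    start = time.perf_counter()   # high-resolution, monotonic clock
    try:
        yield
    finally:
        end = time.perf_counter()
        print('%s: %.4f Seconds' % (label, end - start))

# usage: wrap whatever you want to time
with timer('dummy workload'):
    total = sum(i * i for i in range(1000000))

The same idea applied to the training script below gives the GPU/CPU numbers quoted in the comments.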
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

from tensorflow.examples.tutorials.mnist import input_data

import time

start = time.perf_counter()  # time.clock() was removed in Python 3.8

# Timing results reported in the original post:
# GPU: 57.8853 s
# CPU: 73.23 s
mnist = input_data.read_data_sets('data/',one_hot=True)

trainimg  = mnist.train.images
trainlabel = mnist.train.labels
testimg  = mnist.test.images
testlabel = mnist.test.labels

for i in range(5):
    curr_img = np.reshape(trainimg[i,:],(28,28))
    curr_label = np.argmax(trainlabel[i,:])
    plt.matshow(curr_img,cmap = plt.get_cmap('gray'))
    plt.title(str(i) + 'th Training Data, Label is ' + str(curr_label))
    # plt.show()

x = tf.placeholder(tf.float32,[None,784])
y = tf.placeholder(tf.float32,[None,10])

W = tf.Variable(tf.zeros([784,10]))
b = tf.Variable(tf.zeros([10]))
# tf.matmul(x,W)
with tf.device('/cpu:0'):
    actv = tf.nn.softmax(tf.matmul(x,W)+b)  # softmax regression: predicted class probabilities
    cost = tf.reduce_mean(-tf.reduce_sum(y*tf.log(actv),reduction_indices=1))  # cross-entropy loss

    learning_rate =0.01
    optm = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

    pred = tf.equal(tf.argmax(actv,1),tf.argmax(y,1))  # argmax along axis 1 (per row) gives the predicted / true class index
    # a prediction counts as correct when the two indices match

    accr = tf.reduce_mean(tf.cast(pred,'float'))  # cast the booleans to float and average to get accuracy


init = tf.global_variables_initializer()

training_epochs =50
batch_size  =100
display_step   =5
sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))
sess.run(init)

for epoch in range(training_epochs):
    avg_cost = 0.
    num_batch = int(mnist.train.num_examples/batch_size)
    for i in range(num_batch):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        sess.run(optm,feed_dict={x:batch_xs,y:batch_ys})
        feeds ={x:batch_xs,y:batch_ys}
        avg_cost += sess.run(cost,feed_dict=feeds)/num_batch

    if epoch % display_step == 0:
        feeds_train ={x:batch_xs,y:batch_ys}
        feeds_test = {x:mnist.test.images, y:mnist.test.labels}
        train_acc = sess.run(accr,feed_dict=feeds_train)
        test_acc = sess.run(accr,feed_dict=feeds_test)
        print('Epoch:%03d/%03d cost:%.9f train_acc:%.3f test_acc:%.3f'
              % (epoch, training_epochs,avg_cost,train_acc,test_acc))

    # plt.show()
end = time.perf_counter()

print('Running time: %s Seconds'%(end-start))
# GPU: 57.8853 s
# CPU: 73.23 s
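The GPU figure above comes from running the same graph on a GPU instead of the CPU. One way to do that, assuming a TensorFlow 1.x build with CUDA support and reusing the x, y, W, b tensors defined earlier, is to place the model under '/gpu:0' rather than '/cpu:0'. This is a sketch, not the exact script the timings came from:

# assumes x, y, W, b are defined as in the script above
with tf.device('/gpu:0'):
    actv = tf.nn.softmax(tf.matmul(x, W) + b)
    cost = tf.reduce_mean(-tf.reduce_sum(y * tf.log(actv), reduction_indices=1))
    optm = tf.train.GradientDescentOptimizer(0.01).minimize(cost)

# allow_soft_placement lets ops without a GPU kernel fall back to the CPU;
# log_device_placement prints where each op actually runs
sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                        log_device_placement=True))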


Reposted from blog.csdn.net/weixin_42053726/article/details/85223239