Implementing a simple logistic regression with TensorFlow

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.examples.tutorials.mnist import input_data   # MNIST helper from the TF1 tutorials

mnist = input_data.read_data_sets("data/", one_hot=True)

Pull the MNIST images and labels out into arrays

trainimg = mnist.train.images
trainlabel = mnist.train.labels
testimg = mnist.test.images
testlabel = mnist.test.labels

Take a look at the data shapes

print(trainimg.shape)
print(trainlabel.shape)
print(testimg.shape)
print(testlabel.shape)

(55000, 784)
(55000, 10)
(10000, 784)
(10000, 10)
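
Since matplotlib is imported but never used in the post, here is a small sketch (my own addition, the index 0 is arbitrary) to eyeball one training sample by reshaping the flat 784-dimensional vector back into a 28x28 image:

# Sketch, not from the original post: show one training image and its label
sample = trainimg[0].reshape(28, 28)                 # 784 -> 28x28
plt.imshow(sample, cmap="gray")
plt.title("label: %d" % np.argmax(trainlabel[0]))    # decode the one-hot label
plt.show()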

x = tf.placeholder("float", [None, 784])
y = tf.placeholder("float", [None, 10])

W = tf.Variable(tf.zeros([784,10]))
b = tf.Variable(tf.zeros([10]))

print(b.shape)   # (10,)

actv = tf.nn.softmax(tf.matmul(x,W) + b)

loss = tf.reduce_mean(-tf.reduce_sum(y * tf.log(actv), reduction_indices=1))   # cross-entropy averaged over the batch
learning_rate = 0.01
optm = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)

print(actv.shape)   # (?, 10)

print(y.shape)      # (?, 10)
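
As a side note (not in the original post), taking the log of a softmax can be numerically unstable when a probability underflows to zero; TF1 also offers tf.nn.softmax_cross_entropy_with_logits, which computes the same cross-entropy directly from the logits. A sketch, assuming the raw logits are kept in a separate tensor (the logits/loss_stable names are my own):

# Alternative formulation -- equivalent loss, computed from the raw logits
logits = tf.matmul(x, W) + b
loss_stable = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=logits))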

A quick test: cast a vector of booleans to floats and average them (this is how accuracy will be computed)

actv1 = tf.Variable([True, False, True, True, False, False, True, True, True, True])
z = tf.cast(actv1, "float")                      # booleans -> 1.0 / 0.0
acc = tf.reduce_mean(tf.cast(actv1, "float"))    # fraction of True values
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
print(sess.run(acc))   # 0.7  (7 of the 10 entries are True)
print(sess.run(z))     # [1. 0. 1. 1. 0. 0. 1. 1. 1. 1.]
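
The post stops before actually training the model. Below is a minimal sketch of how the training loop and the real accuracy computation could look, reusing the ops defined above; the pred/accr names, epoch count, and batch size are my own choices, not from the original:

# Sketch only -- hyperparameters and the pred/accr names are assumptions
pred = tf.equal(tf.argmax(actv, 1), tf.argmax(y, 1))   # boolean vector of correct predictions
accr = tf.reduce_mean(tf.cast(pred, "float"))          # same cast-and-average trick as the test above

training_epochs = 10
batch_size = 100
num_batch = int(mnist.train.num_examples / batch_size)

sess.run(tf.global_variables_initializer())            # (re)initialize W and b before training
for epoch in range(training_epochs):
    avg_cost = 0.
    for _ in range(num_batch):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        feeds = {x: batch_xs, y: batch_ys}
        sess.run(optm, feed_dict=feeds)                 # one gradient-descent step
        avg_cost += sess.run(loss, feed_dict=feeds) / num_batch
    print("Epoch: %03d  cost: %.6f" % (epoch, avg_cost))

print("Test accuracy:", sess.run(accr, feed_dict={x: testimg, y: testlabel}))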


Reposted from blog.csdn.net/qq_22230709/article/details/81585273