Implementing logistic regression in TensorFlow

import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf

# prepare data: 1000 random samples with 15 features and random binary labels
dataX = np.random.randn(1000, 15)
dataY = np.random.randint(0, 2, 1000).reshape(1000, 1)
X = tf.placeholder(tf.float32, shape=[None, dataX.shape[1]])
Y = tf.placeholder(tf.float32, shape=[None, dataY.shape[1]])

# define the forward pass: one linear layer followed by a sigmoid
w = tf.Variable(tf.truncated_normal([dataX.shape[1], 1]), dtype=tf.float32)
b = tf.Variable(tf.constant(0.1, shape=[1]), dtype=tf.float32)
y_hat = tf.sigmoid(tf.matmul(X, w) + b)

# binary cross-entropy; clipping keeps log() away from zero
cross_ent_loss = -tf.reduce_mean(
    Y * tf.log(tf.clip_by_value(y_hat, 1e-10, 1.0))
    + (1 - Y) * tf.log(tf.clip_by_value(1 - y_hat, 1e-10, 1.0)))
lr = 1e-4
train_step = tf.train.AdamOptimizer(lr).minimize(cross_ent_loss)

# saver is unused below; call saver.save(sess, path) to checkpoint the weights
saver = tf.train.Saver(tf.global_variables())

iter_nums = 10000
loss_lst = []
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for i in range(iter_nums):
        _, loss = sess.run([train_step, cross_ent_loss],
                           feed_dict={X: dataX, Y: dataY})
        loss_lst.append(loss)
        if i % 1000 == 0:
            # threshold the sigmoid output at 0.5 to get hard labels
            y_res = sess.run(y_hat, feed_dict={X: dataX})
            y_label = np.round(y_res)
            acc = np.mean(y_label == dataY)
            print('step %d, loss %.4f, accuracy %.4f' % (i, loss, acc))

# plot the training loss curve
fig, ax1 = plt.subplots(1, 1, figsize=(10, 5))
ax1.plot(loss_lst, c='red')
plt.show()

Because the prediction here is a single scalar probability, the cross-entropy loss must include both terms: y*log(y_hat) + (1-y)*log(1-y_hat).
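
As a side note, TensorFlow also ships a numerically stable built-in for this loss, which takes the pre-sigmoid scores (logits) instead of probabilities. A minimal sketch, reusing the names from the code above (the logits variable is introduced here for illustration):

# logits are the raw scores before the sigmoid
logits = tf.matmul(X, w) + b
# equivalent to the clipped formula above, but numerically stable
cross_ent_loss = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=Y, logits=logits))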

If instead the labels are one-hot vectors (the multi-class case), only the y*log(y_hat) term is needed, as in the sketch below:
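
A minimal sketch of the one-hot case, assuming 3 classes (num_classes and the *3-suffixed names are made up for illustration); the manual loss sums only y*log(y_hat) over the classes, and the built-in softmax cross-entropy computes the same quantity:

num_classes = 3  # assumed for illustration
X3 = tf.placeholder(tf.float32, shape=[None, dataX.shape[1]])
Y3 = tf.placeholder(tf.float32, shape=[None, num_classes])  # one-hot labels
W3 = tf.Variable(tf.truncated_normal([dataX.shape[1], num_classes]))
b3 = tf.Variable(tf.zeros([num_classes]))
logits3 = tf.matmul(X3, W3) + b3
y_hat3 = tf.nn.softmax(logits3)
# only the y*log(y_hat) term appears: the (1-y) term from the binary case drops out
loss3 = -tf.reduce_mean(
    tf.reduce_sum(Y3 * tf.log(tf.clip_by_value(y_hat3, 1e-10, 1.0)), axis=1))
# equivalent, numerically stable built-in:
loss3_builtin = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=Y3, logits=logits3))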

Reposted from blog.csdn.net/ningyanggege/article/details/84860826