PaddlePaddle Softmax回归示例

Softmax回归


背景信息

本文在PaddlePaddle中通过自定义数据实现Softmax回归模型的验证。为简化理解,构造的数据集如下所示:

  • 分别取笛卡尔坐标系中四个象限内的数据,数据的标签值即为其象限值
  • 为简化处理,数据的绝对值不大于1
  • 数据取样为均匀分布

代码样例


#加载所需包 

import numpy as np
import paddle.v2 as paddle

# PaddlePaddle init: CPU-only, single trainer thread
paddle.init(use_gpu=False, trainer_count=1)
# 训练数据准备
# Training features: 1000 uniform samples from each quadrant of [-1, 1]^2.
# Quadrant I: both coordinates in [0, 1)
p1 = np.random.uniform(0, 1, (1000, 2))

# Quadrant II: x in [-1, 0), y in [0, 1)
q2_x = np.random.uniform(-1, 0, (1000, 1))
q2_y = np.random.uniform(0, 1, (1000, 1))
p2 = np.column_stack((q2_x, q2_y))

# Quadrant III: both coordinates in [-1, 0)
p3 = np.random.uniform(-1, 0, (1000, 2))

# Quadrant IV: x in [0, 1), y in [-1, 0)
q4_x = np.random.uniform(0, 1, (1000, 1))
q4_y = np.random.uniform(-1, 0, (1000, 1))
p4 = np.column_stack((q4_x, q4_y))

# All four quadrants stacked row-wise: shape (4000, 2)
data = np.concatenate((p1, p2, p3, p4), axis=0)


# Training labels: the quadrant index (1-based), 1000 rows per quadrant.
# NOTE(review): PaddlePaddle's integer_value(4) downstream declares 4
# classes with valid ids 0..3; these 1-based labels (1..4) are why the
# fc layer later uses size=5. Confirm before changing either side.
l1 = np.ones((1000, 1))
l2 = np.full((1000, 1), 2.0)
l3 = np.full((1000, 1), 3.0)
l4 = np.full((1000, 1), 4.0)
label = np.vstack((l1, l2, l3, l4))


# Combine features and labels column-wise: dataset is (4000, 3),
# each row is [x, y, quadrant_label]
dataset=np.c_[data,label]

# Build the training reader
def train_reader(samples=None):
    """Return a PaddlePaddle-style reader over (features, label) pairs.

    Args:
        samples: optional (N, 3) array whose first two columns are the
            features and whose last column is the label. Defaults to the
            module-level ``dataset``, so existing callers
            (``train_reader()``) behave as before.

    Returns:
        A zero-argument generator function yielding
        (float32 feature vector, int label) tuples.

    NOTE(review): when called with no argument the global ``dataset`` is
    looked up at call time; the script later rebinds ``dataset`` to the
    test set before training starts, so the default path may not read
    the training data it appears to. Verify intent.
    """
    rows = samples if samples is not None else dataset

    def reader():
        for row in rows:
            # Index the scalar directly (row[-1]) instead of the size-1
            # slice row[-1:]; int() on a size-1 array is deprecated.
            yield row[:-1].astype(np.float32), int(row[-1])
    return reader
# Test features: 100 uniform samples per quadrant (same scheme as training).
# Quadrant I: both coordinates in [0, 1)
p1 = np.random.uniform(0, 1, (100, 2))

# Quadrant II: x in [-1, 0), y in [0, 1)
q2_x = np.random.uniform(-1, 0, (100, 1))
q2_y = np.random.uniform(0, 1, (100, 1))
p2 = np.column_stack((q2_x, q2_y))

# Quadrant III: both coordinates in [-1, 0)
p3 = np.random.uniform(-1, 0, (100, 2))

# Quadrant IV: x in [0, 1), y in [-1, 0)
q4_x = np.random.uniform(0, 1, (100, 1))
q4_y = np.random.uniform(-1, 0, (100, 1))
p4 = np.column_stack((q4_x, q4_y))

# All four quadrants stacked row-wise: shape (400, 2)
data = np.concatenate((p1, p2, p3, p4), axis=0)


# Test labels: the quadrant index (1-based), 100 rows per quadrant.
l1 = np.ones((100, 1))
l2 = np.full((100, 1), 2.0)
l3 = np.full((100, 1), 3.0)
l4 = np.full((100, 1), 4.0)
label = np.vstack((l1, l2, l3, l4))


# Combine test features and labels: (400, 3) rows of [x, y, label].
# NOTE(review): this REBINDS the module-level ``dataset`` that the
# train reader reads lazily at iteration time -- by the time training
# runs below, both readers iterate this 400-row test set. Verify intent.
dataset=np.c_[data,label]

#生成test_reader 
def test_reader():   
    def reader():  
        for d in dataset:  
            yield (d[:-1]).astype(np.float32), int(d[-1:])  
    return reader  

# Materialize the test reader into two parallel lists: feature vectors
# (each wrapped in a 1-tuple, the input format paddle.infer expects)
# and their integer labels.
test_data = []
test_label = []
for features, quadrant in test_reader()():
    test_data.append((features,))
    test_label.append(quadrant)
# Define the softmax regression model
def softmax_regression(indata):
    """Single fully-connected layer with softmax activation.

    NOTE(review): size=5 even though there are only 4 classes. The
    labels generated above are 1-based (1..4), so class id 4 must be a
    valid output index; the original author observed that size=4 aborts
    the Jupyter run. The cleaner fix would be 0-based labels + size=4.
    """
    predict = paddle.layer.fc(input=indata,
                              size=5,           # must exceed the max label value (labels are 1..4, not 0..3)
                              act=paddle.activation.Softmax())
    return predict
# Input layer: a 2-D point (x, y)
indatas = paddle.layer.data(
    name='indatas', type=paddle.data_type.dense_vector(2))
# Label layer: integer_value(4) declares 4 classes (valid ids 0..3).
# NOTE(review): the labels built above are 1..4 -- see the size=5
# workaround in softmax_regression. Also note this rebinds the
# module-level ``label`` array (already consumed into ``dataset``).
label = paddle.layer.data(
    name='label', type=paddle.data_type.integer_value(4))

predict = softmax_regression(indatas) # forward topology: softmax regression

# Cross-entropy classification cost and freshly initialized parameters
cost = paddle.layer.classification_cost(input=predict, label=label)
parameters = paddle.parameters.create(cost)

# Momentum SGD. The 128 scaling constants look copied from the official
# MNIST example (presumably its batch size); the actual batch_size used
# below is 100 -- TODO confirm the intended effective learning rate.
optimizer = paddle.optimizer.Momentum(
    learning_rate=0.1 / 128.0,
    momentum=0.9,
    regularization=paddle.optimizer.L2Regularization(rate=0.0005 * 128))

# SGD trainer tying the cost topology, parameters and optimizer together
trainer = paddle.trainer.SGD(cost=cost,
                             parameters=parameters,
                             update_equation=optimizer)
from paddle.v2.plot import Ploter

# Live cost curves for training and testing (Jupyter-friendly plotter)
train_title = "Train cost"
test_title = "Test cost"
cost_ploter = Ploter(train_title, test_title)

# Global batch counter, updated inside event_handler_plot
step = 0

# event_handler to plot a figure and checkpoint parameters
def event_handler_plot(event):
    """Trainer callback: plot the train cost every 10 batches; at the end
    of each pass, save a parameter checkpoint and record the test cost.

    Reads/writes the module-level ``step``, ``cost_ploter``, ``trainer``
    and ``test_reader``.
    """
    global step
    if isinstance(event, paddle.event.EndIteration):
        if step % 10 == 0:
            cost_ploter.append(train_title, step, event.cost)
            cost_ploter.plot()
        step += 1
    if isinstance(event, paddle.event.EndPass):
        # Save a checkpoint. A .tar archive is binary data, so the file
        # must be opened in binary mode ('wb'); text mode ('w') corrupts
        # the archive on platforms that translate newlines.
        with open('params_pass_%d.tar' % event.pass_id, 'wb') as f:
            trainer.save_parameter_to_tar(f)

        result = trainer.test(reader=paddle.batch(
           test_reader(), batch_size=20))
        cost_ploter.append(test_title, step, result.cost)
# Map data-layer names to positions in each reader tuple. The input
# layer defined above is named 'indatas', not 'pixel' ('pixel' is a
# leftover from the MNIST example this script was adapted from); a
# feeding key that matches no data layer breaks the data feed.
feeding = {'indatas': 0, 'label': 1}
trainer.train(
    paddle.batch(
        paddle.reader.shuffle(
            train_reader(), buf_size=200),
        batch_size=100),
    feeding=feeding,
    event_handler=event_handler_plot,
    num_passes=20)

# Spot-check inference on two test samples (indices 99-100: the last
# quadrant-1 sample and the first quadrant-2 sample).
t1=test_data[99:101]
t2=test_label[99:101]
probs = paddle.infer(output_layer=predict, parameters=parameters, input=t1) 
# probs holds one row of class scores per sample; argmax over the row
# gives the predicted class id (0..4, since the fc layer has size=5).
for i in xrange(len(probs)):  
     print "label=" + str(t2[i]) + ", predict=" + str(probs[i,:].argmax(axis=0))  








猜你喜欢

转载自blog.csdn.net/wiborgite/article/details/79747980
今日推荐