Write train_test.prototxt and deploy.prototxt using pycaffe

import caffe
from caffe import layers as L, params as P

def write_lenet(trainlmdb,
                testlmdb,
                batch_size_train=64,
                batch_size_test=10,
                isdeploy=False):

    # our version of LeNet: a series of linear and simple nonlinear transformations
    n = caffe.NetSpec()
    ntest = caffe.NetSpec()
    path = '/home/sailist/GIt/intel/caffe/myExamples/cell_lenet/'
    train_path = path + 'train_test.prototxt'
    deploy_path = path + 'c_deploy.prototxt'  # raw deploy net; fixed up by change_deploy() below

    # two data layers: one for the TRAIN phase, one for the TEST phase
    n.data, n.label = L.Data(batch_size=batch_size_train,
                             backend=P.Data.LMDB,
                             source=trainlmdb,
                             transform_param=dict(scale=1./255, mean_value=[225, 213, 234]),
                             include=dict(phase=caffe.TRAIN),
                             ntop=2)
    ntest.data, ntest.label = L.Data(batch_size=batch_size_test,
                                     backend=P.Data.LMDB,
                                     source=testlmdb,
                                     transform_param=dict(scale=1./255),
                                     include=dict(phase=caffe.TEST),
                                     ntop=2)

    n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))
    n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
    n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50, weight_filler=dict(type='xavier'))
    n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)
    n.ip1 = L.InnerProduct(n.pool2, num_output=500, weight_filler=dict(type='xavier'))
    n.relu1 = L.ReLU(n.ip1, in_place=True)
    n.ip2 = L.InnerProduct(n.relu1, num_output=10, weight_filler=dict(type='xavier'))
    if isdeploy:
        # deploy net: expose class probabilities
        n.soft = L.Softmax(n.ip2)
    else:
        # train/test net: loss and accuracy instead of a bare softmax
        n.loss = L.SoftmaxWithLoss(n.ip2, n.label)
        n.acc = L.Accuracy(n.ip2, n.label)

    if not isdeploy:
        # prepend the TEST data layer to the train net
        outputs = str(ntest.to_proto()) + str(n.to_proto())
        with open(train_path, 'w') as f:
            f.write(outputs)
    else:
        outputs = str(n.to_proto())
        with open(deploy_path, 'w') as f:
            f.write(outputs)

    return n.to_proto()  # the prototxt is written; also return the proto
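
To sanity-check the generated file, you can let Caffe parse it back and print every blob's shape, which is a quick way to catch wiring mistakes. This is just a sketch, and it assumes the LMDBs referenced by the data layers actually exist:

import caffe

caffe.set_mode_cpu()
path = '/home/sailist/GIt/intel/caffe/myExamples/cell_lenet/'
net = caffe.Net(path + 'train_test.prototxt', caffe.TEST)  # instantiates TEST-phase layers only
for name, blob in net.blobs.items():
    print(name, blob.data.shape)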

This generates deploy.prototxt at the same time as train_test.prototxt, but the beginning of the deploy net still has to be changed: the LMDB data layer must be deleted and replaced with an Input layer like this:

layer {
  name: "data"
  type: "Input"
  top: "data"
  input_param { shape: { dim: 10 dim: 3 dim: 100 dim: 100 } }
}
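
As an aside — a sketch of an alternative, not what I did here — pycaffe can also emit that Input layer directly inside write_lenet when isdeploy is True, which would make the text surgery unnecessary:

# hypothetical deploy branch for write_lenet: build the net from an Input
# layer carrying the 10x3x100x100 shape instead of the LMDB data layer
if isdeploy:
    n.data = L.Input(shape=dict(dim=[10, 3, 100, 100]))

I went the other way and patched the generated file after the fact:
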
def change_deploy():
    path = '/home/sailist/GIt/intel/caffe/myExamples/cell_lenet/'
    start = path + 'start_deploy.txt'         # header file holding the Input layer shown above
    deploy_path = path + 'c_deploy.prototxt'  # raw deploy net written by write_lenet
    new_path = path + 'deploy.prototxt'
    with open(deploy_path, 'r') as f:
        # chunk 0 is everything before the first layer; chunk 1 is the LMDB data layer
        spinput = f.read().split("layer")
    with open(start, 'r') as f2:
        spinput[0] = f2.read()  # swap in the Input-layer header
    del spinput[1]              # drop the LMDB data layer
    with open(new_path, 'w') as f2:
        f2.write("layer".join(spinput))
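
Splitting on the literal string "layer" works here, but it is fragile: it breaks if that word appears anywhere else in the file. A more robust sketch (same effect; the function name and default shape are mine) parses the file with Caffe's own protobuf definitions:

from caffe.proto import caffe_pb2
from google.protobuf import text_format

def change_deploy_pb(deploy_path, new_path, shape=(10, 3, 100, 100)):
    # parse the generated prototxt into a NetParameter message
    net = caffe_pb2.NetParameter()
    with open(deploy_path, 'r') as f:
        text_format.Merge(f.read(), net)
    # rebuild the net: an Input layer first, then everything after the old data layer
    out = caffe_pb2.NetParameter()
    data = out.layer.add()
    data.name = 'data'
    data.type = 'Input'
    data.top.append('data')
    data.input_param.shape.add().dim.extend(shape)
    out.layer.extend(net.layer[1:])
    with open(new_path, 'w') as f:
        f.write(text_format.MessageToString(out))

Called as change_deploy_pb(path + 'c_deploy.prototxt', path + 'deploy.prototxt'), it also removes the need for the start_deploy.txt header file.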

I use change_deploy() to automate that fix-up. Finally, just call:

write_lenet(train_lmdb_fname, test_lmdb_fname)
write_lenet(train_lmdb_fname, test_lmdb_fname, isdeploy=True)
change_deploy()

and you get train_test.prototxt and deploy.prototxt in one go. The real key is understanding how blobs are passed from one layer to the next: each L.* constructor takes the previous layer's output as its input.
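
Once you have trained weights, the fixed-up deploy.prototxt is the file you load for inference. A sketch (the .caffemodel snapshot name below is hypothetical):

import caffe
import numpy as np

path = '/home/sailist/GIt/intel/caffe/myExamples/cell_lenet/'
net = caffe.Net(path + 'deploy.prototxt',
                path + 'cell_lenet.caffemodel',  # hypothetical snapshot name
                caffe.TEST)
batch = np.zeros((10, 3, 100, 100), dtype=np.float32)  # one batch of preprocessed images
net.blobs['data'].data[...] = batch
probs = net.forward()['soft']  # (10, 10) class probabilities from the Softmax layer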
