How ONNX model layers correspond to Caffe layers

The ONNX model, for example a Conv node:
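
The node dumps below can be produced by loading the exported model with the onnx package and printing its graph nodes. A minimal sketch ("model.onnx" is a placeholder path):

import onnx

model = onnx.load("model.onnx")
for node in model.graph.node:
    if node.op_type in ("Conv", "BatchNormalization"):
        print(node)   # prints the protobuf text shown below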

## merge bn: BN has been folded into the Conv, which now carries a weight input ("247") and a bias input ("249")
input: "data"
input: "247"
input: "249"
output: "165"
op_type: "Conv"
attribute {
  name: "dilations"
  ints: 1
  ints: 1
  type: INTS
}
attribute {
  name: "group"
  i: 1
  type: INT
}
attribute {
  name: "kernel_shape"
  ints: 3
  ints: 3
  type: INTS
}
attribute {
  name: "pads"
  ints: 1
  ints: 1
  ints: 1
  ints: 1
  type: INTS
}
attribute {
  name: "strides"
  ints: 2
  ints: 2
  type: INTS
}

########## without running simplify.py: the Conv keeps only its weight input and is followed by a separate BatchNormalization node
input: "data"
input: "mobilenetv1_0"
output: "165"
op_type: "Conv"
attribute {
  name: "dilations"
  ints: 1
  ints: 1
  type: INTS
}
attribute {
  name: "group"
  i: 1
  type: INT
}
attribute {
  name: "kernel_shape"
  ints: 3
  ints: 3
  type: INTS
}
attribute {
  name: "pads"
  ints: 1
  ints: 1
  ints: 1
  ints: 1
  type: INTS
}
attribute {
  name: "strides"
  ints: 2
  ints: 2
  type: INTS
}

input: "165"
input: "mobilenetv1_1"
input: "mobilenetv1_2"
input: "mobilenetv1_3"
input: "mobilenetv1_4"
output: "166"
op_type: "BatchNormalization"
attribute {
  name: "epsilon"
  f: 9.99999974738e-06
  type: FLOAT
}
attribute {
  name: "momentum"
  f: 0.899999976158
  type: FLOAT
}
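
For reference, folding these BatchNormalization parameters back into the preceding Conv (which is exactly what the merged model at the top already contains) only rescales the conv weights and shifts the bias. A minimal numpy sketch, where gamma, beta, mean and var correspond to the four BN initializers ("mobilenetv1_1" ... "mobilenetv1_4", in ONNX input order) and eps is the epsilon attribute:

import numpy as np

def fold_bn_into_conv(W, b, gamma, beta, mean, var, eps=1e-5):
    # W: (out_ch, in_ch, kh, kw) conv weights; b: (out_ch,) conv bias (zeros if absent)
    scale = gamma / np.sqrt(var + eps)           # per-output-channel scale
    W_folded = W * scale.reshape(-1, 1, 1, 1)    # scale each output channel
    b_folded = (b - mean) * scale + beta         # shifted bias absorbs mean/beta
    return W_folded, b_folded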


Converting the Conv node to Caffe:

def _convert_conv(node, graph, err):
    weight_name = node.inputs[1]   # name of the weight initializer
    bottom_name = node.inputs[0]   # the layer's input blob (the node's first ONNX input)
    top_name = node.outputs[0]     # the layer's output blob
    layer_name = node.name
    print node.input_tensors       # the initializer tensors (weight/bias arrays) attached to this node

    weight_param = None
    if weight_name in node.input_tensors:  # the weight is stored in the graph initializers
        weight_param = node.input_tensors[weight_name]
    else:
        err.missing_initializer(node, "Weight tensor: {} not found in the graph initializer".format(weight_name,))

    bias_param = None
    if len(node.inputs) > 2:
        bias_param = node.input_tensors[node.inputs[2]]

    bias_term = bias_param is not None

    channel_dim = weight_param.shape[0]  # number of output channels

    dilations = node.attrs["dilations"]
    assert dilations[0] == dilations[1]
    dilation = dilations[0]

    group = node.attrs.get("group", 1)

    kernel_shape = node.attrs["kernel_shape"]
    kernel_h = kernel_shape[0]
    kernel_w = kernel_shape[1]

    # ONNX pads are [pad_top, pad_left, pad_bottom, pad_right]; Caffe uses
    # symmetric padding, so pad_h/pad_w are taken from the first two values.
    pads = node.attrs.get("pads", [0, 0, 0, 0])
    pad_h = pads[0]
    pad_w = pads[1]

    strides = node.attrs["strides"]
    stride_h = strides[0]
    stride_w = strides[1]

    if not bias_term:
        # parameters for convolution layer with batchnorm.
        kwargs = {
            'param': [dict(lr_mult=1, decay_mult=1)],
            'weight_filler': dict(type='gaussian', std=0.01),
        }
    else:
        kwargs = {
            'param': [
                dict(lr_mult=1, decay_mult=1),
                dict(lr_mult=2, decay_mult=0)],
            'weight_filler': dict(type='xavier'),
            'bias_filler': dict(type='constant', value=0)
        }

    layer = Func(
        "Convolution",
        layer_name,
        [bottom_name],
        [top_name],
        kernel_h=kernel_h,
        kernel_w=kernel_w,
        stride_h=stride_h,
        stride_w=stride_w,
        group=group,
        pad_h=pad_h,
        pad_w=pad_w,
        num_output=channel_dim,
        dilation=dilation,
        bias_term=bias_term,
        **kwargs
    )

    graph.channel_dims[top_name] = channel_dim

    return layer
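
The Func call above only emits the Caffe layer definition (the prototxt side); the numeric data in node.input_tensors still has to be copied into the generated Caffe model's blobs. A minimal pycaffe sketch, assuming a Net has already been built from the generated prototxt ('converted.prototxt' and 'converted.caffemodel' are placeholder paths, and weight_param, bias_param, bias_term and layer_name come from the conversion step above):

import caffe

net = caffe.Net('converted.prototxt', caffe.TEST)
net.params[layer_name][0].data[...] = weight_param    # copy conv weights
if bias_term:
    net.params[layer_name][1].data[...] = bias_param  # copy conv bias
net.save('converted.caffemodel')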

Debug output (printing the node's parameters inside _convert_conv):


print node.input_tensors
{u'247': array([[[[ 0.02320949,  0.0111992 , -0.02545058],
         [-0.13549525,  0.11192021,  0.19569604],
         [ 0.05308857, -0.05233441, -0.03859574]],

        [[ 0.057078  , -0.080784  , -0.0868296 ],
         [-0.13128875,  0.15115787,  0.25879627],
         [ 0.03537943,  0.00320818,  0.02913299]],.....
       [[-0.00541021, -0.01009966,  0.00064035],
         [-0.00119456,  0.00344573, -0.00428323],
         [-0.00493221,  0.00212613, -0.01768959]]]], dtype=float32), u'249': array([ 0.6327247 , -0.65135384,  0.91262054, -1.4501885 ,  0.96663123,
        0.60971206, -0.72289187, -0.81442183], dtype=float32)}

print node.inputs[1], node.inputs[0], node.inputs[2]
247  data 249

print node.attrs.get("group")
1

print node.attrs.get("group")[0]  #one value
TypeError: 'long' object has no attribute '__getitem__'


print node.attrs
{u'dilations': [1L, 1L], u'strides': [2L, 2L], u'pads': [1L, 1L, 1L, 1L], u'group': 1L, u'kernel_shape': [3L, 3L]}
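
Because some attributes (like group) come back as plain scalars while the shape-like ones (strides, pads, kernel_shape) are lists, indexing everything uniformly fails, as the TypeError above shows. A small helper can normalize this (a sketch; attr_as_list is a made-up name, not part of any converter API):

def attr_as_list(attrs, name, default=None):
    # Return the attribute as a list, wrapping scalars so indexing is always safe.
    value = attrs.get(name, default)
    if value is None:
        return []
    if isinstance(value, (list, tuple)):
        return list(value)
    return [value]

# attr_as_list(node.attrs, "group")[0]    -> 1
# attr_as_list(node.attrs, "strides")[0]  -> 2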


Reposted from blog.csdn.net/m0_37192554/article/details/103507590