# Caffe deploy net "retinaface" (protobuf text format). The file is whitespace-
# collapsed onto a few very long lines; prototxt parsing is whitespace-insensitive,
# so this layout is valid, just hard to read.
# NOTE(review): the uniform convN / batch_normN / bn_scaleN / relu6N / *_blobN
# numbering looks auto-generated by a model converter, and the net is loaded together
# with a binary .caffemodel whose weights are keyed by these layer names -- do not
# rename layers or re-number blobs by hand.
# Input: a single 1x3x640x640 blob "blob1".
# Below: stem (conv1 stride 2, max_pool1 stride 2, conv2 stride 2), then an SPP-style
# block: k3/k5 stride-1 max pools concatenated with their own input (cat1).
# Every conv is followed by BatchNorm (use_global_stats: true, i.e. inference mode)
# + Scale (bias_term: true) + ReLU6.
name: "retinaface" input: "blob1" input_dim: 1 input_dim: 3 input_dim: 640 input_dim: 640 layer { name: "conv1" type: "Convolution" bottom: "blob1" top: "conv_blob1" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm1" type: "BatchNorm" bottom: "conv_blob1" top: "batch_norm_blob1" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale1" type: "Scale" bottom: "batch_norm_blob1" top: "batch_norm_blob1" scale_param { bias_term: true } } layer { name: "relu61" type: "ReLU6" bottom: "batch_norm_blob1" top: "relu6_blob1" } layer { name: "max_pool1" type: "Pooling" bottom: "relu6_blob1" top: "max_pool_blob1" pooling_param { pool: MAX kernel_size: 3 stride: 2 pad: 1 ceil_mode: true } } layer { name: "conv2" type: "Convolution" bottom: "max_pool_blob1" top: "conv_blob2" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm2" type: "BatchNorm" bottom: "conv_blob2" top: "batch_norm_blob2" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale2" type: "Scale" bottom: "batch_norm_blob2" top: "batch_norm_blob2" scale_param { bias_term: true } } layer { name: "relu62" type: "ReLU6" bottom: "batch_norm_blob2" top: "relu6_blob2" } layer { name: "max_pool2" type: "Pooling" bottom: "relu6_blob2" top: "max_pool_blob2" pooling_param { pool: MAX kernel_size: 3 stride: 1 pad: 1 ceil_mode: true } } layer { name: "max_pool3" type: "Pooling" bottom: "relu6_blob2" top: "max_pool_blob3" pooling_param { pool: MAX kernel_size: 5 stride: 1 pad: 2 ceil_mode: true } } layer { name: "cat1" type: "Concat" bottom: "relu6_blob2" bottom: "max_pool_blob2" bottom: "max_pool_blob3" top: "cat_blob1" concat_param { axis: 1 } } layer { name: "conv3" type: "Convolution" bottom: "cat_blob1" top: "conv_blob3" 
# Stage 1: conv3 (1x1) squeezes cat_blob1 to 48ch; split1 slices it 24/24 on the
# channel axis (slice_point: 24, axis: 1). The second slice goes through
# 1x1 -> 3x3 -> 1x1 bottlenecks whose outputs are summed back (Eltwise SUM: add1).
convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm3" type: "BatchNorm" bottom: "conv_blob3" top: "batch_norm_blob3" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale3" type: "Scale" bottom: "batch_norm_blob3" top: "batch_norm_blob3" scale_param { bias_term: true } } layer { name: "relu63" type: "ReLU6" bottom: "batch_norm_blob3" top: "relu6_blob3" } layer { name: "split1" type: "Slice" bottom: "relu6_blob3" top: "split_blob1" top: "split_blob2" slice_param { slice_point: 24 axis: 1 } } layer { name: "conv4" type: "Convolution" bottom: "split_blob2" top: "conv_blob4" convolution_param { num_output: 24 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm4" type: "BatchNorm" bottom: "conv_blob4" top: "batch_norm_blob4" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale4" type: "Scale" bottom: "batch_norm_blob4" top: "batch_norm_blob4" scale_param { bias_term: true } } layer { name: "relu64" type: "ReLU6" bottom: "batch_norm_blob4" top: "relu6_blob4" } layer { name: "conv5" type: "Convolution" bottom: "relu6_blob4" top: "conv_blob5" convolution_param { num_output: 24 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm5" type: "BatchNorm" bottom: "conv_blob5" top: "batch_norm_blob5" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale5" type: "Scale" bottom: "batch_norm_blob5" top: "batch_norm_blob5" scale_param { bias_term: true } } layer { name: "relu65" type: "ReLU6" bottom: "batch_norm_blob5" top: "relu6_blob5" } layer { name: "conv6" type: "Convolution" bottom: "relu6_blob5" top: "conv_blob6" convolution_param { num_output: 24 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 
# Second stage-1 bottleneck (conv7 -> conv8 -> conv9) on add_blob1, closed by add2.
# Note the last conv of each bottleneck feeds the Eltwise directly from its
# BatchNorm/Scale output (no ReLU6 before the sum).
1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm6" type: "BatchNorm" bottom: "conv_blob6" top: "batch_norm_blob6" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale6" type: "Scale" bottom: "batch_norm_blob6" top: "batch_norm_blob6" scale_param { bias_term: true } } layer { name: "add1" type: "Eltwise" bottom: "split_blob2" bottom: "batch_norm_blob6" top: "add_blob1" eltwise_param { operation: SUM } } layer { name: "conv7" type: "Convolution" bottom: "add_blob1" top: "conv_blob7" convolution_param { num_output: 24 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm7" type: "BatchNorm" bottom: "conv_blob7" top: "batch_norm_blob7" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale7" type: "Scale" bottom: "batch_norm_blob7" top: "batch_norm_blob7" scale_param { bias_term: true } } layer { name: "relu66" type: "ReLU6" bottom: "batch_norm_blob7" top: "relu6_blob6" } layer { name: "conv8" type: "Convolution" bottom: "relu6_blob6" top: "conv_blob8" convolution_param { num_output: 24 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm8" type: "BatchNorm" bottom: "conv_blob8" top: "batch_norm_blob8" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale8" type: "Scale" bottom: "batch_norm_blob8" top: "batch_norm_blob8" scale_param { bias_term: true } } layer { name: "relu67" type: "ReLU6" bottom: "batch_norm_blob8" top: "relu6_blob7" } layer { name: "conv9" type: "Convolution" bottom: "relu6_blob7" top: "conv_blob9" convolution_param { num_output: 24 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm9" type: "BatchNorm" bottom: "conv_blob9" top: "batch_norm_blob9" batch_norm_param { use_global_stats: 
# add2 closes stage 1's residual pair; cat2 re-joins the untouched slice
# (split_blob1) with the processed branch. conv10 (3x3, stride 2) downsamples;
# another k3/k5 max-pool concat (cat3) feeds conv11 (1x1, 64ch) -- cat_blob2 is
# also tapped later as an FPN lateral input (see conv43 further down the file).
true eps: 9.9999997e-06 } } layer { name: "bn_scale9" type: "Scale" bottom: "batch_norm_blob9" top: "batch_norm_blob9" scale_param { bias_term: true } } layer { name: "add2" type: "Eltwise" bottom: "add_blob1" bottom: "batch_norm_blob9" top: "add_blob2" eltwise_param { operation: SUM } } layer { name: "cat2" type: "Concat" bottom: "split_blob1" bottom: "add_blob2" top: "cat_blob2" concat_param { axis: 1 } } layer { name: "conv10" type: "Convolution" bottom: "cat_blob2" top: "conv_blob10" convolution_param { num_output: 48 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm10" type: "BatchNorm" bottom: "conv_blob10" top: "batch_norm_blob10" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale10" type: "Scale" bottom: "batch_norm_blob10" top: "batch_norm_blob10" scale_param { bias_term: true } } layer { name: "relu68" type: "ReLU6" bottom: "batch_norm_blob10" top: "relu6_blob8" } layer { name: "max_pool4" type: "Pooling" bottom: "relu6_blob8" top: "max_pool_blob4" pooling_param { pool: MAX kernel_size: 3 stride: 1 pad: 1 ceil_mode: true } } layer { name: "max_pool5" type: "Pooling" bottom: "relu6_blob8" top: "max_pool_blob5" pooling_param { pool: MAX kernel_size: 5 stride: 1 pad: 2 ceil_mode: true } } layer { name: "cat3" type: "Concat" bottom: "relu6_blob8" bottom: "max_pool_blob4" bottom: "max_pool_blob5" top: "cat_blob3" concat_param { axis: 1 } } layer { name: "conv11" type: "Convolution" bottom: "cat_blob3" top: "conv_blob11" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm11" type: "BatchNorm" bottom: "conv_blob11" top: "batch_norm_blob11" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale11" type: "Scale" bottom: "batch_norm_blob11" top: "batch_norm_blob11" scale_param { bias_term: true } } 
# Stage 2: ReLU6 on the 64ch conv11 output, then split2 slices it 32/32
# (slice_point: 32, axis: 1). The second slice passes through three
# 1x1 -> 3x3 -> 1x1 residual bottlenecks (add3 / add4 / add5 further below).
layer { name: "relu69" type: "ReLU6" bottom: "batch_norm_blob11" top: "relu6_blob9" } layer { name: "split2" type: "Slice" bottom: "relu6_blob9" top: "split_blob3" top: "split_blob4" slice_param { slice_point: 32 axis: 1 } } layer { name: "conv12" type: "Convolution" bottom: "split_blob4" top: "conv_blob12" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm12" type: "BatchNorm" bottom: "conv_blob12" top: "batch_norm_blob12" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale12" type: "Scale" bottom: "batch_norm_blob12" top: "batch_norm_blob12" scale_param { bias_term: true } } layer { name: "relu610" type: "ReLU6" bottom: "batch_norm_blob12" top: "relu6_blob10" } layer { name: "conv13" type: "Convolution" bottom: "relu6_blob10" top: "conv_blob13" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm13" type: "BatchNorm" bottom: "conv_blob13" top: "batch_norm_blob13" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale13" type: "Scale" bottom: "batch_norm_blob13" top: "batch_norm_blob13" scale_param { bias_term: true } } layer { name: "relu611" type: "ReLU6" bottom: "batch_norm_blob13" top: "relu6_blob11" } layer { name: "conv14" type: "Convolution" bottom: "relu6_blob11" top: "conv_blob14" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm14" type: "BatchNorm" bottom: "conv_blob14" top: "batch_norm_blob14" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale14" type: "Scale" bottom: "batch_norm_blob14" top: "batch_norm_blob14" scale_param { bias_term: true } } layer { name: "add3" type: "Eltwise" bottom: 
# Second stage-2 bottleneck (conv15-17), residual sum add4.
"split_blob4" bottom: "batch_norm_blob14" top: "add_blob3" eltwise_param { operation: SUM } } layer { name: "conv15" type: "Convolution" bottom: "add_blob3" top: "conv_blob15" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm15" type: "BatchNorm" bottom: "conv_blob15" top: "batch_norm_blob15" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale15" type: "Scale" bottom: "batch_norm_blob15" top: "batch_norm_blob15" scale_param { bias_term: true } } layer { name: "relu612" type: "ReLU6" bottom: "batch_norm_blob15" top: "relu6_blob12" } layer { name: "conv16" type: "Convolution" bottom: "relu6_blob12" top: "conv_blob16" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm16" type: "BatchNorm" bottom: "conv_blob16" top: "batch_norm_blob16" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale16" type: "Scale" bottom: "batch_norm_blob16" top: "batch_norm_blob16" scale_param { bias_term: true } } layer { name: "relu613" type: "ReLU6" bottom: "batch_norm_blob16" top: "relu6_blob13" } layer { name: "conv17" type: "Convolution" bottom: "relu6_blob13" top: "conv_blob17" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm17" type: "BatchNorm" bottom: "conv_blob17" top: "batch_norm_blob17" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale17" type: "Scale" bottom: "batch_norm_blob17" top: "batch_norm_blob17" scale_param { bias_term: true } } layer { name: "add4" type: "Eltwise" bottom: "add_blob3" bottom: "batch_norm_blob17" top: "add_blob4" eltwise_param { operation: SUM } } layer { name: "conv18" type: "Convolution" bottom: 
# Third stage-2 bottleneck (conv18-20), residual sum add5; cat4 re-joins the
# untouched slice (split_blob3). cat_blob4 is also an FPN lateral tap (conv44 below).
"add_blob4" top: "conv_blob18" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm18" type: "BatchNorm" bottom: "conv_blob18" top: "batch_norm_blob18" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale18" type: "Scale" bottom: "batch_norm_blob18" top: "batch_norm_blob18" scale_param { bias_term: true } } layer { name: "relu614" type: "ReLU6" bottom: "batch_norm_blob18" top: "relu6_blob14" } layer { name: "conv19" type: "Convolution" bottom: "relu6_blob14" top: "conv_blob19" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm19" type: "BatchNorm" bottom: "conv_blob19" top: "batch_norm_blob19" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale19" type: "Scale" bottom: "batch_norm_blob19" top: "batch_norm_blob19" scale_param { bias_term: true } } layer { name: "relu615" type: "ReLU6" bottom: "batch_norm_blob19" top: "relu6_blob15" } layer { name: "conv20" type: "Convolution" bottom: "relu6_blob15" top: "conv_blob20" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm20" type: "BatchNorm" bottom: "conv_blob20" top: "batch_norm_blob20" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale20" type: "Scale" bottom: "batch_norm_blob20" top: "batch_norm_blob20" scale_param { bias_term: true } } layer { name: "add5" type: "Eltwise" bottom: "add_blob4" bottom: "batch_norm_blob20" top: "add_blob5" eltwise_param { operation: SUM } } layer { name: "cat4" type: "Concat" bottom: "split_blob3" bottom: "add_blob5" top: "cat_blob4" concat_param { axis: 1 } } layer { name: "conv21" type: "Convolution" bottom: "cat_blob4" top: 
# conv21 (3x3, stride 2) downsamples; k3/k5 max-pool concat (cat5); conv22 (1x1)
# widens to 96ch; split3 slices 48/48 and the stage-3 bottlenecks begin (conv23...).
"conv_blob21" convolution_param { num_output: 64 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm21" type: "BatchNorm" bottom: "conv_blob21" top: "batch_norm_blob21" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale21" type: "Scale" bottom: "batch_norm_blob21" top: "batch_norm_blob21" scale_param { bias_term: true } } layer { name: "relu616" type: "ReLU6" bottom: "batch_norm_blob21" top: "relu6_blob16" } layer { name: "max_pool6" type: "Pooling" bottom: "relu6_blob16" top: "max_pool_blob6" pooling_param { pool: MAX kernel_size: 3 stride: 1 pad: 1 ceil_mode: true } } layer { name: "max_pool7" type: "Pooling" bottom: "relu6_blob16" top: "max_pool_blob7" pooling_param { pool: MAX kernel_size: 5 stride: 1 pad: 2 ceil_mode: true } } layer { name: "cat5" type: "Concat" bottom: "relu6_blob16" bottom: "max_pool_blob6" bottom: "max_pool_blob7" top: "cat_blob5" concat_param { axis: 1 } } layer { name: "conv22" type: "Convolution" bottom: "cat_blob5" top: "conv_blob22" convolution_param { num_output: 96 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm22" type: "BatchNorm" bottom: "conv_blob22" top: "batch_norm_blob22" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale22" type: "Scale" bottom: "batch_norm_blob22" top: "batch_norm_blob22" scale_param { bias_term: true } } layer { name: "relu617" type: "ReLU6" bottom: "batch_norm_blob22" top: "relu6_blob17" } layer { name: "split3" type: "Slice" bottom: "relu6_blob17" top: "split_blob5" top: "split_blob6" slice_param { slice_point: 48 axis: 1 } } layer { name: "conv23" type: "Convolution" bottom: "split_blob6" top: "conv_blob23" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: 
# First stage-3 bottleneck closes with add6; second one (conv26-28) starts here.
"batch_norm23" type: "BatchNorm" bottom: "conv_blob23" top: "batch_norm_blob23" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale23" type: "Scale" bottom: "batch_norm_blob23" top: "batch_norm_blob23" scale_param { bias_term: true } } layer { name: "relu618" type: "ReLU6" bottom: "batch_norm_blob23" top: "relu6_blob18" } layer { name: "conv24" type: "Convolution" bottom: "relu6_blob18" top: "conv_blob24" convolution_param { num_output: 48 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm24" type: "BatchNorm" bottom: "conv_blob24" top: "batch_norm_blob24" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale24" type: "Scale" bottom: "batch_norm_blob24" top: "batch_norm_blob24" scale_param { bias_term: true } } layer { name: "relu619" type: "ReLU6" bottom: "batch_norm_blob24" top: "relu6_blob19" } layer { name: "conv25" type: "Convolution" bottom: "relu6_blob19" top: "conv_blob25" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm25" type: "BatchNorm" bottom: "conv_blob25" top: "batch_norm_blob25" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale25" type: "Scale" bottom: "batch_norm_blob25" top: "batch_norm_blob25" scale_param { bias_term: true } } layer { name: "add6" type: "Eltwise" bottom: "split_blob6" bottom: "batch_norm_blob25" top: "add_blob6" eltwise_param { operation: SUM } } layer { name: "conv26" type: "Convolution" bottom: "add_blob6" top: "conv_blob26" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm26" type: "BatchNorm" bottom: "conv_blob26" top: "batch_norm_blob26" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } 
# Stage 3 continued: residual bottlenecks conv26-28 (add7), conv29-31 (add8) and
# conv32-34 (add9); cat6 then re-joins the untouched slice (split_blob5).
# cat_blob6 is also an FPN lateral tap (conv45 below).
layer { name: "bn_scale26" type: "Scale" bottom: "batch_norm_blob26" top: "batch_norm_blob26" scale_param { bias_term: true } } layer { name: "relu620" type: "ReLU6" bottom: "batch_norm_blob26" top: "relu6_blob20" } layer { name: "conv27" type: "Convolution" bottom: "relu6_blob20" top: "conv_blob27" convolution_param { num_output: 48 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm27" type: "BatchNorm" bottom: "conv_blob27" top: "batch_norm_blob27" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale27" type: "Scale" bottom: "batch_norm_blob27" top: "batch_norm_blob27" scale_param { bias_term: true } } layer { name: "relu621" type: "ReLU6" bottom: "batch_norm_blob27" top: "relu6_blob21" } layer { name: "conv28" type: "Convolution" bottom: "relu6_blob21" top: "conv_blob28" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm28" type: "BatchNorm" bottom: "conv_blob28" top: "batch_norm_blob28" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale28" type: "Scale" bottom: "batch_norm_blob28" top: "batch_norm_blob28" scale_param { bias_term: true } } layer { name: "add7" type: "Eltwise" bottom: "add_blob6" bottom: "batch_norm_blob28" top: "add_blob7" eltwise_param { operation: SUM } } layer { name: "conv29" type: "Convolution" bottom: "add_blob7" top: "conv_blob29" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm29" type: "BatchNorm" bottom: "conv_blob29" top: "batch_norm_blob29" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale29" type: "Scale" bottom: "batch_norm_blob29" top: "batch_norm_blob29" scale_param { bias_term: true } } layer { name: 
"relu622" type: "ReLU6" bottom: "batch_norm_blob29" top: "relu6_blob22" } layer { name: "conv30" type: "Convolution" bottom: "relu6_blob22" top: "conv_blob30" convolution_param { num_output: 48 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm30" type: "BatchNorm" bottom: "conv_blob30" top: "batch_norm_blob30" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale30" type: "Scale" bottom: "batch_norm_blob30" top: "batch_norm_blob30" scale_param { bias_term: true } } layer { name: "relu623" type: "ReLU6" bottom: "batch_norm_blob30" top: "relu6_blob23" } layer { name: "conv31" type: "Convolution" bottom: "relu6_blob23" top: "conv_blob31" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm31" type: "BatchNorm" bottom: "conv_blob31" top: "batch_norm_blob31" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale31" type: "Scale" bottom: "batch_norm_blob31" top: "batch_norm_blob31" scale_param { bias_term: true } } layer { name: "add8" type: "Eltwise" bottom: "add_blob7" bottom: "batch_norm_blob31" top: "add_blob8" eltwise_param { operation: SUM } } layer { name: "conv32" type: "Convolution" bottom: "add_blob8" top: "conv_blob32" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm32" type: "BatchNorm" bottom: "conv_blob32" top: "batch_norm_blob32" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale32" type: "Scale" bottom: "batch_norm_blob32" top: "batch_norm_blob32" scale_param { bias_term: true } } layer { name: "relu624" type: "ReLU6" bottom: "batch_norm_blob32" top: "relu6_blob24" } layer { name: "conv33" type: "Convolution" bottom: "relu6_blob24" top: 
# Stage 4 begins after cat6: conv35 (3x3, stride 2) downsamples, k3/k5 max-pool
# concat (cat7), conv36 (1x1) widens to 128ch.
"conv_blob33" convolution_param { num_output: 48 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm33" type: "BatchNorm" bottom: "conv_blob33" top: "batch_norm_blob33" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale33" type: "Scale" bottom: "batch_norm_blob33" top: "batch_norm_blob33" scale_param { bias_term: true } } layer { name: "relu625" type: "ReLU6" bottom: "batch_norm_blob33" top: "relu6_blob25" } layer { name: "conv34" type: "Convolution" bottom: "relu6_blob25" top: "conv_blob34" convolution_param { num_output: 48 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm34" type: "BatchNorm" bottom: "conv_blob34" top: "batch_norm_blob34" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale34" type: "Scale" bottom: "batch_norm_blob34" top: "batch_norm_blob34" scale_param { bias_term: true } } layer { name: "add9" type: "Eltwise" bottom: "add_blob8" bottom: "batch_norm_blob34" top: "add_blob9" eltwise_param { operation: SUM } } layer { name: "cat6" type: "Concat" bottom: "split_blob5" bottom: "add_blob9" top: "cat_blob6" concat_param { axis: 1 } } layer { name: "conv35" type: "Convolution" bottom: "cat_blob6" top: "conv_blob35" convolution_param { num_output: 96 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm35" type: "BatchNorm" bottom: "conv_blob35" top: "batch_norm_blob35" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale35" type: "Scale" bottom: "batch_norm_blob35" top: "batch_norm_blob35" scale_param { bias_term: true } } layer { name: "relu626" type: "ReLU6" bottom: "batch_norm_blob35" top: "relu6_blob26" } layer { name: "max_pool8" type: "Pooling" bottom: "relu6_blob26" top: "max_pool_blob8" 
# Stage 4 body: split4 slices 64/64; two residual bottlenecks conv37-39 (add10)
# and conv40-42 (add11); cat8 re-joins split_blob7. cat_blob8 is the deepest FPN
# lateral tap (conv46 below).
pooling_param { pool: MAX kernel_size: 3 stride: 1 pad: 1 ceil_mode: true } } layer { name: "max_pool9" type: "Pooling" bottom: "relu6_blob26" top: "max_pool_blob9" pooling_param { pool: MAX kernel_size: 5 stride: 1 pad: 2 ceil_mode: true } } layer { name: "cat7" type: "Concat" bottom: "relu6_blob26" bottom: "max_pool_blob8" bottom: "max_pool_blob9" top: "cat_blob7" concat_param { axis: 1 } } layer { name: "conv36" type: "Convolution" bottom: "cat_blob7" top: "conv_blob36" convolution_param { num_output: 128 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm36" type: "BatchNorm" bottom: "conv_blob36" top: "batch_norm_blob36" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale36" type: "Scale" bottom: "batch_norm_blob36" top: "batch_norm_blob36" scale_param { bias_term: true } } layer { name: "relu627" type: "ReLU6" bottom: "batch_norm_blob36" top: "relu6_blob27" } layer { name: "split4" type: "Slice" bottom: "relu6_blob27" top: "split_blob7" top: "split_blob8" slice_param { slice_point: 64 axis: 1 } } layer { name: "conv37" type: "Convolution" bottom: "split_blob8" top: "conv_blob37" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm37" type: "BatchNorm" bottom: "conv_blob37" top: "batch_norm_blob37" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale37" type: "Scale" bottom: "batch_norm_blob37" top: "batch_norm_blob37" scale_param { bias_term: true } } layer { name: "relu628" type: "ReLU6" bottom: "batch_norm_blob37" top: "relu6_blob28" } layer { name: "conv38" type: "Convolution" bottom: "relu6_blob28" top: "conv_blob38" convolution_param { num_output: 64 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm38" type: 
"BatchNorm" bottom: "conv_blob38" top: "batch_norm_blob38" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale38" type: "Scale" bottom: "batch_norm_blob38" top: "batch_norm_blob38" scale_param { bias_term: true } } layer { name: "relu629" type: "ReLU6" bottom: "batch_norm_blob38" top: "relu6_blob29" } layer { name: "conv39" type: "Convolution" bottom: "relu6_blob29" top: "conv_blob39" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm39" type: "BatchNorm" bottom: "conv_blob39" top: "batch_norm_blob39" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale39" type: "Scale" bottom: "batch_norm_blob39" top: "batch_norm_blob39" scale_param { bias_term: true } } layer { name: "add10" type: "Eltwise" bottom: "split_blob8" bottom: "batch_norm_blob39" top: "add_blob10" eltwise_param { operation: SUM } } layer { name: "conv40" type: "Convolution" bottom: "add_blob10" top: "conv_blob40" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm40" type: "BatchNorm" bottom: "conv_blob40" top: "batch_norm_blob40" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale40" type: "Scale" bottom: "batch_norm_blob40" top: "batch_norm_blob40" scale_param { bias_term: true } } layer { name: "relu630" type: "ReLU6" bottom: "batch_norm_blob40" top: "relu6_blob30" } layer { name: "conv41" type: "Convolution" bottom: "relu6_blob30" top: "conv_blob41" convolution_param { num_output: 64 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm41" type: "BatchNorm" bottom: "conv_blob41" top: "batch_norm_blob41" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: 
# FPN-style neck: 1x1 laterals conv43/conv44/conv45/conv46 map the four stage
# outputs cat_blob2 / cat_blob4 / cat_blob6 / cat_blob8 to 64ch each; top-down
# merging uses depthwise Deconvolution upsamples (k2 s2, group: 64), Crop for
# spatial alignment, and Eltwise SUM.
"bn_scale41" type: "Scale" bottom: "batch_norm_blob41" top: "batch_norm_blob41" scale_param { bias_term: true } } layer { name: "relu631" type: "ReLU6" bottom: "batch_norm_blob41" top: "relu6_blob31" } layer { name: "conv42" type: "Convolution" bottom: "relu6_blob31" top: "conv_blob42" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm42" type: "BatchNorm" bottom: "conv_blob42" top: "batch_norm_blob42" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale42" type: "Scale" bottom: "batch_norm_blob42" top: "batch_norm_blob42" scale_param { bias_term: true } } layer { name: "add11" type: "Eltwise" bottom: "add_blob10" bottom: "batch_norm_blob42" top: "add_blob11" eltwise_param { operation: SUM } } layer { name: "cat8" type: "Concat" bottom: "split_blob7" bottom: "add_blob11" top: "cat_blob8" concat_param { axis: 1 } } layer { name: "conv43" type: "Convolution" bottom: "cat_blob2" top: "conv_blob43" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm43" type: "BatchNorm" bottom: "conv_blob43" top: "batch_norm_blob43" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale43" type: "Scale" bottom: "batch_norm_blob43" top: "batch_norm_blob43" scale_param { bias_term: true } } layer { name: "relu632" type: "ReLU6" bottom: "batch_norm_blob43" top: "relu6_blob32" } layer { name: "conv44" type: "Convolution" bottom: "cat_blob4" top: "conv_blob44" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm44" type: "BatchNorm" bottom: "conv_blob44" top: "batch_norm_blob44" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale44" type: 
# NOTE(review): crop1's top blob is named "crop0" while crop2/crop3 reuse their
# layer names; its consumer (add12) matches "crop0", so the graph is consistent --
# converter quirk, harmless but easy to trip over when reading.
# NOTE(review): upsample2 consumes the pre-merge lateral "relu6_blob34" (and
# upsample3 below consumes "relu6_blob33") rather than the merged add_blob12 /
# add_blob13, so the deepest level's information does not accumulate down the
# pyramid -- presumably mirrors the source model; confirm against the original
# training graph before assuming it is a conversion bug.
"Scale" bottom: "batch_norm_blob44" top: "batch_norm_blob44" scale_param { bias_term: true } } layer { name: "relu633" type: "ReLU6" bottom: "batch_norm_blob44" top: "relu6_blob33" } layer { name: "conv45" type: "Convolution" bottom: "cat_blob6" top: "conv_blob45" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm45" type: "BatchNorm" bottom: "conv_blob45" top: "batch_norm_blob45" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale45" type: "Scale" bottom: "batch_norm_blob45" top: "batch_norm_blob45" scale_param { bias_term: true } } layer { name: "relu634" type: "ReLU6" bottom: "batch_norm_blob45" top: "relu6_blob34" } layer { name: "conv46" type: "Convolution" bottom: "cat_blob8" top: "conv_blob46" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm46" type: "BatchNorm" bottom: "conv_blob46" top: "batch_norm_blob46" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale46" type: "Scale" bottom: "batch_norm_blob46" top: "batch_norm_blob46" scale_param { bias_term: true } } layer { name: "relu635" type: "ReLU6" bottom: "batch_norm_blob46" top: "relu6_blob35" } layer { name: "upsample1" type: "Deconvolution" bottom: "relu6_blob35" top: "upsample_blob1" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 2 group: 64 stride: 2 weight_filler { type: "xavier" } engine: CAFFE dilation: 1 } } layer { name: "crop1" type: "Crop" bottom: "upsample_blob1" bottom: "relu6_blob34" top: "crop0" crop_param { axis: 2 offset: 0 offset: 0 } } layer { name: "add12" type: "Eltwise" bottom: "relu6_blob34" bottom: "crop0" top: "add_blob12" eltwise_param { operation: SUM } } layer { name: "upsample2" type: "Deconvolution" bottom: "relu6_blob34" top: "upsample_blob2" 
# First detection head, on add_blob14 (the finest merged level): conv47 (1x1, 64ch)
# then three chained 3x3 branches (conv48 32ch, conv50 16ch, conv52 16ch) whose
# ReLU6 outputs relu6_blob37/39/41 are concatenated by cat9 (32+16+16 = 64ch) --
# an SSH-style context module by the look of it (inferred from the branch shapes).
convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 2 group: 64 stride: 2 weight_filler { type: "xavier" } engine: CAFFE dilation: 1 } } layer { name: "crop2" type: "Crop" bottom: "upsample_blob2" bottom: "relu6_blob33" top: "crop2" crop_param { axis: 2 offset: 0 offset: 0 } } layer { name: "add13" type: "Eltwise" bottom: "crop2" bottom: "relu6_blob33" top: "add_blob13" eltwise_param { operation: SUM } } layer { name: "upsample3" type: "Deconvolution" bottom: "relu6_blob33" top: "upsample_blob3" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 2 group: 64 stride: 2 weight_filler { type: "xavier" } engine: CAFFE dilation: 1 } } layer { name: "crop3" type: "Crop" bottom: "upsample_blob3" bottom: "relu6_blob32" top: "crop3" crop_param { axis: 2 offset: 0 offset: 0 } } layer { name: "add14" type: "Eltwise" bottom: "crop3" bottom: "relu6_blob32" top: "add_blob14" eltwise_param { operation: SUM } } layer { name: "conv47" type: "Convolution" bottom: "add_blob14" top: "conv_blob47" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm47" type: "BatchNorm" bottom: "conv_blob47" top: "batch_norm_blob47" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale47" type: "Scale" bottom: "batch_norm_blob47" top: "batch_norm_blob47" scale_param { bias_term: true } } layer { name: "relu636" type: "ReLU6" bottom: "batch_norm_blob47" top: "relu6_blob36" } layer { name: "conv48" type: "Convolution" bottom: "relu6_blob36" top: "conv_blob48" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm48" type: "BatchNorm" bottom: "conv_blob48" top: "batch_norm_blob48" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale48" type: "Scale" bottom: 
"batch_norm_blob48" top: "batch_norm_blob48" scale_param { bias_term: true } } layer { name: "relu637" type: "ReLU6" bottom: "batch_norm_blob48" top: "relu6_blob37" } layer { name: "conv49" type: "Convolution" bottom: "relu6_blob37" top: "conv_blob49" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm49" type: "BatchNorm" bottom: "conv_blob49" top: "batch_norm_blob49" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale49" type: "Scale" bottom: "batch_norm_blob49" top: "batch_norm_blob49" scale_param { bias_term: true } } layer { name: "relu638" type: "ReLU6" bottom: "batch_norm_blob49" top: "relu6_blob38" } layer { name: "conv50" type: "Convolution" bottom: "relu6_blob38" top: "conv_blob50" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm50" type: "BatchNorm" bottom: "conv_blob50" top: "batch_norm_blob50" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale50" type: "Scale" bottom: "batch_norm_blob50" top: "batch_norm_blob50" scale_param { bias_term: true } } layer { name: "relu639" type: "ReLU6" bottom: "batch_norm_blob50" top: "relu6_blob39" } layer { name: "conv51" type: "Convolution" bottom: "relu6_blob39" top: "conv_blob51" convolution_param { num_output: 16 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm51" type: "BatchNorm" bottom: "conv_blob51" top: "batch_norm_blob51" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale51" type: "Scale" bottom: "batch_norm_blob51" top: "batch_norm_blob51" scale_param { bias_term: true } } layer { name: "relu640" type: "ReLU6" bottom: "batch_norm_blob51" top: "relu6_blob40" } layer { name: "conv52" 
# Second head, on add_blob13, begins with conv53/conv54 below.
# NOTE(review): this chunk ends mid-message -- the trailing "conv55" layer is
# truncated here and completed in the next chunk of the file; left untouched.
type: "Convolution" bottom: "relu6_blob40" top: "conv_blob52" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm52" type: "BatchNorm" bottom: "conv_blob52" top: "batch_norm_blob52" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale52" type: "Scale" bottom: "batch_norm_blob52" top: "batch_norm_blob52" scale_param { bias_term: true } } layer { name: "relu641" type: "ReLU6" bottom: "batch_norm_blob52" top: "relu6_blob41" } layer { name: "cat9" type: "Concat" bottom: "relu6_blob37" bottom: "relu6_blob39" bottom: "relu6_blob41" top: "cat_blob9" concat_param { axis: 1 } } layer { name: "conv53" type: "Convolution" bottom: "add_blob13" top: "conv_blob53" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm53" type: "BatchNorm" bottom: "conv_blob53" top: "batch_norm_blob53" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale53" type: "Scale" bottom: "batch_norm_blob53" top: "batch_norm_blob53" scale_param { bias_term: true } } layer { name: "relu642" type: "ReLU6" bottom: "batch_norm_blob53" top: "relu6_blob42" } layer { name: "conv54" type: "Convolution" bottom: "relu6_blob42" top: "conv_blob54" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm54" type: "BatchNorm" bottom: "conv_blob54" top: "batch_norm_blob54" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale54" type: "Scale" bottom: "batch_norm_blob54" top: "batch_norm_blob54" scale_param { bias_term: true } } layer { name: "relu643" type: "ReLU6" bottom: "batch_norm_blob54" top: "relu6_blob43" } layer { name: "conv55" type: "Convolution" bottom: "relu6_blob43" 
top: "conv_blob55" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm55" type: "BatchNorm" bottom: "conv_blob55" top: "batch_norm_blob55" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale55" type: "Scale" bottom: "batch_norm_blob55" top: "batch_norm_blob55" scale_param { bias_term: true } } layer { name: "relu644" type: "ReLU6" bottom: "batch_norm_blob55" top: "relu6_blob44" } layer { name: "conv56" type: "Convolution" bottom: "relu6_blob44" top: "conv_blob56" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm56" type: "BatchNorm" bottom: "conv_blob56" top: "batch_norm_blob56" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale56" type: "Scale" bottom: "batch_norm_blob56" top: "batch_norm_blob56" scale_param { bias_term: true } } layer { name: "relu645" type: "ReLU6" bottom: "batch_norm_blob56" top: "relu6_blob45" } layer { name: "conv57" type: "Convolution" bottom: "relu6_blob45" top: "conv_blob57" convolution_param { num_output: 16 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm57" type: "BatchNorm" bottom: "conv_blob57" top: "batch_norm_blob57" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale57" type: "Scale" bottom: "batch_norm_blob57" top: "batch_norm_blob57" scale_param { bias_term: true } } layer { name: "relu646" type: "ReLU6" bottom: "batch_norm_blob57" top: "relu6_blob46" } layer { name: "conv58" type: "Convolution" bottom: "relu6_blob46" top: "conv_blob58" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm58" 
type: "BatchNorm" bottom: "conv_blob58" top: "batch_norm_blob58" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale58" type: "Scale" bottom: "batch_norm_blob58" top: "batch_norm_blob58" scale_param { bias_term: true } } layer { name: "relu647" type: "ReLU6" bottom: "batch_norm_blob58" top: "relu6_blob47" } layer { name: "cat10" type: "Concat" bottom: "relu6_blob43" bottom: "relu6_blob45" bottom: "relu6_blob47" top: "cat_blob10" concat_param { axis: 1 } } layer { name: "conv59" type: "Convolution" bottom: "add_blob12" top: "conv_blob59" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm59" type: "BatchNorm" bottom: "conv_blob59" top: "batch_norm_blob59" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale59" type: "Scale" bottom: "batch_norm_blob59" top: "batch_norm_blob59" scale_param { bias_term: true } } layer { name: "relu648" type: "ReLU6" bottom: "batch_norm_blob59" top: "relu6_blob48" } layer { name: "conv60" type: "Convolution" bottom: "relu6_blob48" top: "conv_blob60" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm60" type: "BatchNorm" bottom: "conv_blob60" top: "batch_norm_blob60" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale60" type: "Scale" bottom: "batch_norm_blob60" top: "batch_norm_blob60" scale_param { bias_term: true } } layer { name: "relu649" type: "ReLU6" bottom: "batch_norm_blob60" top: "relu6_blob49" } layer { name: "conv61" type: "Convolution" bottom: "relu6_blob49" top: "conv_blob61" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm61" type: "BatchNorm" bottom: "conv_blob61" top: 
"batch_norm_blob61" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale61" type: "Scale" bottom: "batch_norm_blob61" top: "batch_norm_blob61" scale_param { bias_term: true } } layer { name: "relu650" type: "ReLU6" bottom: "batch_norm_blob61" top: "relu6_blob50" } layer { name: "conv62" type: "Convolution" bottom: "relu6_blob50" top: "conv_blob62" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm62" type: "BatchNorm" bottom: "conv_blob62" top: "batch_norm_blob62" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale62" type: "Scale" bottom: "batch_norm_blob62" top: "batch_norm_blob62" scale_param { bias_term: true } } layer { name: "relu651" type: "ReLU6" bottom: "batch_norm_blob62" top: "relu6_blob51" } layer { name: "conv63" type: "Convolution" bottom: "relu6_blob51" top: "conv_blob63" convolution_param { num_output: 16 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm63" type: "BatchNorm" bottom: "conv_blob63" top: "batch_norm_blob63" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale63" type: "Scale" bottom: "batch_norm_blob63" top: "batch_norm_blob63" scale_param { bias_term: true } } layer { name: "relu652" type: "ReLU6" bottom: "batch_norm_blob63" top: "relu6_blob52" } layer { name: "conv64" type: "Convolution" bottom: "relu6_blob52" top: "conv_blob64" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm64" type: "BatchNorm" bottom: "conv_blob64" top: "batch_norm_blob64" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale64" type: "Scale" bottom: "batch_norm_blob64" top: "batch_norm_blob64" scale_param { 
bias_term: true } } layer { name: "relu653" type: "ReLU6" bottom: "batch_norm_blob64" top: "relu6_blob53" } layer { name: "cat11" type: "Concat" bottom: "relu6_blob49" bottom: "relu6_blob51" bottom: "relu6_blob53" top: "cat_blob11" concat_param { axis: 1 } } layer { name: "conv65" type: "Convolution" bottom: "relu6_blob35" top: "conv_blob65" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm65" type: "BatchNorm" bottom: "conv_blob65" top: "batch_norm_blob65" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale65" type: "Scale" bottom: "batch_norm_blob65" top: "batch_norm_blob65" scale_param { bias_term: true } } layer { name: "relu654" type: "ReLU6" bottom: "batch_norm_blob65" top: "relu6_blob54" } layer { name: "conv66" type: "Convolution" bottom: "relu6_blob54" top: "conv_blob66" convolution_param { num_output: 32 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm66" type: "BatchNorm" bottom: "conv_blob66" top: "batch_norm_blob66" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale66" type: "Scale" bottom: "batch_norm_blob66" top: "batch_norm_blob66" scale_param { bias_term: true } } layer { name: "relu655" type: "ReLU6" bottom: "batch_norm_blob66" top: "relu6_blob55" } layer { name: "conv67" type: "Convolution" bottom: "relu6_blob55" top: "conv_blob67" convolution_param { num_output: 32 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm67" type: "BatchNorm" bottom: "conv_blob67" top: "batch_norm_blob67" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale67" type: "Scale" bottom: "batch_norm_blob67" top: "batch_norm_blob67" scale_param { bias_term: true } } layer { name: 
"relu656" type: "ReLU6" bottom: "batch_norm_blob67" top: "relu6_blob56" } layer { name: "conv68" type: "Convolution" bottom: "relu6_blob56" top: "conv_blob68" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm68" type: "BatchNorm" bottom: "conv_blob68" top: "batch_norm_blob68" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale68" type: "Scale" bottom: "batch_norm_blob68" top: "batch_norm_blob68" scale_param { bias_term: true } } layer { name: "relu657" type: "ReLU6" bottom: "batch_norm_blob68" top: "relu6_blob57" } layer { name: "conv69" type: "Convolution" bottom: "relu6_blob57" top: "conv_blob69" convolution_param { num_output: 16 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm69" type: "BatchNorm" bottom: "conv_blob69" top: "batch_norm_blob69" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale69" type: "Scale" bottom: "batch_norm_blob69" top: "batch_norm_blob69" scale_param { bias_term: true } } layer { name: "relu658" type: "ReLU6" bottom: "batch_norm_blob69" top: "relu6_blob58" } layer { name: "conv70" type: "Convolution" bottom: "relu6_blob58" top: "conv_blob70" convolution_param { num_output: 16 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm70" type: "BatchNorm" bottom: "conv_blob70" top: "batch_norm_blob70" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale70" type: "Scale" bottom: "batch_norm_blob70" top: "batch_norm_blob70" scale_param { bias_term: true } } layer { name: "relu659" type: "ReLU6" bottom: "batch_norm_blob70" top: "relu6_blob59" } layer { name: "cat12" type: "Concat" bottom: "relu6_blob55" bottom: "relu6_blob57" bottom: "relu6_blob59" top: 
"cat_blob12" concat_param { axis: 1 } } layer { name: "conv71" type: "Convolution" bottom: "cat_blob9" top: "conv_blob71" convolution_param { num_output: 8 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv72" type: "Convolution" bottom: "cat_blob9" top: "conv_blob72" convolution_param { num_output: 16 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv73" type: "Convolution" bottom: "cat_blob9" top: "conv_blob73" convolution_param { num_output: 40 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv74" type: "Convolution" bottom: "cat_blob10" top: "conv_blob74" convolution_param { num_output: 6 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv75" type: "Convolution" bottom: "cat_blob10" top: "conv_blob75" convolution_param { num_output: 12 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv76" type: "Convolution" bottom: "cat_blob10" top: "conv_blob76" convolution_param { num_output: 30 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv77" type: "Convolution" bottom: "cat_blob11" top: "conv_blob77" convolution_param { num_output: 6 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv78" type: "Convolution" bottom: "cat_blob11" top: "conv_blob78" convolution_param { num_output: 12 bias_term: true pad: 0 kernel_size: 1 group: 1 
stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv79" type: "Convolution" bottom: "cat_blob11" top: "conv_blob79" convolution_param { num_output: 30 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv80" type: "Convolution" bottom: "cat_blob12" top: "conv_blob80" convolution_param { num_output: 4 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv81" type: "Convolution" bottom: "cat_blob12" top: "conv_blob81" convolution_param { num_output: 8 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "conv82" type: "Convolution" bottom: "cat_blob12" top: "conv_blob82" convolution_param { num_output: 20 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } }