# Caffe network definition (protobuf text format), machine-generated by a
# PyTorch->Caffe converter (net name "TransferedPytorchModel" [sic]).
# Layer/blob names and every numeric parameter must stay byte-identical so
# they keep matching the paired .caffemodel weights -- do not hand-edit.
#
# Input: "blob1", 1x3x256x256. conv1 and conv2 both use stride 2, so the
# trunk runs at 64x64 spatial resolution after conv2 (consistent with the
# Interp layers later in the file that upsample back to height/width 64).
#
# Recurring triplet throughout the file:
#   Convolution (bias_term: false) -> BatchNorm (use_global_stats: true,
#   i.e. inference mode, eps ~1e-5) -> Scale (bias_term: true, in-place).
# The Scale layer supplies the learned gamma/beta affine that Caffe's
# BatchNorm layer itself does not include.
#
# This section: stem (conv1, conv2), then a bottleneck residual block
# (conv3 1x1 -> conv4 3x3 -> conv5 1x1 to 256 ch, with 1x1 projection
# shortcut conv6, summed in add1) and a second bottleneck (conv7..conv9,
# identity shortcut, add2), then 3x3 convs opening an 18-channel branch
# (conv10) and a stride-2 36-channel branch (conv11), followed by residual
# pairs conv12/conv13 (add3), conv14/conv15 (add4), conv16/conv17 (add5),
# conv18/conv19 (add6).
# NOTE(review): the 18/36/72-channel multi-resolution branching pattern in
# this file resembles HRNet-style fusion -- confirm against the exporting
# PyTorch model before relying on that reading.
name: "TransferedPytorchModel" input: "blob1" input_dim: 1 input_dim: 3 input_dim: 256 input_dim: 256 layer { name: "conv1" type: "Convolution" bottom: "blob1" top: "conv_blob1" convolution_param { num_output: 64 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm1" type: "BatchNorm" bottom: "conv_blob1" top: "batch_norm_blob1" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale1" type: "Scale" bottom: "batch_norm_blob1" top: "batch_norm_blob1" scale_param { bias_term: true } } layer { name: "relu1" type: "ReLU" bottom: "batch_norm_blob1" top: "relu_blob1" } layer { name: "conv2" type: "Convolution" bottom: "relu_blob1" top: "conv_blob2" convolution_param { num_output: 64 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm2" type: "BatchNorm" bottom: "conv_blob2" top: "batch_norm_blob2" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale2" type: "Scale" bottom: "batch_norm_blob2" top: "batch_norm_blob2" scale_param { bias_term: true } } layer { name: "relu2" type: "ReLU" bottom: "batch_norm_blob2" top: "relu_blob2" } layer { name: "conv3" type: "Convolution" bottom: "relu_blob2" top: "conv_blob3" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm3" type: "BatchNorm" bottom: "conv_blob3" top: "batch_norm_blob3" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale3" type: "Scale" bottom: "batch_norm_blob3" top: "batch_norm_blob3" scale_param { bias_term: true } } layer { name: "relu3" type: "ReLU" bottom: "batch_norm_blob3" top: "relu_blob3" } layer { name: "conv4" type: "Convolution" bottom: "relu_blob3" top: "conv_blob4" convolution_param { num_output: 64 bias_term: false pad: 1 
kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm4" type: "BatchNorm" bottom: "conv_blob4" top: "batch_norm_blob4" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale4" type: "Scale" bottom: "batch_norm_blob4" top: "batch_norm_blob4" scale_param { bias_term: true } } layer { name: "relu4" type: "ReLU" bottom: "batch_norm_blob4" top: "relu_blob4" } layer { name: "conv5" type: "Convolution" bottom: "relu_blob4" top: "conv_blob5" convolution_param { num_output: 256 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm5" type: "BatchNorm" bottom: "conv_blob5" top: "batch_norm_blob5" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale5" type: "Scale" bottom: "batch_norm_blob5" top: "batch_norm_blob5" scale_param { bias_term: true } } layer { name: "conv6" type: "Convolution" bottom: "relu_blob2" top: "conv_blob6" convolution_param { num_output: 256 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm6" type: "BatchNorm" bottom: "conv_blob6" top: "batch_norm_blob6" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale6" type: "Scale" bottom: "batch_norm_blob6" top: "batch_norm_blob6" scale_param { bias_term: true } } layer { name: "add1" type: "Eltwise" bottom: "batch_norm_blob5" bottom: "batch_norm_blob6" top: "add_blob1" eltwise_param { operation: SUM } } layer { name: "relu5" type: "ReLU" bottom: "add_blob1" top: "relu_blob5" } layer { name: "conv7" type: "Convolution" bottom: "relu_blob5" top: "conv_blob7" convolution_param { num_output: 64 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm7" type: "BatchNorm" bottom: "conv_blob7" top: "batch_norm_blob7" 
batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale7" type: "Scale" bottom: "batch_norm_blob7" top: "batch_norm_blob7" scale_param { bias_term: true } } layer { name: "relu6" type: "ReLU" bottom: "batch_norm_blob7" top: "relu_blob6" } layer { name: "conv8" type: "Convolution" bottom: "relu_blob6" top: "conv_blob8" convolution_param { num_output: 64 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm8" type: "BatchNorm" bottom: "conv_blob8" top: "batch_norm_blob8" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale8" type: "Scale" bottom: "batch_norm_blob8" top: "batch_norm_blob8" scale_param { bias_term: true } } layer { name: "relu7" type: "ReLU" bottom: "batch_norm_blob8" top: "relu_blob7" } layer { name: "conv9" type: "Convolution" bottom: "relu_blob7" top: "conv_blob9" convolution_param { num_output: 256 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm9" type: "BatchNorm" bottom: "conv_blob9" top: "batch_norm_blob9" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale9" type: "Scale" bottom: "batch_norm_blob9" top: "batch_norm_blob9" scale_param { bias_term: true } } layer { name: "add2" type: "Eltwise" bottom: "batch_norm_blob9" bottom: "relu_blob5" top: "add_blob2" eltwise_param { operation: SUM } } layer { name: "relu8" type: "ReLU" bottom: "add_blob2" top: "relu_blob8" } layer { name: "conv10" type: "Convolution" bottom: "relu_blob8" top: "conv_blob10" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm10" type: "BatchNorm" bottom: "conv_blob10" top: "batch_norm_blob10" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale10" type: "Scale" 
bottom: "batch_norm_blob10" top: "batch_norm_blob10" scale_param { bias_term: true } } layer { name: "relu9" type: "ReLU" bottom: "batch_norm_blob10" top: "relu_blob9" } layer { name: "conv11" type: "Convolution" bottom: "relu_blob8" top: "conv_blob11" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm11" type: "BatchNorm" bottom: "conv_blob11" top: "batch_norm_blob11" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale11" type: "Scale" bottom: "batch_norm_blob11" top: "batch_norm_blob11" scale_param { bias_term: true } } layer { name: "relu10" type: "ReLU" bottom: "batch_norm_blob11" top: "relu_blob10" } layer { name: "conv12" type: "Convolution" bottom: "relu_blob9" top: "conv_blob12" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm12" type: "BatchNorm" bottom: "conv_blob12" top: "batch_norm_blob12" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale12" type: "Scale" bottom: "batch_norm_blob12" top: "batch_norm_blob12" scale_param { bias_term: true } } layer { name: "relu11" type: "ReLU" bottom: "batch_norm_blob12" top: "relu_blob11" } layer { name: "conv13" type: "Convolution" bottom: "relu_blob11" top: "conv_blob13" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm13" type: "BatchNorm" bottom: "conv_blob13" top: "batch_norm_blob13" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale13" type: "Scale" bottom: "batch_norm_blob13" top: "batch_norm_blob13" scale_param { bias_term: true } } layer { name: "add3" type: "Eltwise" bottom: "batch_norm_blob13" bottom: "relu_blob9" top: "add_blob3" eltwise_param { 
operation: SUM } } layer { name: "relu12" type: "ReLU" bottom: "add_blob3" top: "relu_blob12" } layer { name: "conv14" type: "Convolution" bottom: "relu_blob12" top: "conv_blob14" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm14" type: "BatchNorm" bottom: "conv_blob14" top: "batch_norm_blob14" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale14" type: "Scale" bottom: "batch_norm_blob14" top: "batch_norm_blob14" scale_param { bias_term: true } } layer { name: "relu13" type: "ReLU" bottom: "batch_norm_blob14" top: "relu_blob13" } layer { name: "conv15" type: "Convolution" bottom: "relu_blob13" top: "conv_blob15" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm15" type: "BatchNorm" bottom: "conv_blob15" top: "batch_norm_blob15" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale15" type: "Scale" bottom: "batch_norm_blob15" top: "batch_norm_blob15" scale_param { bias_term: true } } layer { name: "add4" type: "Eltwise" bottom: "batch_norm_blob15" bottom: "relu_blob12" top: "add_blob4" eltwise_param { operation: SUM } } layer { name: "relu14" type: "ReLU" bottom: "add_blob4" top: "relu_blob14" } layer { name: "conv16" type: "Convolution" bottom: "relu_blob10" top: "conv_blob16" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm16" type: "BatchNorm" bottom: "conv_blob16" top: "batch_norm_blob16" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale16" type: "Scale" bottom: "batch_norm_blob16" top: "batch_norm_blob16" scale_param { bias_term: true } } layer { name: "relu15" type: "ReLU" bottom: "batch_norm_blob16" 
top: "relu_blob15" } layer { name: "conv17" type: "Convolution" bottom: "relu_blob15" top: "conv_blob17" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm17" type: "BatchNorm" bottom: "conv_blob17" top: "batch_norm_blob17" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale17" type: "Scale" bottom: "batch_norm_blob17" top: "batch_norm_blob17" scale_param { bias_term: true } } layer { name: "add5" type: "Eltwise" bottom: "batch_norm_blob17" bottom: "relu_blob10" top: "add_blob5" eltwise_param { operation: SUM } } layer { name: "relu16" type: "ReLU" bottom: "add_blob5" top: "relu_blob16" } layer { name: "conv18" type: "Convolution" bottom: "relu_blob16" top: "conv_blob18" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm18" type: "BatchNorm" bottom: "conv_blob18" top: "batch_norm_blob18" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale18" type: "Scale" bottom: "batch_norm_blob18" top: "batch_norm_blob18" scale_param { bias_term: true } } layer { name: "relu17" type: "ReLU" bottom: "batch_norm_blob18" top: "relu_blob17" } layer { name: "conv19" type: "Convolution" bottom: "relu_blob17" top: "conv_blob19" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm19" type: "BatchNorm" bottom: "conv_blob19" top: "batch_norm_blob19" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale19" type: "Scale" bottom: "batch_norm_blob19" top: "batch_norm_blob19" scale_param { bias_term: true } } layer { name: "add6" type: "Eltwise" bottom: "batch_norm_blob19" bottom: "relu_blob16" top: "add_blob6" eltwise_param { operation: SUM } } 
# Cross-resolution fusion stage + third branch (machine-generated section of
# the converter output above -- keep every name and value intact so they keep
# matching the paired .caffemodel weights).
# - conv20 (1x1, 18 outputs) -> BN/Scale -> Interp to 64x64 upsamples the
#   36-channel branch and sums it into the 18-channel branch (add7);
#   conv21 (3x3, stride 2, 36 outputs) downsamples the 18-channel branch and
#   sums it into the 36-channel branch (add8).
# - conv22 (3x3, stride 2, 72 outputs) opens a new lower-resolution
#   72-channel branch from relu_blob20.
# - conv23..conv26 (add9/add10), conv27..conv30 (add11/add12) and
#   conv31..conv34 (add13/add14): two 3x3+3x3 residual basic blocks with
#   identity shortcuts on the 18-, 36- and 72-channel branches respectively.
# - conv35/conv36 (1x1) -> Interp to 64x64 fuse the 36- and 72-channel
#   branches back into the 18-channel branch (add15/add16); conv37/conv38
#   fuse into the 36-channel branch (add17/add18, upsample4 to 32x32);
#   conv39->conv40 (two stride-2 hops) and conv41 fuse into the 72-channel
#   branch (add19/add20).
# NOTE(review): "Interp" is not a layer in mainline BVLC Caffe; it requires a
# fork that implements it (e.g. the DeepLab/PSPNet Caffe) -- confirm the
# deployment runtime supports interp_param before shipping this prototxt.
layer { name: "relu18" type: "ReLU" bottom: "add_blob6" top: "relu_blob18" } layer { name: "conv20" type: "Convolution" bottom: "relu_blob18" top: "conv_blob20" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm20" type: "BatchNorm" bottom: "conv_blob20" top: "batch_norm_blob20" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale20" type: "Scale" bottom: "batch_norm_blob20" top: "batch_norm_blob20" scale_param { bias_term: true } } layer { name: "upsample1" type: "Interp" bottom: "batch_norm_blob20" top: "upsample_blob1" interp_param { height: 64 width: 64 } } layer { name: "add7" type: "Eltwise" bottom: "relu_blob14" bottom: "upsample_blob1" top: "add_blob7" eltwise_param { operation: SUM } } layer { name: "relu19" type: "ReLU" bottom: "add_blob7" top: "relu_blob19" } layer { name: "conv21" type: "Convolution" bottom: "relu_blob14" top: "conv_blob21" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm21" type: "BatchNorm" bottom: "conv_blob21" top: "batch_norm_blob21" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale21" type: "Scale" bottom: "batch_norm_blob21" top: "batch_norm_blob21" scale_param { bias_term: true } } layer { name: "add8" type: "Eltwise" bottom: "batch_norm_blob21" bottom: "relu_blob18" top: "add_blob8" eltwise_param { operation: SUM } } layer { name: "relu20" type: "ReLU" bottom: "add_blob8" top: "relu_blob20" } layer { name: "conv22" type: "Convolution" bottom: "relu_blob20" top: "conv_blob22" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm22" type: "BatchNorm" bottom: "conv_blob22" top: "batch_norm_blob22" batch_norm_param 
{ use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale22" type: "Scale" bottom: "batch_norm_blob22" top: "batch_norm_blob22" scale_param { bias_term: true } } layer { name: "relu21" type: "ReLU" bottom: "batch_norm_blob22" top: "relu_blob21" } layer { name: "conv23" type: "Convolution" bottom: "relu_blob19" top: "conv_blob23" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm23" type: "BatchNorm" bottom: "conv_blob23" top: "batch_norm_blob23" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale23" type: "Scale" bottom: "batch_norm_blob23" top: "batch_norm_blob23" scale_param { bias_term: true } } layer { name: "relu22" type: "ReLU" bottom: "batch_norm_blob23" top: "relu_blob22" } layer { name: "conv24" type: "Convolution" bottom: "relu_blob22" top: "conv_blob24" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm24" type: "BatchNorm" bottom: "conv_blob24" top: "batch_norm_blob24" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale24" type: "Scale" bottom: "batch_norm_blob24" top: "batch_norm_blob24" scale_param { bias_term: true } } layer { name: "add9" type: "Eltwise" bottom: "batch_norm_blob24" bottom: "relu_blob19" top: "add_blob9" eltwise_param { operation: SUM } } layer { name: "relu23" type: "ReLU" bottom: "add_blob9" top: "relu_blob23" } layer { name: "conv25" type: "Convolution" bottom: "relu_blob23" top: "conv_blob25" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm25" type: "BatchNorm" bottom: "conv_blob25" top: "batch_norm_blob25" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale25" 
type: "Scale" bottom: "batch_norm_blob25" top: "batch_norm_blob25" scale_param { bias_term: true } } layer { name: "relu24" type: "ReLU" bottom: "batch_norm_blob25" top: "relu_blob24" } layer { name: "conv26" type: "Convolution" bottom: "relu_blob24" top: "conv_blob26" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm26" type: "BatchNorm" bottom: "conv_blob26" top: "batch_norm_blob26" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale26" type: "Scale" bottom: "batch_norm_blob26" top: "batch_norm_blob26" scale_param { bias_term: true } } layer { name: "add10" type: "Eltwise" bottom: "batch_norm_blob26" bottom: "relu_blob23" top: "add_blob10" eltwise_param { operation: SUM } } layer { name: "relu25" type: "ReLU" bottom: "add_blob10" top: "relu_blob25" } layer { name: "conv27" type: "Convolution" bottom: "relu_blob20" top: "conv_blob27" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm27" type: "BatchNorm" bottom: "conv_blob27" top: "batch_norm_blob27" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale27" type: "Scale" bottom: "batch_norm_blob27" top: "batch_norm_blob27" scale_param { bias_term: true } } layer { name: "relu26" type: "ReLU" bottom: "batch_norm_blob27" top: "relu_blob26" } layer { name: "conv28" type: "Convolution" bottom: "relu_blob26" top: "conv_blob28" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm28" type: "BatchNorm" bottom: "conv_blob28" top: "batch_norm_blob28" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale28" type: "Scale" bottom: "batch_norm_blob28" top: "batch_norm_blob28" 
scale_param { bias_term: true } } layer { name: "add11" type: "Eltwise" bottom: "batch_norm_blob28" bottom: "relu_blob20" top: "add_blob11" eltwise_param { operation: SUM } } layer { name: "relu27" type: "ReLU" bottom: "add_blob11" top: "relu_blob27" } layer { name: "conv29" type: "Convolution" bottom: "relu_blob27" top: "conv_blob29" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm29" type: "BatchNorm" bottom: "conv_blob29" top: "batch_norm_blob29" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale29" type: "Scale" bottom: "batch_norm_blob29" top: "batch_norm_blob29" scale_param { bias_term: true } } layer { name: "relu28" type: "ReLU" bottom: "batch_norm_blob29" top: "relu_blob28" } layer { name: "conv30" type: "Convolution" bottom: "relu_blob28" top: "conv_blob30" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm30" type: "BatchNorm" bottom: "conv_blob30" top: "batch_norm_blob30" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale30" type: "Scale" bottom: "batch_norm_blob30" top: "batch_norm_blob30" scale_param { bias_term: true } } layer { name: "add12" type: "Eltwise" bottom: "batch_norm_blob30" bottom: "relu_blob27" top: "add_blob12" eltwise_param { operation: SUM } } layer { name: "relu29" type: "ReLU" bottom: "add_blob12" top: "relu_blob29" } layer { name: "conv31" type: "Convolution" bottom: "relu_blob21" top: "conv_blob31" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm31" type: "BatchNorm" bottom: "conv_blob31" top: "batch_norm_blob31" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale31" 
type: "Scale" bottom: "batch_norm_blob31" top: "batch_norm_blob31" scale_param { bias_term: true } } layer { name: "relu30" type: "ReLU" bottom: "batch_norm_blob31" top: "relu_blob30" } layer { name: "conv32" type: "Convolution" bottom: "relu_blob30" top: "conv_blob32" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm32" type: "BatchNorm" bottom: "conv_blob32" top: "batch_norm_blob32" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale32" type: "Scale" bottom: "batch_norm_blob32" top: "batch_norm_blob32" scale_param { bias_term: true } } layer { name: "add13" type: "Eltwise" bottom: "batch_norm_blob32" bottom: "relu_blob21" top: "add_blob13" eltwise_param { operation: SUM } } layer { name: "relu31" type: "ReLU" bottom: "add_blob13" top: "relu_blob31" } layer { name: "conv33" type: "Convolution" bottom: "relu_blob31" top: "conv_blob33" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm33" type: "BatchNorm" bottom: "conv_blob33" top: "batch_norm_blob33" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale33" type: "Scale" bottom: "batch_norm_blob33" top: "batch_norm_blob33" scale_param { bias_term: true } } layer { name: "relu32" type: "ReLU" bottom: "batch_norm_blob33" top: "relu_blob32" } layer { name: "conv34" type: "Convolution" bottom: "relu_blob32" top: "conv_blob34" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm34" type: "BatchNorm" bottom: "conv_blob34" top: "batch_norm_blob34" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale34" type: "Scale" bottom: "batch_norm_blob34" top: "batch_norm_blob34" 
scale_param { bias_term: true } } layer { name: "add14" type: "Eltwise" bottom: "batch_norm_blob34" bottom: "relu_blob31" top: "add_blob14" eltwise_param { operation: SUM } } layer { name: "relu33" type: "ReLU" bottom: "add_blob14" top: "relu_blob33" } layer { name: "conv35" type: "Convolution" bottom: "relu_blob29" top: "conv_blob35" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm35" type: "BatchNorm" bottom: "conv_blob35" top: "batch_norm_blob35" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale35" type: "Scale" bottom: "batch_norm_blob35" top: "batch_norm_blob35" scale_param { bias_term: true } } layer { name: "upsample2" type: "Interp" bottom: "batch_norm_blob35" top: "upsample_blob2" interp_param { height: 64 width: 64 } } layer { name: "add15" type: "Eltwise" bottom: "relu_blob25" bottom: "upsample_blob2" top: "add_blob15" eltwise_param { operation: SUM } } layer { name: "conv36" type: "Convolution" bottom: "relu_blob33" top: "conv_blob36" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm36" type: "BatchNorm" bottom: "conv_blob36" top: "batch_norm_blob36" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale36" type: "Scale" bottom: "batch_norm_blob36" top: "batch_norm_blob36" scale_param { bias_term: true } } layer { name: "upsample3" type: "Interp" bottom: "batch_norm_blob36" top: "upsample_blob3" interp_param { height: 64 width: 64 } } layer { name: "add16" type: "Eltwise" bottom: "add_blob15" bottom: "upsample_blob3" top: "add_blob16" eltwise_param { operation: SUM } } layer { name: "relu34" type: "ReLU" bottom: "add_blob16" top: "relu_blob34" } layer { name: "conv37" type: "Convolution" bottom: "relu_blob25" top: "conv_blob37" convolution_param { 
num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm37" type: "BatchNorm" bottom: "conv_blob37" top: "batch_norm_blob37" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale37" type: "Scale" bottom: "batch_norm_blob37" top: "batch_norm_blob37" scale_param { bias_term: true } } layer { name: "add17" type: "Eltwise" bottom: "batch_norm_blob37" bottom: "relu_blob29" top: "add_blob17" eltwise_param { operation: SUM } } layer { name: "conv38" type: "Convolution" bottom: "relu_blob33" top: "conv_blob38" convolution_param { num_output: 36 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm38" type: "BatchNorm" bottom: "conv_blob38" top: "batch_norm_blob38" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale38" type: "Scale" bottom: "batch_norm_blob38" top: "batch_norm_blob38" scale_param { bias_term: true } } layer { name: "upsample4" type: "Interp" bottom: "batch_norm_blob38" top: "upsample_blob4" interp_param { height: 32 width: 32 } } layer { name: "add18" type: "Eltwise" bottom: "add_blob17" bottom: "upsample_blob4" top: "add_blob18" eltwise_param { operation: SUM } } layer { name: "relu35" type: "ReLU" bottom: "add_blob18" top: "relu_blob35" } layer { name: "conv39" type: "Convolution" bottom: "relu_blob25" top: "conv_blob39" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm39" type: "BatchNorm" bottom: "conv_blob39" top: "batch_norm_blob39" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale39" type: "Scale" bottom: "batch_norm_blob39" top: "batch_norm_blob39" scale_param { bias_term: true } } layer { name: "relu36" type: "ReLU" bottom: "batch_norm_blob39" top: 
"relu_blob36" } layer { name: "conv40" type: "Convolution" bottom: "relu_blob36" top: "conv_blob40" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm40" type: "BatchNorm" bottom: "conv_blob40" top: "batch_norm_blob40" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale40" type: "Scale" bottom: "batch_norm_blob40" top: "batch_norm_blob40" scale_param { bias_term: true } } layer { name: "conv41" type: "Convolution" bottom: "relu_blob29" top: "conv_blob41" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm41" type: "BatchNorm" bottom: "conv_blob41" top: "batch_norm_blob41" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale41" type: "Scale" bottom: "batch_norm_blob41" top: "batch_norm_blob41" scale_param { bias_term: true } } layer { name: "add19" type: "Eltwise" bottom: "batch_norm_blob40" bottom: "batch_norm_blob41" top: "add_blob19" eltwise_param { operation: SUM } } layer { name: "add20" type: "Eltwise" bottom: "add_blob19" bottom: "relu_blob33" top: "add_blob20" eltwise_param { operation: SUM } } layer { name: "relu37" type: "ReLU" bottom: "add_blob20" top: "relu_blob37" } layer { name: "conv42" type: "Convolution" bottom: "relu_blob34" top: "conv_blob42" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm42" type: "BatchNorm" bottom: "conv_blob42" top: "batch_norm_blob42" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale42" type: "Scale" bottom: "batch_norm_blob42" top: "batch_norm_blob42" scale_param { bias_term: true } } layer { name: "relu38" type: "ReLU" bottom: "batch_norm_blob42" top: "relu_blob38" } 
layer { name: "conv43" type: "Convolution" bottom: "relu_blob38" top: "conv_blob43" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm43" type: "BatchNorm" bottom: "conv_blob43" top: "batch_norm_blob43" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale43" type: "Scale" bottom: "batch_norm_blob43" top: "batch_norm_blob43" scale_param { bias_term: true } } layer { name: "add21" type: "Eltwise" bottom: "batch_norm_blob43" bottom: "relu_blob34" top: "add_blob21" eltwise_param { operation: SUM } } layer { name: "relu39" type: "ReLU" bottom: "add_blob21" top: "relu_blob39" } layer { name: "conv44" type: "Convolution" bottom: "relu_blob39" top: "conv_blob44" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm44" type: "BatchNorm" bottom: "conv_blob44" top: "batch_norm_blob44" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale44" type: "Scale" bottom: "batch_norm_blob44" top: "batch_norm_blob44" scale_param { bias_term: true } } layer { name: "relu40" type: "ReLU" bottom: "batch_norm_blob44" top: "relu_blob40" } layer { name: "conv45" type: "Convolution" bottom: "relu_blob40" top: "conv_blob45" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm45" type: "BatchNorm" bottom: "conv_blob45" top: "batch_norm_blob45" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale45" type: "Scale" bottom: "batch_norm_blob45" top: "batch_norm_blob45" scale_param { bias_term: true } } layer { name: "add22" type: "Eltwise" bottom: "batch_norm_blob45" bottom: "relu_blob39" top: "add_blob22" eltwise_param { operation: SUM } } layer { name: 
"relu41" type: "ReLU" bottom: "add_blob22" top: "relu_blob41" } layer { name: "conv46" type: "Convolution" bottom: "relu_blob35" top: "conv_blob46" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm46" type: "BatchNorm" bottom: "conv_blob46" top: "batch_norm_blob46" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale46" type: "Scale" bottom: "batch_norm_blob46" top: "batch_norm_blob46" scale_param { bias_term: true } } layer { name: "relu42" type: "ReLU" bottom: "batch_norm_blob46" top: "relu_blob42" } layer { name: "conv47" type: "Convolution" bottom: "relu_blob42" top: "conv_blob47" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm47" type: "BatchNorm" bottom: "conv_blob47" top: "batch_norm_blob47" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale47" type: "Scale" bottom: "batch_norm_blob47" top: "batch_norm_blob47" scale_param { bias_term: true } } layer { name: "add23" type: "Eltwise" bottom: "batch_norm_blob47" bottom: "relu_blob35" top: "add_blob23" eltwise_param { operation: SUM } } layer { name: "relu43" type: "ReLU" bottom: "add_blob23" top: "relu_blob43" } layer { name: "conv48" type: "Convolution" bottom: "relu_blob43" top: "conv_blob48" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm48" type: "BatchNorm" bottom: "conv_blob48" top: "batch_norm_blob48" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale48" type: "Scale" bottom: "batch_norm_blob48" top: "batch_norm_blob48" scale_param { bias_term: true } } layer { name: "relu44" type: "ReLU" bottom: "batch_norm_blob48" top: "relu_blob44" } layer { 
name: "conv49" type: "Convolution" bottom: "relu_blob44" top: "conv_blob49" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm49" type: "BatchNorm" bottom: "conv_blob49" top: "batch_norm_blob49" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale49" type: "Scale" bottom: "batch_norm_blob49" top: "batch_norm_blob49" scale_param { bias_term: true } } layer { name: "add24" type: "Eltwise" bottom: "batch_norm_blob49" bottom: "relu_blob43" top: "add_blob24" eltwise_param { operation: SUM } } layer { name: "relu45" type: "ReLU" bottom: "add_blob24" top: "relu_blob45" } layer { name: "conv50" type: "Convolution" bottom: "relu_blob37" top: "conv_blob50" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm50" type: "BatchNorm" bottom: "conv_blob50" top: "batch_norm_blob50" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale50" type: "Scale" bottom: "batch_norm_blob50" top: "batch_norm_blob50" scale_param { bias_term: true } } layer { name: "relu46" type: "ReLU" bottom: "batch_norm_blob50" top: "relu_blob46" } layer { name: "conv51" type: "Convolution" bottom: "relu_blob46" top: "conv_blob51" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm51" type: "BatchNorm" bottom: "conv_blob51" top: "batch_norm_blob51" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale51" type: "Scale" bottom: "batch_norm_blob51" top: "batch_norm_blob51" scale_param { bias_term: true } } layer { name: "add25" type: "Eltwise" bottom: "batch_norm_blob51" bottom: "relu_blob37" top: "add_blob25" eltwise_param { operation: SUM } } layer { name: "relu47" 
type: "ReLU" bottom: "add_blob25" top: "relu_blob47" } layer { name: "conv52" type: "Convolution" bottom: "relu_blob47" top: "conv_blob52" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm52" type: "BatchNorm" bottom: "conv_blob52" top: "batch_norm_blob52" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale52" type: "Scale" bottom: "batch_norm_blob52" top: "batch_norm_blob52" scale_param { bias_term: true } } layer { name: "relu48" type: "ReLU" bottom: "batch_norm_blob52" top: "relu_blob48" } layer { name: "conv53" type: "Convolution" bottom: "relu_blob48" top: "conv_blob53" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm53" type: "BatchNorm" bottom: "conv_blob53" top: "batch_norm_blob53" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale53" type: "Scale" bottom: "batch_norm_blob53" top: "batch_norm_blob53" scale_param { bias_term: true } } layer { name: "add26" type: "Eltwise" bottom: "batch_norm_blob53" bottom: "relu_blob47" top: "add_blob26" eltwise_param { operation: SUM } } layer { name: "relu49" type: "ReLU" bottom: "add_blob26" top: "relu_blob49" } layer { name: "conv54" type: "Convolution" bottom: "relu_blob45" top: "conv_blob54" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm54" type: "BatchNorm" bottom: "conv_blob54" top: "batch_norm_blob54" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale54" type: "Scale" bottom: "batch_norm_blob54" top: "batch_norm_blob54" scale_param { bias_term: true } } layer { name: "upsample5" type: "Interp" bottom: "batch_norm_blob54" top: "upsample_blob5" 
interp_param { height: 64 width: 64 } } layer { name: "add27" type: "Eltwise" bottom: "relu_blob41" bottom: "upsample_blob5" top: "add_blob27" eltwise_param { operation: SUM } } layer { name: "conv55" type: "Convolution" bottom: "relu_blob49" top: "conv_blob55" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm55" type: "BatchNorm" bottom: "conv_blob55" top: "batch_norm_blob55" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale55" type: "Scale" bottom: "batch_norm_blob55" top: "batch_norm_blob55" scale_param { bias_term: true } } layer { name: "upsample6" type: "Interp" bottom: "batch_norm_blob55" top: "upsample_blob6" interp_param { height: 64 width: 64 } } layer { name: "add28" type: "Eltwise" bottom: "add_blob27" bottom: "upsample_blob6" top: "add_blob28" eltwise_param { operation: SUM } } layer { name: "relu50" type: "ReLU" bottom: "add_blob28" top: "relu_blob50" } layer { name: "conv56" type: "Convolution" bottom: "relu_blob41" top: "conv_blob56" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm56" type: "BatchNorm" bottom: "conv_blob56" top: "batch_norm_blob56" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale56" type: "Scale" bottom: "batch_norm_blob56" top: "batch_norm_blob56" scale_param { bias_term: true } } layer { name: "add29" type: "Eltwise" bottom: "batch_norm_blob56" bottom: "relu_blob45" top: "add_blob29" eltwise_param { operation: SUM } } layer { name: "conv57" type: "Convolution" bottom: "relu_blob49" top: "conv_blob57" convolution_param { num_output: 36 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm57" type: "BatchNorm" bottom: "conv_blob57" top: 
"batch_norm_blob57" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale57" type: "Scale" bottom: "batch_norm_blob57" top: "batch_norm_blob57" scale_param { bias_term: true } } layer { name: "upsample7" type: "Interp" bottom: "batch_norm_blob57" top: "upsample_blob7" interp_param { height: 32 width: 32 } } layer { name: "add30" type: "Eltwise" bottom: "add_blob29" bottom: "upsample_blob7" top: "add_blob30" eltwise_param { operation: SUM } } layer { name: "relu51" type: "ReLU" bottom: "add_blob30" top: "relu_blob51" } layer { name: "conv58" type: "Convolution" bottom: "relu_blob41" top: "conv_blob58" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm58" type: "BatchNorm" bottom: "conv_blob58" top: "batch_norm_blob58" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale58" type: "Scale" bottom: "batch_norm_blob58" top: "batch_norm_blob58" scale_param { bias_term: true } } layer { name: "relu52" type: "ReLU" bottom: "batch_norm_blob58" top: "relu_blob52" } layer { name: "conv59" type: "Convolution" bottom: "relu_blob52" top: "conv_blob59" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm59" type: "BatchNorm" bottom: "conv_blob59" top: "batch_norm_blob59" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale59" type: "Scale" bottom: "batch_norm_blob59" top: "batch_norm_blob59" scale_param { bias_term: true } } layer { name: "conv60" type: "Convolution" bottom: "relu_blob45" top: "conv_blob60" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm60" type: "BatchNorm" bottom: "conv_blob60" top: "batch_norm_blob60" 
batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale60" type: "Scale" bottom: "batch_norm_blob60" top: "batch_norm_blob60" scale_param { bias_term: true } } layer { name: "add31" type: "Eltwise" bottom: "batch_norm_blob59" bottom: "batch_norm_blob60" top: "add_blob31" eltwise_param { operation: SUM } } layer { name: "add32" type: "Eltwise" bottom: "add_blob31" bottom: "relu_blob49" top: "add_blob32" eltwise_param { operation: SUM } } layer { name: "relu53" type: "ReLU" bottom: "add_blob32" top: "relu_blob53" } layer { name: "conv61" type: "Convolution" bottom: "relu_blob50" top: "conv_blob61" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm61" type: "BatchNorm" bottom: "conv_blob61" top: "batch_norm_blob61" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale61" type: "Scale" bottom: "batch_norm_blob61" top: "batch_norm_blob61" scale_param { bias_term: true } } layer { name: "relu54" type: "ReLU" bottom: "batch_norm_blob61" top: "relu_blob54" } layer { name: "conv62" type: "Convolution" bottom: "relu_blob54" top: "conv_blob62" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm62" type: "BatchNorm" bottom: "conv_blob62" top: "batch_norm_blob62" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale62" type: "Scale" bottom: "batch_norm_blob62" top: "batch_norm_blob62" scale_param { bias_term: true } } layer { name: "add33" type: "Eltwise" bottom: "batch_norm_blob62" bottom: "relu_blob50" top: "add_blob33" eltwise_param { operation: SUM } } layer { name: "relu55" type: "ReLU" bottom: "add_blob33" top: "relu_blob55" } layer { name: "conv63" type: "Convolution" bottom: "relu_blob55" top: "conv_blob63" convolution_param { num_output: 
18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm63" type: "BatchNorm" bottom: "conv_blob63" top: "batch_norm_blob63" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale63" type: "Scale" bottom: "batch_norm_blob63" top: "batch_norm_blob63" scale_param { bias_term: true } } layer { name: "relu56" type: "ReLU" bottom: "batch_norm_blob63" top: "relu_blob56" } layer { name: "conv64" type: "Convolution" bottom: "relu_blob56" top: "conv_blob64" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm64" type: "BatchNorm" bottom: "conv_blob64" top: "batch_norm_blob64" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale64" type: "Scale" bottom: "batch_norm_blob64" top: "batch_norm_blob64" scale_param { bias_term: true } } layer { name: "add34" type: "Eltwise" bottom: "batch_norm_blob64" bottom: "relu_blob55" top: "add_blob34" eltwise_param { operation: SUM } } layer { name: "relu57" type: "ReLU" bottom: "add_blob34" top: "relu_blob57" } layer { name: "conv65" type: "Convolution" bottom: "relu_blob51" top: "conv_blob65" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm65" type: "BatchNorm" bottom: "conv_blob65" top: "batch_norm_blob65" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale65" type: "Scale" bottom: "batch_norm_blob65" top: "batch_norm_blob65" scale_param { bias_term: true } } layer { name: "relu58" type: "ReLU" bottom: "batch_norm_blob65" top: "relu_blob58" } layer { name: "conv66" type: "Convolution" bottom: "relu_blob58" top: "conv_blob66" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 
weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm66" type: "BatchNorm" bottom: "conv_blob66" top: "batch_norm_blob66" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale66" type: "Scale" bottom: "batch_norm_blob66" top: "batch_norm_blob66" scale_param { bias_term: true } } layer { name: "add35" type: "Eltwise" bottom: "batch_norm_blob66" bottom: "relu_blob51" top: "add_blob35" eltwise_param { operation: SUM } } layer { name: "relu59" type: "ReLU" bottom: "add_blob35" top: "relu_blob59" } layer { name: "conv67" type: "Convolution" bottom: "relu_blob59" top: "conv_blob67" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm67" type: "BatchNorm" bottom: "conv_blob67" top: "batch_norm_blob67" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale67" type: "Scale" bottom: "batch_norm_blob67" top: "batch_norm_blob67" scale_param { bias_term: true } } layer { name: "relu60" type: "ReLU" bottom: "batch_norm_blob67" top: "relu_blob60" } layer { name: "conv68" type: "Convolution" bottom: "relu_blob60" top: "conv_blob68" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm68" type: "BatchNorm" bottom: "conv_blob68" top: "batch_norm_blob68" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale68" type: "Scale" bottom: "batch_norm_blob68" top: "batch_norm_blob68" scale_param { bias_term: true } } layer { name: "add36" type: "Eltwise" bottom: "batch_norm_blob68" bottom: "relu_blob59" top: "add_blob36" eltwise_param { operation: SUM } } layer { name: "relu61" type: "ReLU" bottom: "add_blob36" top: "relu_blob61" } layer { name: "conv69" type: "Convolution" bottom: "relu_blob53" top: "conv_blob69" convolution_param { 
num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm69" type: "BatchNorm" bottom: "conv_blob69" top: "batch_norm_blob69" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale69" type: "Scale" bottom: "batch_norm_blob69" top: "batch_norm_blob69" scale_param { bias_term: true } } layer { name: "relu62" type: "ReLU" bottom: "batch_norm_blob69" top: "relu_blob62" } layer { name: "conv70" type: "Convolution" bottom: "relu_blob62" top: "conv_blob70" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm70" type: "BatchNorm" bottom: "conv_blob70" top: "batch_norm_blob70" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale70" type: "Scale" bottom: "batch_norm_blob70" top: "batch_norm_blob70" scale_param { bias_term: true } } layer { name: "add37" type: "Eltwise" bottom: "batch_norm_blob70" bottom: "relu_blob53" top: "add_blob37" eltwise_param { operation: SUM } } layer { name: "relu63" type: "ReLU" bottom: "add_blob37" top: "relu_blob63" } layer { name: "conv71" type: "Convolution" bottom: "relu_blob63" top: "conv_blob71" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm71" type: "BatchNorm" bottom: "conv_blob71" top: "batch_norm_blob71" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale71" type: "Scale" bottom: "batch_norm_blob71" top: "batch_norm_blob71" scale_param { bias_term: true } } layer { name: "relu64" type: "ReLU" bottom: "batch_norm_blob71" top: "relu_blob64" } layer { name: "conv72" type: "Convolution" bottom: "relu_blob64" top: "conv_blob72" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 
1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm72" type: "BatchNorm" bottom: "conv_blob72" top: "batch_norm_blob72" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale72" type: "Scale" bottom: "batch_norm_blob72" top: "batch_norm_blob72" scale_param { bias_term: true } } layer { name: "add38" type: "Eltwise" bottom: "batch_norm_blob72" bottom: "relu_blob63" top: "add_blob38" eltwise_param { operation: SUM } } layer { name: "relu65" type: "ReLU" bottom: "add_blob38" top: "relu_blob65" } layer { name: "conv73" type: "Convolution" bottom: "relu_blob61" top: "conv_blob73" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm73" type: "BatchNorm" bottom: "conv_blob73" top: "batch_norm_blob73" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale73" type: "Scale" bottom: "batch_norm_blob73" top: "batch_norm_blob73" scale_param { bias_term: true } } layer { name: "upsample8" type: "Interp" bottom: "batch_norm_blob73" top: "upsample_blob8" interp_param { height: 64 width: 64 } } layer { name: "add39" type: "Eltwise" bottom: "relu_blob57" bottom: "upsample_blob8" top: "add_blob39" eltwise_param { operation: SUM } } layer { name: "conv74" type: "Convolution" bottom: "relu_blob65" top: "conv_blob74" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm74" type: "BatchNorm" bottom: "conv_blob74" top: "batch_norm_blob74" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale74" type: "Scale" bottom: "batch_norm_blob74" top: "batch_norm_blob74" scale_param { bias_term: true } } layer { name: "upsample9" type: "Interp" bottom: "batch_norm_blob74" top: "upsample_blob9" interp_param { height: 64 width: 64 } } layer { name: 
"add40" type: "Eltwise" bottom: "add_blob39" bottom: "upsample_blob9" top: "add_blob40" eltwise_param { operation: SUM } } layer { name: "relu66" type: "ReLU" bottom: "add_blob40" top: "relu_blob66" } layer { name: "conv75" type: "Convolution" bottom: "relu_blob57" top: "conv_blob75" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm75" type: "BatchNorm" bottom: "conv_blob75" top: "batch_norm_blob75" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale75" type: "Scale" bottom: "batch_norm_blob75" top: "batch_norm_blob75" scale_param { bias_term: true } } layer { name: "add41" type: "Eltwise" bottom: "batch_norm_blob75" bottom: "relu_blob61" top: "add_blob41" eltwise_param { operation: SUM } } layer { name: "conv76" type: "Convolution" bottom: "relu_blob65" top: "conv_blob76" convolution_param { num_output: 36 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm76" type: "BatchNorm" bottom: "conv_blob76" top: "batch_norm_blob76" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale76" type: "Scale" bottom: "batch_norm_blob76" top: "batch_norm_blob76" scale_param { bias_term: true } } layer { name: "upsample10" type: "Interp" bottom: "batch_norm_blob76" top: "upsample_blob10" interp_param { height: 32 width: 32 } } layer { name: "add42" type: "Eltwise" bottom: "add_blob41" bottom: "upsample_blob10" top: "add_blob42" eltwise_param { operation: SUM } } layer { name: "relu67" type: "ReLU" bottom: "add_blob42" top: "relu_blob67" } layer { name: "conv77" type: "Convolution" bottom: "relu_blob57" top: "conv_blob77" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm77" type: "BatchNorm" bottom: 
"conv_blob77" top: "batch_norm_blob77" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale77" type: "Scale" bottom: "batch_norm_blob77" top: "batch_norm_blob77" scale_param { bias_term: true } } layer { name: "relu68" type: "ReLU" bottom: "batch_norm_blob77" top: "relu_blob68" } layer { name: "conv78" type: "Convolution" bottom: "relu_blob68" top: "conv_blob78" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm78" type: "BatchNorm" bottom: "conv_blob78" top: "batch_norm_blob78" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale78" type: "Scale" bottom: "batch_norm_blob78" top: "batch_norm_blob78" scale_param { bias_term: true } } layer { name: "conv79" type: "Convolution" bottom: "relu_blob61" top: "conv_blob79" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm79" type: "BatchNorm" bottom: "conv_blob79" top: "batch_norm_blob79" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale79" type: "Scale" bottom: "batch_norm_blob79" top: "batch_norm_blob79" scale_param { bias_term: true } } layer { name: "add43" type: "Eltwise" bottom: "batch_norm_blob78" bottom: "batch_norm_blob79" top: "add_blob43" eltwise_param { operation: SUM } } layer { name: "add44" type: "Eltwise" bottom: "add_blob43" bottom: "relu_blob65" top: "add_blob44" eltwise_param { operation: SUM } } layer { name: "relu69" type: "ReLU" bottom: "add_blob44" top: "relu_blob69" } layer { name: "conv80" type: "Convolution" bottom: "relu_blob69" top: "conv_blob80" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm80" type: "BatchNorm" bottom: "conv_blob80" 
top: "batch_norm_blob80" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale80" type: "Scale" bottom: "batch_norm_blob80" top: "batch_norm_blob80" scale_param { bias_term: true } } layer { name: "relu70" type: "ReLU" bottom: "batch_norm_blob80" top: "relu_blob70" } layer { name: "conv81" type: "Convolution" bottom: "relu_blob66" top: "conv_blob81" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm81" type: "BatchNorm" bottom: "conv_blob81" top: "batch_norm_blob81" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale81" type: "Scale" bottom: "batch_norm_blob81" top: "batch_norm_blob81" scale_param { bias_term: true } } layer { name: "relu71" type: "ReLU" bottom: "batch_norm_blob81" top: "relu_blob71" } layer { name: "conv82" type: "Convolution" bottom: "relu_blob71" top: "conv_blob82" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm82" type: "BatchNorm" bottom: "conv_blob82" top: "batch_norm_blob82" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale82" type: "Scale" bottom: "batch_norm_blob82" top: "batch_norm_blob82" scale_param { bias_term: true } } layer { name: "add45" type: "Eltwise" bottom: "batch_norm_blob82" bottom: "relu_blob66" top: "add_blob45" eltwise_param { operation: SUM } } layer { name: "relu72" type: "ReLU" bottom: "add_blob45" top: "relu_blob72" } layer { name: "conv83" type: "Convolution" bottom: "relu_blob72" top: "conv_blob83" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm83" type: "BatchNorm" bottom: "conv_blob83" top: "batch_norm_blob83" batch_norm_param { use_global_stats: true eps: 
9.9999997e-06 } } layer { name: "bn_scale83" type: "Scale" bottom: "batch_norm_blob83" top: "batch_norm_blob83" scale_param { bias_term: true } } layer { name: "relu73" type: "ReLU" bottom: "batch_norm_blob83" top: "relu_blob73" } layer { name: "conv84" type: "Convolution" bottom: "relu_blob73" top: "conv_blob84" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm84" type: "BatchNorm" bottom: "conv_blob84" top: "batch_norm_blob84" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale84" type: "Scale" bottom: "batch_norm_blob84" top: "batch_norm_blob84" scale_param { bias_term: true } } layer { name: "add46" type: "Eltwise" bottom: "batch_norm_blob84" bottom: "relu_blob72" top: "add_blob46" eltwise_param { operation: SUM } } layer { name: "relu74" type: "ReLU" bottom: "add_blob46" top: "relu_blob74" } layer { name: "conv85" type: "Convolution" bottom: "relu_blob67" top: "conv_blob85" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm85" type: "BatchNorm" bottom: "conv_blob85" top: "batch_norm_blob85" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale85" type: "Scale" bottom: "batch_norm_blob85" top: "batch_norm_blob85" scale_param { bias_term: true } } layer { name: "relu75" type: "ReLU" bottom: "batch_norm_blob85" top: "relu_blob75" } layer { name: "conv86" type: "Convolution" bottom: "relu_blob75" top: "conv_blob86" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm86" type: "BatchNorm" bottom: "conv_blob86" top: "batch_norm_blob86" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale86" type: "Scale" bottom: 
"batch_norm_blob86" top: "batch_norm_blob86" scale_param { bias_term: true } } layer { name: "add47" type: "Eltwise" bottom: "batch_norm_blob86" bottom: "relu_blob67" top: "add_blob47" eltwise_param { operation: SUM } } layer { name: "relu76" type: "ReLU" bottom: "add_blob47" top: "relu_blob76" } layer { name: "conv87" type: "Convolution" bottom: "relu_blob76" top: "conv_blob87" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm87" type: "BatchNorm" bottom: "conv_blob87" top: "batch_norm_blob87" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale87" type: "Scale" bottom: "batch_norm_blob87" top: "batch_norm_blob87" scale_param { bias_term: true } } layer { name: "relu77" type: "ReLU" bottom: "batch_norm_blob87" top: "relu_blob77" } layer { name: "conv88" type: "Convolution" bottom: "relu_blob77" top: "conv_blob88" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm88" type: "BatchNorm" bottom: "conv_blob88" top: "batch_norm_blob88" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale88" type: "Scale" bottom: "batch_norm_blob88" top: "batch_norm_blob88" scale_param { bias_term: true } } layer { name: "add48" type: "Eltwise" bottom: "batch_norm_blob88" bottom: "relu_blob76" top: "add_blob48" eltwise_param { operation: SUM } } layer { name: "relu78" type: "ReLU" bottom: "add_blob48" top: "relu_blob78" } layer { name: "conv89" type: "Convolution" bottom: "relu_blob69" top: "conv_blob89" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm89" type: "BatchNorm" bottom: "conv_blob89" top: "batch_norm_blob89" batch_norm_param { use_global_stats: true eps: 
9.9999997e-06 } } layer { name: "bn_scale89" type: "Scale" bottom: "batch_norm_blob89" top: "batch_norm_blob89" scale_param { bias_term: true } } layer { name: "relu79" type: "ReLU" bottom: "batch_norm_blob89" top: "relu_blob79" } layer { name: "conv90" type: "Convolution" bottom: "relu_blob79" top: "conv_blob90" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm90" type: "BatchNorm" bottom: "conv_blob90" top: "batch_norm_blob90" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale90" type: "Scale" bottom: "batch_norm_blob90" top: "batch_norm_blob90" scale_param { bias_term: true } } layer { name: "add49" type: "Eltwise" bottom: "batch_norm_blob90" bottom: "relu_blob69" top: "add_blob49" eltwise_param { operation: SUM } } layer { name: "relu80" type: "ReLU" bottom: "add_blob49" top: "relu_blob80" } layer { name: "conv91" type: "Convolution" bottom: "relu_blob80" top: "conv_blob91" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm91" type: "BatchNorm" bottom: "conv_blob91" top: "batch_norm_blob91" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale91" type: "Scale" bottom: "batch_norm_blob91" top: "batch_norm_blob91" scale_param { bias_term: true } } layer { name: "relu81" type: "ReLU" bottom: "batch_norm_blob91" top: "relu_blob81" } layer { name: "conv92" type: "Convolution" bottom: "relu_blob81" top: "conv_blob92" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm92" type: "BatchNorm" bottom: "conv_blob92" top: "batch_norm_blob92" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale92" type: "Scale" bottom: 
"batch_norm_blob92" top: "batch_norm_blob92" scale_param { bias_term: true } } layer { name: "add50" type: "Eltwise" bottom: "batch_norm_blob92" bottom: "relu_blob80" top: "add_blob50" eltwise_param { operation: SUM } } layer { name: "relu82" type: "ReLU" bottom: "add_blob50" top: "relu_blob82" } layer { name: "conv93" type: "Convolution" bottom: "relu_blob70" top: "conv_blob93" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm93" type: "BatchNorm" bottom: "conv_blob93" top: "batch_norm_blob93" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale93" type: "Scale" bottom: "batch_norm_blob93" top: "batch_norm_blob93" scale_param { bias_term: true } } layer { name: "relu83" type: "ReLU" bottom: "batch_norm_blob93" top: "relu_blob83" } layer { name: "conv94" type: "Convolution" bottom: "relu_blob83" top: "conv_blob94" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm94" type: "BatchNorm" bottom: "conv_blob94" top: "batch_norm_blob94" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale94" type: "Scale" bottom: "batch_norm_blob94" top: "batch_norm_blob94" scale_param { bias_term: true } } layer { name: "add51" type: "Eltwise" bottom: "batch_norm_blob94" bottom: "relu_blob70" top: "add_blob51" eltwise_param { operation: SUM } } layer { name: "relu84" type: "ReLU" bottom: "add_blob51" top: "relu_blob84" } layer { name: "conv95" type: "Convolution" bottom: "relu_blob84" top: "conv_blob95" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm95" type: "BatchNorm" bottom: "conv_blob95" top: "batch_norm_blob95" batch_norm_param { use_global_stats: true eps: 
9.9999997e-06 } } layer { name: "bn_scale95" type: "Scale" bottom: "batch_norm_blob95" top: "batch_norm_blob95" scale_param { bias_term: true } } layer { name: "relu85" type: "ReLU" bottom: "batch_norm_blob95" top: "relu_blob85" } layer { name: "conv96" type: "Convolution" bottom: "relu_blob85" top: "conv_blob96" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm96" type: "BatchNorm" bottom: "conv_blob96" top: "batch_norm_blob96" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale96" type: "Scale" bottom: "batch_norm_blob96" top: "batch_norm_blob96" scale_param { bias_term: true } } layer { name: "add52" type: "Eltwise" bottom: "batch_norm_blob96" bottom: "relu_blob84" top: "add_blob52" eltwise_param { operation: SUM } } layer { name: "relu86" type: "ReLU" bottom: "add_blob52" top: "relu_blob86" } layer { name: "conv97" type: "Convolution" bottom: "relu_blob78" top: "conv_blob97" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm97" type: "BatchNorm" bottom: "conv_blob97" top: "batch_norm_blob97" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale97" type: "Scale" bottom: "batch_norm_blob97" top: "batch_norm_blob97" scale_param { bias_term: true } } layer { name: "upsample11" type: "Interp" bottom: "batch_norm_blob97" top: "upsample_blob11" interp_param { height: 64 width: 64 } } layer { name: "add53" type: "Eltwise" bottom: "relu_blob74" bottom: "upsample_blob11" top: "add_blob53" eltwise_param { operation: SUM } } layer { name: "conv98" type: "Convolution" bottom: "relu_blob82" top: "conv_blob98" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: 
"batch_norm98" type: "BatchNorm" bottom: "conv_blob98" top: "batch_norm_blob98" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale98" type: "Scale" bottom: "batch_norm_blob98" top: "batch_norm_blob98" scale_param { bias_term: true } } layer { name: "upsample12" type: "Interp" bottom: "batch_norm_blob98" top: "upsample_blob12" interp_param { height: 64 width: 64 } } layer { name: "add54" type: "Eltwise" bottom: "add_blob53" bottom: "upsample_blob12" top: "add_blob54" eltwise_param { operation: SUM } } layer { name: "conv99" type: "Convolution" bottom: "relu_blob86" top: "conv_blob99" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm99" type: "BatchNorm" bottom: "conv_blob99" top: "batch_norm_blob99" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale99" type: "Scale" bottom: "batch_norm_blob99" top: "batch_norm_blob99" scale_param { bias_term: true } } layer { name: "upsample13" type: "Interp" bottom: "batch_norm_blob99" top: "upsample_blob13" interp_param { height: 64 width: 64 } } layer { name: "add55" type: "Eltwise" bottom: "add_blob54" bottom: "upsample_blob13" top: "add_blob55" eltwise_param { operation: SUM } } layer { name: "relu87" type: "ReLU" bottom: "add_blob55" top: "relu_blob87" } layer { name: "conv100" type: "Convolution" bottom: "relu_blob74" top: "conv_blob100" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm100" type: "BatchNorm" bottom: "conv_blob100" top: "batch_norm_blob100" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale100" type: "Scale" bottom: "batch_norm_blob100" top: "batch_norm_blob100" scale_param { bias_term: true } } layer { name: "add56" type: "Eltwise" bottom: "batch_norm_blob100" 
bottom: "relu_blob78" top: "add_blob56" eltwise_param { operation: SUM } } layer { name: "conv101" type: "Convolution" bottom: "relu_blob82" top: "conv_blob101" convolution_param { num_output: 36 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm101" type: "BatchNorm" bottom: "conv_blob101" top: "batch_norm_blob101" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale101" type: "Scale" bottom: "batch_norm_blob101" top: "batch_norm_blob101" scale_param { bias_term: true } } layer { name: "upsample14" type: "Interp" bottom: "batch_norm_blob101" top: "upsample_blob14" interp_param { height: 32 width: 32 } } layer { name: "add57" type: "Eltwise" bottom: "add_blob56" bottom: "upsample_blob14" top: "add_blob57" eltwise_param { operation: SUM } } layer { name: "conv102" type: "Convolution" bottom: "relu_blob86" top: "conv_blob102" convolution_param { num_output: 36 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm102" type: "BatchNorm" bottom: "conv_blob102" top: "batch_norm_blob102" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale102" type: "Scale" bottom: "batch_norm_blob102" top: "batch_norm_blob102" scale_param { bias_term: true } } layer { name: "upsample15" type: "Interp" bottom: "batch_norm_blob102" top: "upsample_blob15" interp_param { height: 32 width: 32 } } layer { name: "add58" type: "Eltwise" bottom: "add_blob57" bottom: "upsample_blob15" top: "add_blob58" eltwise_param { operation: SUM } } layer { name: "relu88" type: "ReLU" bottom: "add_blob58" top: "relu_blob88" } layer { name: "conv103" type: "Convolution" bottom: "relu_blob74" top: "conv_blob103" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm103" 
type: "BatchNorm" bottom: "conv_blob103" top: "batch_norm_blob103" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale103" type: "Scale" bottom: "batch_norm_blob103" top: "batch_norm_blob103" scale_param { bias_term: true } } layer { name: "relu89" type: "ReLU" bottom: "batch_norm_blob103" top: "relu_blob89" } layer { name: "conv104" type: "Convolution" bottom: "relu_blob89" top: "conv_blob104" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm104" type: "BatchNorm" bottom: "conv_blob104" top: "batch_norm_blob104" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale104" type: "Scale" bottom: "batch_norm_blob104" top: "batch_norm_blob104" scale_param { bias_term: true } } layer { name: "conv105" type: "Convolution" bottom: "relu_blob78" top: "conv_blob105" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm105" type: "BatchNorm" bottom: "conv_blob105" top: "batch_norm_blob105" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale105" type: "Scale" bottom: "batch_norm_blob105" top: "batch_norm_blob105" scale_param { bias_term: true } } layer { name: "add59" type: "Eltwise" bottom: "batch_norm_blob104" bottom: "batch_norm_blob105" top: "add_blob59" eltwise_param { operation: SUM } } layer { name: "add60" type: "Eltwise" bottom: "add_blob59" bottom: "relu_blob82" top: "add_blob60" eltwise_param { operation: SUM } } layer { name: "conv106" type: "Convolution" bottom: "relu_blob86" top: "conv_blob106" convolution_param { num_output: 72 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm106" type: "BatchNorm" bottom: "conv_blob106" top: "batch_norm_blob106" 
batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale106" type: "Scale" bottom: "batch_norm_blob106" top: "batch_norm_blob106" scale_param { bias_term: true } } layer { name: "upsample16" type: "Interp" bottom: "batch_norm_blob106" top: "upsample_blob16" interp_param { height: 16 width: 16 } } layer { name: "add61" type: "Eltwise" bottom: "add_blob60" bottom: "upsample_blob16" top: "add_blob61" eltwise_param { operation: SUM } } layer { name: "relu90" type: "ReLU" bottom: "add_blob61" top: "relu_blob90" } layer { name: "conv107" type: "Convolution" bottom: "relu_blob74" top: "conv_blob107" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm107" type: "BatchNorm" bottom: "conv_blob107" top: "batch_norm_blob107" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale107" type: "Scale" bottom: "batch_norm_blob107" top: "batch_norm_blob107" scale_param { bias_term: true } } layer { name: "relu91" type: "ReLU" bottom: "batch_norm_blob107" top: "relu_blob91" } layer { name: "conv108" type: "Convolution" bottom: "relu_blob91" top: "conv_blob108" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm108" type: "BatchNorm" bottom: "conv_blob108" top: "batch_norm_blob108" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale108" type: "Scale" bottom: "batch_norm_blob108" top: "batch_norm_blob108" scale_param { bias_term: true } } layer { name: "relu92" type: "ReLU" bottom: "batch_norm_blob108" top: "relu_blob92" } layer { name: "conv109" type: "Convolution" bottom: "relu_blob92" top: "conv_blob109" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { 
name: "batch_norm109" type: "BatchNorm" bottom: "conv_blob109" top: "batch_norm_blob109" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale109" type: "Scale" bottom: "batch_norm_blob109" top: "batch_norm_blob109" scale_param { bias_term: true } } layer { name: "conv110" type: "Convolution" bottom: "relu_blob78" top: "conv_blob110" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm110" type: "BatchNorm" bottom: "conv_blob110" top: "batch_norm_blob110" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale110" type: "Scale" bottom: "batch_norm_blob110" top: "batch_norm_blob110" scale_param { bias_term: true } } layer { name: "relu93" type: "ReLU" bottom: "batch_norm_blob110" top: "relu_blob93" } layer { name: "conv111" type: "Convolution" bottom: "relu_blob93" top: "conv_blob111" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm111" type: "BatchNorm" bottom: "conv_blob111" top: "batch_norm_blob111" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale111" type: "Scale" bottom: "batch_norm_blob111" top: "batch_norm_blob111" scale_param { bias_term: true } } layer { name: "add62" type: "Eltwise" bottom: "batch_norm_blob109" bottom: "batch_norm_blob111" top: "add_blob62" eltwise_param { operation: SUM } } layer { name: "conv112" type: "Convolution" bottom: "relu_blob82" top: "conv_blob112" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm112" type: "BatchNorm" bottom: "conv_blob112" top: "batch_norm_blob112" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale112" type: "Scale" 
bottom: "batch_norm_blob112" top: "batch_norm_blob112" scale_param { bias_term: true } } layer { name: "add63" type: "Eltwise" bottom: "add_blob62" bottom: "batch_norm_blob112" top: "add_blob63" eltwise_param { operation: SUM } } layer { name: "add64" type: "Eltwise" bottom: "add_blob63" bottom: "relu_blob86" top: "add_blob64" eltwise_param { operation: SUM } } layer { name: "relu94" type: "ReLU" bottom: "add_blob64" top: "relu_blob94" } layer { name: "conv113" type: "Convolution" bottom: "relu_blob87" top: "conv_blob113" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm113" type: "BatchNorm" bottom: "conv_blob113" top: "batch_norm_blob113" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale113" type: "Scale" bottom: "batch_norm_blob113" top: "batch_norm_blob113" scale_param { bias_term: true } } layer { name: "relu95" type: "ReLU" bottom: "batch_norm_blob113" top: "relu_blob95" } layer { name: "conv114" type: "Convolution" bottom: "relu_blob95" top: "conv_blob114" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm114" type: "BatchNorm" bottom: "conv_blob114" top: "batch_norm_blob114" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale114" type: "Scale" bottom: "batch_norm_blob114" top: "batch_norm_blob114" scale_param { bias_term: true } } layer { name: "add65" type: "Eltwise" bottom: "batch_norm_blob114" bottom: "relu_blob87" top: "add_blob65" eltwise_param { operation: SUM } } layer { name: "relu96" type: "ReLU" bottom: "add_blob65" top: "relu_blob96" } layer { name: "conv115" type: "Convolution" bottom: "relu_blob96" top: "conv_blob115" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" 
} dilation: 1 } } layer { name: "batch_norm115" type: "BatchNorm" bottom: "conv_blob115" top: "batch_norm_blob115" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale115" type: "Scale" bottom: "batch_norm_blob115" top: "batch_norm_blob115" scale_param { bias_term: true } } layer { name: "relu97" type: "ReLU" bottom: "batch_norm_blob115" top: "relu_blob97" } layer { name: "conv116" type: "Convolution" bottom: "relu_blob97" top: "conv_blob116" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm116" type: "BatchNorm" bottom: "conv_blob116" top: "batch_norm_blob116" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale116" type: "Scale" bottom: "batch_norm_blob116" top: "batch_norm_blob116" scale_param { bias_term: true } } layer { name: "add66" type: "Eltwise" bottom: "batch_norm_blob116" bottom: "relu_blob96" top: "add_blob66" eltwise_param { operation: SUM } } layer { name: "relu98" type: "ReLU" bottom: "add_blob66" top: "relu_blob98" } layer { name: "conv117" type: "Convolution" bottom: "relu_blob88" top: "conv_blob117" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm117" type: "BatchNorm" bottom: "conv_blob117" top: "batch_norm_blob117" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale117" type: "Scale" bottom: "batch_norm_blob117" top: "batch_norm_blob117" scale_param { bias_term: true } } layer { name: "relu99" type: "ReLU" bottom: "batch_norm_blob117" top: "relu_blob99" } layer { name: "conv118" type: "Convolution" bottom: "relu_blob99" top: "conv_blob118" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: 
"batch_norm118" type: "BatchNorm" bottom: "conv_blob118" top: "batch_norm_blob118" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale118" type: "Scale" bottom: "batch_norm_blob118" top: "batch_norm_blob118" scale_param { bias_term: true } } layer { name: "add67" type: "Eltwise" bottom: "batch_norm_blob118" bottom: "relu_blob88" top: "add_blob67" eltwise_param { operation: SUM } } layer { name: "relu100" type: "ReLU" bottom: "add_blob67" top: "relu_blob100" } layer { name: "conv119" type: "Convolution" bottom: "relu_blob100" top: "conv_blob119" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm119" type: "BatchNorm" bottom: "conv_blob119" top: "batch_norm_blob119" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale119" type: "Scale" bottom: "batch_norm_blob119" top: "batch_norm_blob119" scale_param { bias_term: true } } layer { name: "relu101" type: "ReLU" bottom: "batch_norm_blob119" top: "relu_blob101" } layer { name: "conv120" type: "Convolution" bottom: "relu_blob101" top: "conv_blob120" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm120" type: "BatchNorm" bottom: "conv_blob120" top: "batch_norm_blob120" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale120" type: "Scale" bottom: "batch_norm_blob120" top: "batch_norm_blob120" scale_param { bias_term: true } } layer { name: "add68" type: "Eltwise" bottom: "batch_norm_blob120" bottom: "relu_blob100" top: "add_blob68" eltwise_param { operation: SUM } } layer { name: "relu102" type: "ReLU" bottom: "add_blob68" top: "relu_blob102" } layer { name: "conv121" type: "Convolution" bottom: "relu_blob90" top: "conv_blob121" convolution_param { num_output: 72 bias_term: false pad: 
1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm121" type: "BatchNorm" bottom: "conv_blob121" top: "batch_norm_blob121" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale121" type: "Scale" bottom: "batch_norm_blob121" top: "batch_norm_blob121" scale_param { bias_term: true } } layer { name: "relu103" type: "ReLU" bottom: "batch_norm_blob121" top: "relu_blob103" } layer { name: "conv122" type: "Convolution" bottom: "relu_blob103" top: "conv_blob122" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm122" type: "BatchNorm" bottom: "conv_blob122" top: "batch_norm_blob122" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale122" type: "Scale" bottom: "batch_norm_blob122" top: "batch_norm_blob122" scale_param { bias_term: true } } layer { name: "add69" type: "Eltwise" bottom: "batch_norm_blob122" bottom: "relu_blob90" top: "add_blob69" eltwise_param { operation: SUM } } layer { name: "relu104" type: "ReLU" bottom: "add_blob69" top: "relu_blob104" } layer { name: "conv123" type: "Convolution" bottom: "relu_blob104" top: "conv_blob123" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm123" type: "BatchNorm" bottom: "conv_blob123" top: "batch_norm_blob123" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale123" type: "Scale" bottom: "batch_norm_blob123" top: "batch_norm_blob123" scale_param { bias_term: true } } layer { name: "relu105" type: "ReLU" bottom: "batch_norm_blob123" top: "relu_blob105" } layer { name: "conv124" type: "Convolution" bottom: "relu_blob105" top: "conv_blob124" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 
weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm124" type: "BatchNorm" bottom: "conv_blob124" top: "batch_norm_blob124" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale124" type: "Scale" bottom: "batch_norm_blob124" top: "batch_norm_blob124" scale_param { bias_term: true } } layer { name: "add70" type: "Eltwise" bottom: "batch_norm_blob124" bottom: "relu_blob104" top: "add_blob70" eltwise_param { operation: SUM } } layer { name: "relu106" type: "ReLU" bottom: "add_blob70" top: "relu_blob106" } layer { name: "conv125" type: "Convolution" bottom: "relu_blob94" top: "conv_blob125" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm125" type: "BatchNorm" bottom: "conv_blob125" top: "batch_norm_blob125" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale125" type: "Scale" bottom: "batch_norm_blob125" top: "batch_norm_blob125" scale_param { bias_term: true } } layer { name: "relu107" type: "ReLU" bottom: "batch_norm_blob125" top: "relu_blob107" } layer { name: "conv126" type: "Convolution" bottom: "relu_blob107" top: "conv_blob126" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm126" type: "BatchNorm" bottom: "conv_blob126" top: "batch_norm_blob126" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale126" type: "Scale" bottom: "batch_norm_blob126" top: "batch_norm_blob126" scale_param { bias_term: true } } layer { name: "add71" type: "Eltwise" bottom: "batch_norm_blob126" bottom: "relu_blob94" top: "add_blob71" eltwise_param { operation: SUM } } layer { name: "relu108" type: "ReLU" bottom: "add_blob71" top: "relu_blob108" } layer { name: "conv127" type: "Convolution" bottom: "relu_blob108" top: 
"conv_blob127" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm127" type: "BatchNorm" bottom: "conv_blob127" top: "batch_norm_blob127" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale127" type: "Scale" bottom: "batch_norm_blob127" top: "batch_norm_blob127" scale_param { bias_term: true } } layer { name: "relu109" type: "ReLU" bottom: "batch_norm_blob127" top: "relu_blob109" } layer { name: "conv128" type: "Convolution" bottom: "relu_blob109" top: "conv_blob128" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm128" type: "BatchNorm" bottom: "conv_blob128" top: "batch_norm_blob128" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale128" type: "Scale" bottom: "batch_norm_blob128" top: "batch_norm_blob128" scale_param { bias_term: true } } layer { name: "add72" type: "Eltwise" bottom: "batch_norm_blob128" bottom: "relu_blob108" top: "add_blob72" eltwise_param { operation: SUM } } layer { name: "relu110" type: "ReLU" bottom: "add_blob72" top: "relu_blob110" } layer { name: "conv129" type: "Convolution" bottom: "relu_blob102" top: "conv_blob129" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm129" type: "BatchNorm" bottom: "conv_blob129" top: "batch_norm_blob129" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale129" type: "Scale" bottom: "batch_norm_blob129" top: "batch_norm_blob129" scale_param { bias_term: true } } layer { name: "upsample17" type: "Interp" bottom: "batch_norm_blob129" top: "upsample_blob17" interp_param { height: 64 width: 64 } } layer { name: "add73" type: "Eltwise" bottom: 
"relu_blob98" bottom: "upsample_blob17" top: "add_blob73" eltwise_param { operation: SUM } } layer { name: "conv130" type: "Convolution" bottom: "relu_blob106" top: "conv_blob130" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm130" type: "BatchNorm" bottom: "conv_blob130" top: "batch_norm_blob130" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale130" type: "Scale" bottom: "batch_norm_blob130" top: "batch_norm_blob130" scale_param { bias_term: true } } layer { name: "upsample18" type: "Interp" bottom: "batch_norm_blob130" top: "upsample_blob18" interp_param { height: 64 width: 64 } } layer { name: "add74" type: "Eltwise" bottom: "add_blob73" bottom: "upsample_blob18" top: "add_blob74" eltwise_param { operation: SUM } } layer { name: "conv131" type: "Convolution" bottom: "relu_blob110" top: "conv_blob131" convolution_param { num_output: 18 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm131" type: "BatchNorm" bottom: "conv_blob131" top: "batch_norm_blob131" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale131" type: "Scale" bottom: "batch_norm_blob131" top: "batch_norm_blob131" scale_param { bias_term: true } } layer { name: "upsample19" type: "Interp" bottom: "batch_norm_blob131" top: "upsample_blob19" interp_param { height: 64 width: 64 } } layer { name: "add75" type: "Eltwise" bottom: "add_blob74" bottom: "upsample_blob19" top: "add_blob75" eltwise_param { operation: SUM } } layer { name: "relu111" type: "ReLU" bottom: "add_blob75" top: "relu_blob111" } layer { name: "conv132" type: "Convolution" bottom: "relu_blob98" top: "conv_blob132" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { 
name: "batch_norm132" type: "BatchNorm" bottom: "conv_blob132" top: "batch_norm_blob132" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale132" type: "Scale" bottom: "batch_norm_blob132" top: "batch_norm_blob132" scale_param { bias_term: true } } layer { name: "add76" type: "Eltwise" bottom: "batch_norm_blob132" bottom: "relu_blob102" top: "add_blob76" eltwise_param { operation: SUM } } layer { name: "conv133" type: "Convolution" bottom: "relu_blob106" top: "conv_blob133" convolution_param { num_output: 36 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm133" type: "BatchNorm" bottom: "conv_blob133" top: "batch_norm_blob133" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale133" type: "Scale" bottom: "batch_norm_blob133" top: "batch_norm_blob133" scale_param { bias_term: true } } layer { name: "upsample20" type: "Interp" bottom: "batch_norm_blob133" top: "upsample_blob20" interp_param { height: 32 width: 32 } } layer { name: "add77" type: "Eltwise" bottom: "add_blob76" bottom: "upsample_blob20" top: "add_blob77" eltwise_param { operation: SUM } } layer { name: "conv134" type: "Convolution" bottom: "relu_blob110" top: "conv_blob134" convolution_param { num_output: 36 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm134" type: "BatchNorm" bottom: "conv_blob134" top: "batch_norm_blob134" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale134" type: "Scale" bottom: "batch_norm_blob134" top: "batch_norm_blob134" scale_param { bias_term: true } } layer { name: "upsample21" type: "Interp" bottom: "batch_norm_blob134" top: "upsample_blob21" interp_param { height: 32 width: 32 } } layer { name: "add78" type: "Eltwise" bottom: "add_blob77" bottom: "upsample_blob21" top: "add_blob78" eltwise_param { 
operation: SUM } } layer { name: "relu112" type: "ReLU" bottom: "add_blob78" top: "relu_blob112" } layer { name: "conv135" type: "Convolution" bottom: "relu_blob98" top: "conv_blob135" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm135" type: "BatchNorm" bottom: "conv_blob135" top: "batch_norm_blob135" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale135" type: "Scale" bottom: "batch_norm_blob135" top: "batch_norm_blob135" scale_param { bias_term: true } } layer { name: "relu113" type: "ReLU" bottom: "batch_norm_blob135" top: "relu_blob113" } layer { name: "conv136" type: "Convolution" bottom: "relu_blob113" top: "conv_blob136" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm136" type: "BatchNorm" bottom: "conv_blob136" top: "batch_norm_blob136" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale136" type: "Scale" bottom: "batch_norm_blob136" top: "batch_norm_blob136" scale_param { bias_term: true } } layer { name: "conv137" type: "Convolution" bottom: "relu_blob102" top: "conv_blob137" convolution_param { num_output: 72 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm137" type: "BatchNorm" bottom: "conv_blob137" top: "batch_norm_blob137" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale137" type: "Scale" bottom: "batch_norm_blob137" top: "batch_norm_blob137" scale_param { bias_term: true } } layer { name: "add79" type: "Eltwise" bottom: "batch_norm_blob136" bottom: "batch_norm_blob137" top: "add_blob79" eltwise_param { operation: SUM } } layer { name: "add80" type: "Eltwise" bottom: "add_blob79" bottom: "relu_blob106" top: 
"add_blob80" eltwise_param { operation: SUM } } layer { name: "conv138" type: "Convolution" bottom: "relu_blob110" top: "conv_blob138" convolution_param { num_output: 72 bias_term: false pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm138" type: "BatchNorm" bottom: "conv_blob138" top: "batch_norm_blob138" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale138" type: "Scale" bottom: "batch_norm_blob138" top: "batch_norm_blob138" scale_param { bias_term: true } } layer { name: "upsample22" type: "Interp" bottom: "batch_norm_blob138" top: "upsample_blob22" interp_param { height: 16 width: 16 } } layer { name: "add81" type: "Eltwise" bottom: "add_blob80" bottom: "upsample_blob22" top: "add_blob81" eltwise_param { operation: SUM } } layer { name: "relu114" type: "ReLU" bottom: "add_blob81" top: "relu_blob114" } layer { name: "conv139" type: "Convolution" bottom: "relu_blob98" top: "conv_blob139" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm139" type: "BatchNorm" bottom: "conv_blob139" top: "batch_norm_blob139" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale139" type: "Scale" bottom: "batch_norm_blob139" top: "batch_norm_blob139" scale_param { bias_term: true } } layer { name: "relu115" type: "ReLU" bottom: "batch_norm_blob139" top: "relu_blob115" } layer { name: "conv140" type: "Convolution" bottom: "relu_blob115" top: "conv_blob140" convolution_param { num_output: 18 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm140" type: "BatchNorm" bottom: "conv_blob140" top: "batch_norm_blob140" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale140" type: "Scale" bottom: "batch_norm_blob140" top: 
"batch_norm_blob140" scale_param { bias_term: true } } layer { name: "relu116" type: "ReLU" bottom: "batch_norm_blob140" top: "relu_blob116" } layer { name: "conv141" type: "Convolution" bottom: "relu_blob116" top: "conv_blob141" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm141" type: "BatchNorm" bottom: "conv_blob141" top: "batch_norm_blob141" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale141" type: "Scale" bottom: "batch_norm_blob141" top: "batch_norm_blob141" scale_param { bias_term: true } } layer { name: "conv142" type: "Convolution" bottom: "relu_blob102" top: "conv_blob142" convolution_param { num_output: 36 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm142" type: "BatchNorm" bottom: "conv_blob142" top: "batch_norm_blob142" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale142" type: "Scale" bottom: "batch_norm_blob142" top: "batch_norm_blob142" scale_param { bias_term: true } } layer { name: "relu117" type: "ReLU" bottom: "batch_norm_blob142" top: "relu_blob117" } layer { name: "conv143" type: "Convolution" bottom: "relu_blob117" top: "conv_blob143" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm143" type: "BatchNorm" bottom: "conv_blob143" top: "batch_norm_blob143" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale143" type: "Scale" bottom: "batch_norm_blob143" top: "batch_norm_blob143" scale_param { bias_term: true } } layer { name: "add82" type: "Eltwise" bottom: "batch_norm_blob141" bottom: "batch_norm_blob143" top: "add_blob82" eltwise_param { operation: SUM } } layer { name: "conv144" type: "Convolution" bottom: 
"relu_blob106" top: "conv_blob144" convolution_param { num_output: 144 bias_term: false pad: 1 kernel_size: 3 group: 1 stride: 2 weight_filler { type: "xavier" } dilation: 1 } } layer { name: "batch_norm144" type: "BatchNorm" bottom: "conv_blob144" top: "batch_norm_blob144" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale144" type: "Scale" bottom: "batch_norm_blob144" top: "batch_norm_blob144" scale_param { bias_term: true } } layer { name: "add83" type: "Eltwise" bottom: "add_blob82" bottom: "batch_norm_blob144" top: "add_blob83" eltwise_param { operation: SUM } } layer { name: "add84" type: "Eltwise" bottom: "add_blob83" bottom: "relu_blob110" top: "add_blob84" eltwise_param { operation: SUM } } layer { name: "relu118" type: "ReLU" bottom: "add_blob84" top: "relu_blob118" } layer { name: "upsample23" type: "Interp" bottom: "relu_blob112" top: "upsample_blob23" interp_param { height: 64 width: 64 } } layer { name: "upsample24" type: "Interp" bottom: "relu_blob114" top: "upsample_blob24" interp_param { height: 64 width: 64 } } layer { name: "upsample25" type: "Interp" bottom: "relu_blob118" top: "upsample_blob25" interp_param { height: 64 width: 64 } } layer { name: "cat1" type: "Concat" bottom: "relu_blob111" bottom: "upsample_blob23" bottom: "upsample_blob24" bottom: "upsample_blob25" top: "cat_blob1" concat_param { axis: 1 } } layer { name: "conv145" type: "Convolution" bottom: "cat_blob1" top: "conv_blob145" convolution_param { num_output: 270 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } } layer { name: "batch_norm145" type: "BatchNorm" bottom: "conv_blob145" top: "batch_norm_blob145" batch_norm_param { use_global_stats: true eps: 9.9999997e-06 } } layer { name: "bn_scale145" type: "Scale" bottom: "batch_norm_blob145" top: "batch_norm_blob145" scale_param { bias_term: true } } layer { name: "relu119" type: "ReLU" bottom: 
"batch_norm_blob145" top: "relu_blob119" } layer { name: "conv146" type: "Convolution" bottom: "relu_blob119" top: "conv_blob146" convolution_param { num_output: 21 bias_term: true pad: 0 kernel_size: 1 group: 1 stride: 1 weight_filler { type: "xavier" } bias_filler { type: "constant" } dilation: 1 } }