# Caffe network definition (protobuf text format — NOT YAML, despite the file
# extension hint). Portrait segmentation: a ResNet-style encoder (stem conv +
# max-pool, then residual stages of 32/64/128/256 channels, each downsampling
# stage using a 1x1 stride-2 projection shortcut) followed by a decoder that
# fuses encoder skip connections via 1x1 convs and Eltwise adds, applies
# dilated 3x3 convs, upsamples with "Interp" layers (32 -> 64 -> 128 -> 512),
# and ends in a 2-class Softmax head.
#
# NOTE(review): "Interp" is not an upstream Caffe layer type — this deploy
# file presumably targets the PSPNet/DeepLab Caffe fork; confirm the runtime.
# NOTE(review): the 4-line "input:"/"input_dim:" header is the deprecated
# input spec (batch 1, 3 channels, 512x512); some newer tooling expects an
# explicit Input layer instead — verify against the deployment toolchain.
#
# Section: input spec; BatchNorm+Scale applied directly to the input image;
# stem 7x7/stride-2 conv (64 ch, no bias) + BN/Scale/ReLU; 3x3/stride-2 max
# pool; first 32-channel residual block (two 3x3 convs, each BN/Scale, with
# ReLU between). All in-place ReLU/Scale layers reuse their bottom as top.
input: "data" input_dim: 1 input_dim: 3 input_dim: 512 input_dim: 512 layer { bottom: "data" top: "bn_data" name: "bn_data" type: "BatchNorm" } layer { bottom: "bn_data" top: "bn_data_scale" name: "bn_data_scale" type: "Scale" scale_param { bias_term: true } } layer { name: "ConvNdBackward1" type: "Convolution" bottom: "bn_data_scale" top: "ConvNdBackward1" convolution_param { num_output: 64 kernel_size: 7 pad: 3 stride: 2 bias_term: false } } layer { name: "BatchNormBackward2_bn" type: "BatchNorm" bottom: "ConvNdBackward1" top: "BatchNormBackward2" } layer { name: "BatchNormBackward2_scale" type: "Scale" bottom: "BatchNormBackward2" top: "BatchNormBackward2" scale_param { bias_term: true } } layer { name: "ThresholdBackward3" type: "ReLU" bottom: "BatchNormBackward2" top: "BatchNormBackward2" } layer { name: "MaxPool2DBackward4" type: "Pooling" bottom: "BatchNormBackward2" top: "MaxPool2DBackward4" pooling_param { pool: MAX kernel_size: 3 pad: 0 stride: 2 } } layer { name: "ConvNdBackward5" type: "Convolution" bottom: "MaxPool2DBackward4" top: "ConvNdBackward5" convolution_param { num_output: 32 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward6_bn" type: "BatchNorm" bottom: "ConvNdBackward5" top: "BatchNormBackward6" } layer { name: "BatchNormBackward6_scale" type: "Scale" bottom: "BatchNormBackward6" top: "BatchNormBackward6" scale_param { bias_term: true } } layer { name: "ThresholdBackward7" type: "ReLU" bottom: "BatchNormBackward6" top: "BatchNormBackward6" } layer { name: "ConvNdBackward8" type: "Convolution" bottom: "BatchNormBackward6" top: "ConvNdBackward8" convolution_param { num_output: 32 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward9_bn" type: "BatchNorm" bottom: "ConvNdBackward8" top: "BatchNormBackward9" } layer { name: "BatchNormBackward9_scale" type: "Scale" bottom: "BatchNormBackward9" top: "BatchNormBackward9" scale_param { bias_term: true } } layer { name: 
# Section: 1x1 projection shortcut (ConvNdBackward10, from the pooled stem
# output) for the first residual block; Eltwise merge (no eltwise_param, so
# Caffe's default SUM) + ReLU; second 32-channel block with an identity
# shortcut (AddBackward119 sums BatchNormBackward17 with AddBackward111);
# then the start of the stride-2 transition conv to 64 channels.
"ConvNdBackward10" type: "Convolution" bottom: "MaxPool2DBackward4" top: "ConvNdBackward10" convolution_param { num_output: 32 kernel_size: 1 pad: 0 stride: 1 bias_term: false } } layer { name: "BatchNormBackward10_bn" type: "BatchNorm" bottom: "ConvNdBackward10" top: "BatchNormBackward10" } layer { name: "BatchNormBackward10_scale" type: "Scale" bottom: "BatchNormBackward10" top: "BatchNormBackward10" scale_param { bias_term: true } } layer { name: "AddBackward111" type: "Eltwise" bottom: "BatchNormBackward9" bottom: "BatchNormBackward10" top: "AddBackward111" } layer { name: "ThresholdBackward12" type: "ReLU" bottom: "AddBackward111" top: "AddBackward111" } layer { name: "ConvNdBackward13" type: "Convolution" bottom: "AddBackward111" top: "ConvNdBackward13" convolution_param { num_output: 32 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward14_bn" type: "BatchNorm" bottom: "ConvNdBackward13" top: "BatchNormBackward14" } layer { name: "BatchNormBackward14_scale" type: "Scale" bottom: "BatchNormBackward14" top: "BatchNormBackward14" scale_param { bias_term: true } } layer { name: "ThresholdBackward15" type: "ReLU" bottom: "BatchNormBackward14" top: "BatchNormBackward14" } layer { name: "ConvNdBackward16" type: "Convolution" bottom: "BatchNormBackward14" top: "ConvNdBackward16" convolution_param { num_output: 32 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward17_bn" type: "BatchNorm" bottom: "ConvNdBackward16" top: "BatchNormBackward17" } layer { name: "BatchNormBackward17_scale" type: "Scale" bottom: "BatchNormBackward17" top: "BatchNormBackward17" scale_param { bias_term: true } } layer { name: "AddBackward119" type: "Eltwise" bottom: "BatchNormBackward17" bottom: "AddBackward111" top: "AddBackward119" } layer { name: "ThresholdBackward20" type: "ReLU" bottom: "AddBackward119" top: "AddBackward119" } layer { name: "ConvNdBackward21" type: "Convolution" bottom: "AddBackward119" top: 
# Section: 64-channel downsampling residual block — 3x3/stride-2 conv
# (ConvNdBackward21) + 3x3 conv, merged with a 1x1/stride-2 projection
# shortcut (ConvNdBackward27) at AddBackward129; then the first half of the
# 64-channel identity block (ConvNdBackward31).
"ConvNdBackward21" convolution_param { num_output: 64 kernel_size: 3 pad: 1 stride: 2 bias_term: false } } layer { name: "BatchNormBackward22_bn" type: "BatchNorm" bottom: "ConvNdBackward21" top: "BatchNormBackward22" } layer { name: "BatchNormBackward22_scale" type: "Scale" bottom: "BatchNormBackward22" top: "BatchNormBackward22" scale_param { bias_term: true } } layer { name: "ThresholdBackward23" type: "ReLU" bottom: "BatchNormBackward22" top: "BatchNormBackward22" } layer { name: "ConvNdBackward24" type: "Convolution" bottom: "BatchNormBackward22" top: "ConvNdBackward24" convolution_param { num_output: 64 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward25_bn" type: "BatchNorm" bottom: "ConvNdBackward24" top: "BatchNormBackward25" } layer { name: "BatchNormBackward25_scale" type: "Scale" bottom: "BatchNormBackward25" top: "BatchNormBackward25" scale_param { bias_term: true } } layer { name: "ConvNdBackward27" type: "Convolution" bottom: "AddBackward119" top: "ConvNdBackward27" convolution_param { num_output: 64 kernel_size: 1 pad: 0 stride: 2 bias_term: false } } layer { name: "BatchNormBackward28_bn" type: "BatchNorm" bottom: "ConvNdBackward27" top: "BatchNormBackward28" } layer { name: "BatchNormBackward28_scale" type: "Scale" bottom: "BatchNormBackward28" top: "BatchNormBackward28" scale_param { bias_term: true } } layer { name: "AddBackward129" type: "Eltwise" bottom: "BatchNormBackward25" bottom: "BatchNormBackward28" top: "AddBackward129" } layer { name: "ThresholdBackward30" type: "ReLU" bottom: "AddBackward129" top: "AddBackward129" } layer { name: "ConvNdBackward31" type: "Convolution" bottom: "AddBackward129" top: "ConvNdBackward31" convolution_param { num_output: 64 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward32_bn" type: "BatchNorm" bottom: "ConvNdBackward31" top: "BatchNormBackward32" } layer { name: "BatchNormBackward32_scale" type: "Scale" bottom: "BatchNormBackward32" 
# Section: second half of the 64-channel identity block (merged at
# AddBackward137 with shortcut AddBackward129); then the 128-channel
# downsampling block — 3x3/stride-2 conv (ConvNdBackward39) + 3x3 conv, and
# the start of its 1x1/stride-2 projection shortcut (ConvNdBackward45).
top: "BatchNormBackward32" scale_param { bias_term: true } } layer { name: "ThresholdBackward33" type: "ReLU" bottom: "BatchNormBackward32" top: "BatchNormBackward32" } layer { name: "ConvNdBackward34" type: "Convolution" bottom: "BatchNormBackward32" top: "ConvNdBackward34" convolution_param { num_output: 64 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward35_bn" type: "BatchNorm" bottom: "ConvNdBackward34" top: "BatchNormBackward35" } layer { name: "BatchNormBackward35_scale" type: "Scale" bottom: "BatchNormBackward35" top: "BatchNormBackward35" scale_param { bias_term: true } } layer { name: "AddBackward137" type: "Eltwise" bottom: "BatchNormBackward35" bottom: "AddBackward129" top: "AddBackward137" } layer { name: "ThresholdBackward38" type: "ReLU" bottom: "AddBackward137" top: "AddBackward137" } layer { name: "ConvNdBackward39" type: "Convolution" bottom: "AddBackward137" top: "ConvNdBackward39" convolution_param { num_output: 128 kernel_size: 3 pad: 1 stride: 2 bias_term: false } } layer { name: "BatchNormBackward40_bn" type: "BatchNorm" bottom: "ConvNdBackward39" top: "BatchNormBackward40" } layer { name: "BatchNormBackward40_scale" type: "Scale" bottom: "BatchNormBackward40" top: "BatchNormBackward40" scale_param { bias_term: true } } layer { name: "ThresholdBackward41" type: "ReLU" bottom: "BatchNormBackward40" top: "BatchNormBackward40" } layer { name: "ConvNdBackward42" type: "Convolution" bottom: "BatchNormBackward40" top: "ConvNdBackward42" convolution_param { num_output: 128 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward43_bn" type: "BatchNorm" bottom: "ConvNdBackward42" top: "BatchNormBackward43" } layer { name: "BatchNormBackward43_scale" type: "Scale" bottom: "BatchNormBackward43" top: "BatchNormBackward43" scale_param { bias_term: true } } layer { name: "ConvNdBackward45" type: "Convolution" bottom: "AddBackward137" top: "ConvNdBackward45" convolution_param { num_output: 
# Section: completion of the 128-channel downsampling block (projection
# shortcut BN/Scale, Eltwise merge at AddBackward147, ReLU); 128-channel
# identity block (ConvNdBackward49/52, merged at AddBackward155); then a
# BatchNorm (BatchNormBackward57) on the stage output that begins the
# decoder's skip branch at this resolution.
128 kernel_size: 1 pad: 0 stride: 2 bias_term: false } } layer { name: "BatchNormBackward46_bn" type: "BatchNorm" bottom: "ConvNdBackward45" top: "BatchNormBackward46" } layer { name: "BatchNormBackward46_scale" type: "Scale" bottom: "BatchNormBackward46" top: "BatchNormBackward46" scale_param { bias_term: true } } layer { name: "AddBackward147" type: "Eltwise" bottom: "BatchNormBackward43" bottom: "BatchNormBackward46" top: "AddBackward147" } layer { name: "ThresholdBackward48" type: "ReLU" bottom: "AddBackward147" top: "AddBackward147" } layer { name: "ConvNdBackward49" type: "Convolution" bottom: "AddBackward147" top: "ConvNdBackward49" convolution_param { num_output: 128 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward50_bn" type: "BatchNorm" bottom: "ConvNdBackward49" top: "BatchNormBackward50" } layer { name: "BatchNormBackward50_scale" type: "Scale" bottom: "BatchNormBackward50" top: "BatchNormBackward50" scale_param { bias_term: true } } layer { name: "ThresholdBackward51" type: "ReLU" bottom: "BatchNormBackward50" top: "BatchNormBackward50" } layer { name: "ConvNdBackward52" type: "Convolution" bottom: "BatchNormBackward50" top: "ConvNdBackward52" convolution_param { num_output: 128 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward53_bn" type: "BatchNorm" bottom: "ConvNdBackward52" top: "BatchNormBackward53" } layer { name: "BatchNormBackward53_scale" type: "Scale" bottom: "BatchNormBackward53" top: "BatchNormBackward53" scale_param { bias_term: true } } layer { name: "AddBackward155" type: "Eltwise" bottom: "BatchNormBackward53" bottom: "AddBackward147" top: "AddBackward155" } layer { name: "ThresholdBackward56" type: "ReLU" bottom: "AddBackward155" top: "AddBackward155" } layer { name: "BatchNormBackward57_bn" type: "BatchNorm" bottom: "AddBackward155" top: "BatchNormBackward57" } layer { name: "BatchNormBackward57_scale" type: "Scale" bottom: "BatchNormBackward57" top: 
# Section: skip branch 1x1 conv (ConvNdBackward59, 128 ch — consumed later
# by AddBackward183); then the 256-channel downsampling block off
# AddBackward155: 3x3/stride-2 conv (ConvNdBackward61) + 3x3 conv, with a
# 1x1/stride-2 projection shortcut (ConvNdBackward67), merged at
# AddBackward169.
"BatchNormBackward57" scale_param { bias_term: true } } layer { name: "ThresholdBackward58" type: "ReLU" bottom: "BatchNormBackward57" top: "BatchNormBackward57" } layer { name: "ConvNdBackward59" type: "Convolution" bottom: "BatchNormBackward57" top: "ConvNdBackward59" convolution_param { num_output: 128 kernel_size: 1 pad: 0 stride: 1 bias_term: false } } layer { name: "ConvNdBackward61" type: "Convolution" bottom: "AddBackward155" top: "ConvNdBackward61" convolution_param { num_output: 256 kernel_size: 3 pad: 1 stride: 2 bias_term: false } } layer { name: "BatchNormBackward62_bn" type: "BatchNorm" bottom: "ConvNdBackward61" top: "BatchNormBackward62" } layer { name: "BatchNormBackward62_scale" type: "Scale" bottom: "BatchNormBackward62" top: "BatchNormBackward62" scale_param { bias_term: true } } layer { name: "ThresholdBackward63" type: "ReLU" bottom: "BatchNormBackward62" top: "BatchNormBackward62" } layer { name: "ConvNdBackward64" type: "Convolution" bottom: "BatchNormBackward62" top: "ConvNdBackward64" convolution_param { num_output: 256 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward65_bn" type: "BatchNorm" bottom: "ConvNdBackward64" top: "BatchNormBackward65" } layer { name: "BatchNormBackward65_scale" type: "Scale" bottom: "BatchNormBackward65" top: "BatchNormBackward65" scale_param { bias_term: true } } layer { name: "ConvNdBackward67" type: "Convolution" bottom: "AddBackward155" top: "ConvNdBackward67" convolution_param { num_output: 256 kernel_size: 1 pad: 0 stride: 2 bias_term: false } } layer { name: "BatchNormBackward68_bn" type: "BatchNorm" bottom: "ConvNdBackward67" top: "BatchNormBackward68" } layer { name: "BatchNormBackward68_scale" type: "Scale" bottom: "BatchNormBackward68" top: "BatchNormBackward68" scale_param { bias_term: true } } layer { name: "AddBackward169" type: "Eltwise" bottom: "BatchNormBackward65" bottom: "BatchNormBackward68" top: "AddBackward169" } layer { name: "ThresholdBackward70" type: 
# Section: 256-channel identity block (ConvNdBackward71/74, merged at
# AddBackward177); decoder head — BN/ReLU then a 1x1 conv reducing 256->128
# (ConvNdBackward81) and the first "Interp" upsample to 32x32 (Upsampling_1).
"ReLU" bottom: "AddBackward169" top: "AddBackward169" } layer { name: "ConvNdBackward71" type: "Convolution" bottom: "AddBackward169" top: "ConvNdBackward71" convolution_param { num_output: 256 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward72_bn" type: "BatchNorm" bottom: "ConvNdBackward71" top: "BatchNormBackward72" } layer { name: "BatchNormBackward72_scale" type: "Scale" bottom: "BatchNormBackward72" top: "BatchNormBackward72" scale_param { bias_term: true } } layer { name: "ThresholdBackward73" type: "ReLU" bottom: "BatchNormBackward72" top: "BatchNormBackward72" } layer { name: "ConvNdBackward74" type: "Convolution" bottom: "BatchNormBackward72" top: "ConvNdBackward74" convolution_param { num_output: 256 kernel_size: 3 pad: 1 stride: 1 bias_term: false } } layer { name: "BatchNormBackward75_bn" type: "BatchNorm" bottom: "ConvNdBackward74" top: "BatchNormBackward75" } layer { name: "BatchNormBackward75_scale" type: "Scale" bottom: "BatchNormBackward75" top: "BatchNormBackward75" scale_param { bias_term: true } } layer { name: "AddBackward177" type: "Eltwise" bottom: "BatchNormBackward75" bottom: "AddBackward169" top: "AddBackward177" } layer { name: "ThresholdBackward78" type: "ReLU" bottom: "AddBackward177" top: "AddBackward177" } layer { name: "BatchNormBackward79_bn" type: "BatchNorm" bottom: "AddBackward177" top: "BatchNormBackward79" } layer { name: "BatchNormBackward79_scale" type: "Scale" bottom: "BatchNormBackward79" top: "BatchNormBackward79" scale_param { bias_term: true } } layer { name: "ThresholdBackward80" type: "ReLU" bottom: "BatchNormBackward79" top: "BatchNormBackward79" } layer { name: "ConvNdBackward81" type: "Convolution" bottom: "BatchNormBackward79" top: "ConvNdBackward81" convolution_param { num_output: 128 kernel_size: 1 pad: 0 stride: 1 bias_term: false } } layer { name: "Upsampling_1" type: "Interp" bottom: "ConvNdBackward81" top: "Upsampling_1" interp_param { height: 32 width: 32 } } layer { 
# Section: decoder fusion 1 — Eltwise add of the ConvNdBackward59 skip with
# the upsampled path, BN/Scale/ReLU, dilated 3x3 conv (dilation 4, pad 4,
# keeps spatial size), Interp to 64x64; then fusion 2 with the 64-channel
# stage output (AddBackward137) via BN/ReLU and a 1x1 conv to 128 ch
# (ConvNdBackward91), merged at AddBackward192.
name: "AddBackward183" type: "Eltwise" bottom: "ConvNdBackward59" bottom: "Upsampling_1" top: "AddBackward183" } layer { name: "BatchNormBackward84_bn" type: "BatchNorm" bottom: "AddBackward183" top: "BatchNormBackward84" } layer { name: "BatchNormBackward84_scale" type: "Scale" bottom: "BatchNormBackward84" top: "BatchNormBackward84" scale_param { bias_term: true } } layer { name: "ThresholdBackward85" type: "ReLU" bottom: "BatchNormBackward84" top: "BatchNormBackward84" } layer { name: "ConvNdBackward86" type: "Convolution" bottom: "BatchNormBackward84" top: "ConvNdBackward86" convolution_param { dilation: 4 num_output: 128 kernel_size: 3 pad: 4 stride: 1 bias_term: false } } layer { name: "Upsampling_2" type: "Interp" bottom: "ConvNdBackward86" top: "Upsampling_2" interp_param { height: 64 width: 64 } } layer { name: "BatchNormBackward89_bn" type: "BatchNorm" bottom: "AddBackward137" top: "BatchNormBackward89" } layer { name: "BatchNormBackward89_scale" type: "Scale" bottom: "BatchNormBackward89" top: "BatchNormBackward89" scale_param { bias_term: true } } layer { name: "ThresholdBackward90" type: "ReLU" bottom: "BatchNormBackward89" top: "BatchNormBackward89" } layer { name: "ConvNdBackward91" type: "Convolution" bottom: "BatchNormBackward89" top: "ConvNdBackward91" convolution_param { num_output: 128 kernel_size: 1 pad: 0 stride: 1 bias_term: false } } layer { name: "AddBackward192" type: "Eltwise" bottom: "Upsampling_2" bottom: "ConvNdBackward91" top: "AddBackward192" } layer { name: "BatchNormBackward93_bn" type: "BatchNorm" bottom: "AddBackward192" top: "BatchNormBackward93" } layer { name: "BatchNormBackward93_scale" type: "Scale" bottom: "BatchNormBackward93" top: "BatchNormBackward93" scale_param { bias_term: true } } layer { name: "ThresholdBackward94" type: "ReLU" bottom: "BatchNormBackward93" top: "BatchNormBackward93" } layer { name: "ConvNdBackward95" type: "Convolution" bottom: "BatchNormBackward93" top: "ConvNdBackward95" convolution_param { 
# Section: dilated 3x3 conv (dilation 7, pad 7), Interp to 128x128; fusion 3
# with the 32-channel stage output (AddBackward119) via BN/ReLU + 1x1 conv
# (ConvNdBackward100), merged at AddBackward1101; final BN/ReLU and the
# 2-output 3x3 "Portrait" head (no bias_term given, so Caffe's default
# bias_term: true applies — the only conv here with a bias); Interp back to
# the 512x512 input resolution and a channel-wise Softmax over the 2 classes.
dilation: 7 num_output: 128 kernel_size: 3 pad: 7 stride: 1 bias_term: false } } layer { name: "Upsampling_3" type: "Interp" bottom: "ConvNdBackward95" top: "Upsampling_3" interp_param { height: 128 width: 128 } } layer { name: "BatchNormBackward98_bn" type: "BatchNorm" bottom: "AddBackward119" top: "BatchNormBackward98" } layer { name: "BatchNormBackward98_scale" type: "Scale" bottom: "BatchNormBackward98" top: "BatchNormBackward98" scale_param { bias_term: true } } layer { name: "ThresholdBackward99" type: "ReLU" bottom: "BatchNormBackward98" top: "BatchNormBackward98" } layer { name: "ConvNdBackward100" type: "Convolution" bottom: "BatchNormBackward98" top: "ConvNdBackward100" convolution_param { num_output: 128 kernel_size: 1 pad: 0 stride: 1 bias_term: false } } layer { name: "AddBackward1101" type: "Eltwise" bottom: "Upsampling_3" bottom: "ConvNdBackward100" top: "AddBackward1101" } layer { name: "BatchNormBackward102_bn" type: "BatchNorm" bottom: "AddBackward1101" top: "BatchNormBackward102" } layer { name: "BatchNormBackward102_scale" type: "Scale" bottom: "BatchNormBackward102" top: "BatchNormBackward102" scale_param { bias_term: true } } layer { name: "ThresholdBackward103" type: "ReLU" bottom: "BatchNormBackward102" top: "BatchNormBackward102" } layer { name: "ConvNdBackward104_Portrait" type: "Convolution" bottom: "BatchNormBackward102" top: "ConvNdBackward104_Portrait" convolution_param { num_output: 2 kernel_size: 3 pad: 1 stride: 1 } } layer { name: "Upsampling_out" type: "Interp" bottom: "ConvNdBackward104_Portrait" top: "Upsampling_out" interp_param { height: 512 width: 512 } } layer { name: "SoftmaxClassification" type: "Softmax" bottom: "Upsampling_out" top: "SoftmaxClassification" }