# U-Net exported from PyTorch as a Caffe prototxt.
# Topology: 4 encoder stages (16/32/64/128 ch, each = 2x [Conv3x3 + BN + Scale + ReLU],
# then MaxPool 2x2), a 256-ch bottleneck, and 4 decoder stages (Deconv 2x2 stride 2,
# concat with the matching encoder skip, then 2x [Conv3x3 + ReLU]), finished by a
# 1x1 Conv producing 17 output channels at full input resolution.
# All BatchNorm layers run with use_global_stats (inference mode); Scale layers
# supply the learned gamma/beta that Caffe's BatchNorm does not include.
name: "pytorch"

# Legacy Caffe input spec: one input blob "data" of shape 1 x 3 x 256 x 256 (NCHW).
input: "data"
input_dim: 1
input_dim: 3
input_dim: 256
input_dim: 256

# ---------------- Encoder stage 1: 3 -> 16 channels ----------------
layer {
  name: "ConvNdBackward1"
  type: "Convolution"
  bottom: "data"
  top: "ConvNdBackward1"
  convolution_param { num_output: 16 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward2_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward1"
  top: "BatchNormBackward2"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward2_scale"
  type: "Scale"
  bottom: "BatchNormBackward2"
  top: "BatchNormBackward2"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward3"
  type: "ReLU"
  bottom: "BatchNormBackward2"
  top: "BatchNormBackward2"
}
layer {
  name: "ConvNdBackward4"
  type: "Convolution"
  bottom: "BatchNormBackward2"
  top: "ConvNdBackward4"
  convolution_param { num_output: 16 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward5_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward4"
  top: "BatchNormBackward5"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward5_scale"
  type: "Scale"
  bottom: "BatchNormBackward5"
  top: "BatchNormBackward5"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward6"
  type: "ReLU"
  bottom: "BatchNormBackward5"
  top: "BatchNormBackward5"
}
# BatchNormBackward5 is kept as the stage-1 skip connection (used by ConcatBackward58).
layer {
  name: "MaxPool2dBackward8"
  type: "Pooling"
  bottom: "BatchNormBackward5"
  top: "MaxPool2dBackward8"
  pooling_param { pool: MAX kernel_size: 2 stride: 2 pad: 0 }
}

# ---------------- Encoder stage 2: 16 -> 32 channels ----------------
layer {
  name: "ConvNdBackward9"
  type: "Convolution"
  bottom: "MaxPool2dBackward8"
  top: "ConvNdBackward9"
  convolution_param { num_output: 32 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward10_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward9"
  top: "BatchNormBackward10"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward10_scale"
  type: "Scale"
  bottom: "BatchNormBackward10"
  top: "BatchNormBackward10"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward11"
  type: "ReLU"
  bottom: "BatchNormBackward10"
  top: "BatchNormBackward10"
}
layer {
  name: "ConvNdBackward12"
  type: "Convolution"
  bottom: "BatchNormBackward10"
  top: "ConvNdBackward12"
  convolution_param { num_output: 32 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward13_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward12"
  top: "BatchNormBackward13"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward13_scale"
  type: "Scale"
  bottom: "BatchNormBackward13"
  top: "BatchNormBackward13"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward14"
  type: "ReLU"
  bottom: "BatchNormBackward13"
  top: "BatchNormBackward13"
}
# BatchNormBackward13 is kept as the stage-2 skip connection (used by ConcatBackward52).
layer {
  name: "MaxPool2dBackward16"
  type: "Pooling"
  bottom: "BatchNormBackward13"
  top: "MaxPool2dBackward16"
  pooling_param { pool: MAX kernel_size: 2 stride: 2 pad: 0 }
}

# ---------------- Encoder stage 3: 32 -> 64 channels ----------------
layer {
  name: "ConvNdBackward17"
  type: "Convolution"
  bottom: "MaxPool2dBackward16"
  top: "ConvNdBackward17"
  convolution_param { num_output: 64 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward18_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward17"
  top: "BatchNormBackward18"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward18_scale"
  type: "Scale"
  bottom: "BatchNormBackward18"
  top: "BatchNormBackward18"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward19"
  type: "ReLU"
  bottom: "BatchNormBackward18"
  top: "BatchNormBackward18"
}
layer {
  name: "ConvNdBackward20"
  type: "Convolution"
  bottom: "BatchNormBackward18"
  top: "ConvNdBackward20"
  convolution_param { num_output: 64 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward21_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward20"
  top: "BatchNormBackward21"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward21_scale"
  type: "Scale"
  bottom: "BatchNormBackward21"
  top: "BatchNormBackward21"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward22"
  type: "ReLU"
  bottom: "BatchNormBackward21"
  top: "BatchNormBackward21"
}
# BatchNormBackward21 is kept as the stage-3 skip connection (used by ConcatBackward46).
layer {
  name: "MaxPool2dBackward24"
  type: "Pooling"
  bottom: "BatchNormBackward21"
  top: "MaxPool2dBackward24"
  pooling_param { pool: MAX kernel_size: 2 stride: 2 pad: 0 }
}

# ---------------- Encoder stage 4: 64 -> 128 channels ----------------
layer {
  name: "ConvNdBackward25"
  type: "Convolution"
  bottom: "MaxPool2dBackward24"
  top: "ConvNdBackward25"
  convolution_param { num_output: 128 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward26_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward25"
  top: "BatchNormBackward26"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward26_scale"
  type: "Scale"
  bottom: "BatchNormBackward26"
  top: "BatchNormBackward26"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward27"
  type: "ReLU"
  bottom: "BatchNormBackward26"
  top: "BatchNormBackward26"
}
layer {
  name: "ConvNdBackward28"
  type: "Convolution"
  bottom: "BatchNormBackward26"
  top: "ConvNdBackward28"
  convolution_param { num_output: 128 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward29_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward28"
  top: "BatchNormBackward29"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward29_scale"
  type: "Scale"
  bottom: "BatchNormBackward29"
  top: "BatchNormBackward29"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward30"
  type: "ReLU"
  bottom: "BatchNormBackward29"
  top: "BatchNormBackward29"
}
# BatchNormBackward29 is kept as the stage-4 skip connection (used by ConcatBackward40).
layer {
  name: "MaxPool2dBackward32"
  type: "Pooling"
  bottom: "BatchNormBackward29"
  top: "MaxPool2dBackward32"
  pooling_param { pool: MAX kernel_size: 2 stride: 2 pad: 0 }
}

# ---------------- Bottleneck: 128 -> 256 channels ----------------
layer {
  name: "ConvNdBackward33"
  type: "Convolution"
  bottom: "MaxPool2dBackward32"
  top: "ConvNdBackward33"
  convolution_param { num_output: 256 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward34_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward33"
  top: "BatchNormBackward34"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward34_scale"
  type: "Scale"
  bottom: "BatchNormBackward34"
  top: "BatchNormBackward34"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward35"
  type: "ReLU"
  bottom: "BatchNormBackward34"
  top: "BatchNormBackward34"
}
layer {
  name: "ConvNdBackward36"
  type: "Convolution"
  bottom: "BatchNormBackward34"
  top: "ConvNdBackward36"
  convolution_param { num_output: 256 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "BatchNormBackward37_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward36"
  top: "BatchNormBackward37"
  batch_norm_param { use_global_stats: true eps: 1e-05 }
}
layer {
  name: "BatchNormBackward37_scale"
  type: "Scale"
  bottom: "BatchNormBackward37"
  top: "BatchNormBackward37"
  scale_param { bias_term: true }
}
layer {
  name: "ThresholdBackward38"
  type: "ReLU"
  bottom: "BatchNormBackward37"
  top: "BatchNormBackward37"
}

# ---------------- Decoder stage 1: upsample to 128, concat with stage-4 skip ----------------
layer {
  name: "ConvNdBackward39"
  type: "Deconvolution"
  bottom: "BatchNormBackward37"
  top: "ConvNdBackward39"
  convolution_param { num_output: 128 group: 1 pad_h: 0 pad_w: 0 kernel_h: 2 kernel_w: 2 stride: 2 dilation: 1 }
}
layer {
  name: "ConcatBackward40"
  type: "Concat"
  bottom: "BatchNormBackward29"
  bottom: "ConvNdBackward39"
  top: "ConcatBackward40"
  concat_param { axis: 1 }
}
layer {
  name: "ConvNdBackward41"
  type: "Convolution"
  bottom: "ConcatBackward40"
  top: "ConvNdBackward41"
  convolution_param { num_output: 128 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward42"
  type: "ReLU"
  bottom: "ConvNdBackward41"
  top: "ConvNdBackward41"
}
layer {
  name: "ConvNdBackward43"
  type: "Convolution"
  bottom: "ConvNdBackward41"
  top: "ConvNdBackward43"
  convolution_param { num_output: 128 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward44"
  type: "ReLU"
  bottom: "ConvNdBackward43"
  top: "ConvNdBackward43"
}

# ---------------- Decoder stage 2: upsample to 64, concat with stage-3 skip ----------------
layer {
  name: "ConvNdBackward45"
  type: "Deconvolution"
  bottom: "ConvNdBackward43"
  top: "ConvNdBackward45"
  convolution_param { num_output: 64 group: 1 pad_h: 0 pad_w: 0 kernel_h: 2 kernel_w: 2 stride: 2 dilation: 1 }
}
layer {
  name: "ConcatBackward46"
  type: "Concat"
  bottom: "BatchNormBackward21"
  bottom: "ConvNdBackward45"
  top: "ConcatBackward46"
  concat_param { axis: 1 }
}
layer {
  name: "ConvNdBackward47"
  type: "Convolution"
  bottom: "ConcatBackward46"
  top: "ConvNdBackward47"
  convolution_param { num_output: 64 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward48"
  type: "ReLU"
  bottom: "ConvNdBackward47"
  top: "ConvNdBackward47"
}
layer {
  name: "ConvNdBackward49"
  type: "Convolution"
  bottom: "ConvNdBackward47"
  top: "ConvNdBackward49"
  convolution_param { num_output: 64 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward50"
  type: "ReLU"
  bottom: "ConvNdBackward49"
  top: "ConvNdBackward49"
}

# ---------------- Decoder stage 3: upsample to 32, concat with stage-2 skip ----------------
layer {
  name: "ConvNdBackward51"
  type: "Deconvolution"
  bottom: "ConvNdBackward49"
  top: "ConvNdBackward51"
  convolution_param { num_output: 32 group: 1 pad_h: 0 pad_w: 0 kernel_h: 2 kernel_w: 2 stride: 2 dilation: 1 }
}
layer {
  name: "ConcatBackward52"
  type: "Concat"
  bottom: "BatchNormBackward13"
  bottom: "ConvNdBackward51"
  top: "ConcatBackward52"
  concat_param { axis: 1 }
}
layer {
  name: "ConvNdBackward53"
  type: "Convolution"
  bottom: "ConcatBackward52"
  top: "ConvNdBackward53"
  convolution_param { num_output: 32 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward54"
  type: "ReLU"
  bottom: "ConvNdBackward53"
  top: "ConvNdBackward53"
}
layer {
  name: "ConvNdBackward55"
  type: "Convolution"
  bottom: "ConvNdBackward53"
  top: "ConvNdBackward55"
  convolution_param { num_output: 32 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward56"
  type: "ReLU"
  bottom: "ConvNdBackward55"
  top: "ConvNdBackward55"
}

# ---------------- Decoder stage 4: upsample to 16, concat with stage-1 skip ----------------
layer {
  name: "ConvNdBackward57"
  type: "Deconvolution"
  bottom: "ConvNdBackward55"
  top: "ConvNdBackward57"
  convolution_param { num_output: 16 group: 1 pad_h: 0 pad_w: 0 kernel_h: 2 kernel_w: 2 stride: 2 dilation: 1 }
}
layer {
  name: "ConcatBackward58"
  type: "Concat"
  bottom: "BatchNormBackward5"
  bottom: "ConvNdBackward57"
  top: "ConcatBackward58"
  concat_param { axis: 1 }
}
layer {
  name: "ConvNdBackward59"
  type: "Convolution"
  bottom: "ConcatBackward58"
  top: "ConvNdBackward59"
  convolution_param { num_output: 16 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward60"
  type: "ReLU"
  bottom: "ConvNdBackward59"
  top: "ConvNdBackward59"
}
layer {
  name: "ConvNdBackward61"
  type: "Convolution"
  bottom: "ConvNdBackward59"
  top: "ConvNdBackward61"
  convolution_param { num_output: 16 group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride: 1 dilation: 1 }
}
layer {
  name: "ThresholdBackward62"
  type: "ReLU"
  bottom: "ConvNdBackward61"
  top: "ConvNdBackward61"
}

# ---------------- Output head: 1x1 conv to 17 channels ----------------
layer {
  name: "ConvNdBackward63"
  type: "Convolution"
  bottom: "ConvNdBackward61"
  top: "ConvNdBackward63"
  convolution_param { num_output: 17 group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride: 1 dilation: 1 }
}