# Caffe prototxt (protobuf text format) — not YAML despite the file extension hint.
# MobileNet-style network exported from PyTorch: an initial 3x3 stride-2 conv,
# then alternating depthwise (group == channels, 3x3) and pointwise (1x1) convs,
# each followed by BatchNorm + Scale + ReLU; ends with 8x8 average pooling and
# a 62-way InnerProduct classifier. Input blob: 1 x 3 x 256 x 256.
name: "pytorch"
input: "data"
input_dim: 1
input_dim: 3
input_dim: 256
input_dim: 256

# --- stem: standard 3x3 conv, stride 2 ---
layer {
  name: "ConvNdBackward1"
  type: "Convolution"
  bottom: "data"
  top: "ConvNdBackward1"
  convolution_param {
    num_output: 8
    group: 1
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 2
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward2_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward1"
  top: "BatchNormBackward2"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward2_scale"
  type: "Scale"
  bottom: "BatchNormBackward2"
  top: "BatchNormBackward2"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward3"
  type: "ReLU"
  bottom: "BatchNormBackward2"
  top: "BatchNormBackward2"
}

# --- depthwise 3x3 (group == channels) ---
layer {
  name: "ConvNdBackward4"
  type: "Convolution"
  bottom: "BatchNormBackward2"
  top: "ConvNdBackward4"
  convolution_param {
    num_output: 8
    group: 8
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward5_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward4"
  top: "BatchNormBackward5"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward5_scale"
  type: "Scale"
  bottom: "BatchNormBackward5"
  top: "BatchNormBackward5"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward6"
  type: "ReLU"
  bottom: "BatchNormBackward5"
  top: "BatchNormBackward5"
}

# --- pointwise 1x1: 8 -> 16 ---
layer {
  name: "ConvNdBackward7"
  type: "Convolution"
  bottom: "BatchNormBackward5"
  top: "ConvNdBackward7"
  convolution_param {
    num_output: 16
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward8_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward7"
  top: "BatchNormBackward8"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward8_scale"
  type: "Scale"
  bottom: "BatchNormBackward8"
  top: "BatchNormBackward8"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward9"
  type: "ReLU"
  bottom: "BatchNormBackward8"
  top: "BatchNormBackward8"
}

# --- depthwise 3x3, stride 2 ---
layer {
  name: "ConvNdBackward10"
  type: "Convolution"
  bottom: "BatchNormBackward8"
  top: "ConvNdBackward10"
  convolution_param {
    num_output: 16
    group: 16
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 2
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward11_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward10"
  top: "BatchNormBackward11"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward11_scale"
  type: "Scale"
  bottom: "BatchNormBackward11"
  top: "BatchNormBackward11"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward12"
  type: "ReLU"
  bottom: "BatchNormBackward11"
  top: "BatchNormBackward11"
}

# --- pointwise 1x1: 16 -> 32 ---
layer {
  name: "ConvNdBackward13"
  type: "Convolution"
  bottom: "BatchNormBackward11"
  top: "ConvNdBackward13"
  convolution_param {
    num_output: 32
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward14_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward13"
  top: "BatchNormBackward14"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward14_scale"
  type: "Scale"
  bottom: "BatchNormBackward14"
  top: "BatchNormBackward14"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward15"
  type: "ReLU"
  bottom: "BatchNormBackward14"
  top: "BatchNormBackward14"
}

# --- depthwise 3x3 ---
layer {
  name: "ConvNdBackward16"
  type: "Convolution"
  bottom: "BatchNormBackward14"
  top: "ConvNdBackward16"
  convolution_param {
    num_output: 32
    group: 32
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward17_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward16"
  top: "BatchNormBackward17"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward17_scale"
  type: "Scale"
  bottom: "BatchNormBackward17"
  top: "BatchNormBackward17"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward18"
  type: "ReLU"
  bottom: "BatchNormBackward17"
  top: "BatchNormBackward17"
}

# --- pointwise 1x1: 32 -> 32 ---
layer {
  name: "ConvNdBackward19"
  type: "Convolution"
  bottom: "BatchNormBackward17"
  top: "ConvNdBackward19"
  convolution_param {
    num_output: 32
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward20_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward19"
  top: "BatchNormBackward20"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward20_scale"
  type: "Scale"
  bottom: "BatchNormBackward20"
  top: "BatchNormBackward20"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward21"
  type: "ReLU"
  bottom: "BatchNormBackward20"
  top: "BatchNormBackward20"
}

# --- depthwise 3x3, stride 2 ---
layer {
  name: "ConvNdBackward22"
  type: "Convolution"
  bottom: "BatchNormBackward20"
  top: "ConvNdBackward22"
  convolution_param {
    num_output: 32
    group: 32
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 2
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward23_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward22"
  top: "BatchNormBackward23"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward23_scale"
  type: "Scale"
  bottom: "BatchNormBackward23"
  top: "BatchNormBackward23"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward24"
  type: "ReLU"
  bottom: "BatchNormBackward23"
  top: "BatchNormBackward23"
}

# --- pointwise 1x1: 32 -> 64 ---
layer {
  name: "ConvNdBackward25"
  type: "Convolution"
  bottom: "BatchNormBackward23"
  top: "ConvNdBackward25"
  convolution_param {
    num_output: 64
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward26_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward25"
  top: "BatchNormBackward26"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward26_scale"
  type: "Scale"
  bottom: "BatchNormBackward26"
  top: "BatchNormBackward26"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward27"
  type: "ReLU"
  bottom: "BatchNormBackward26"
  top: "BatchNormBackward26"
}

# --- depthwise 3x3 ---
layer {
  name: "ConvNdBackward28"
  type: "Convolution"
  bottom: "BatchNormBackward26"
  top: "ConvNdBackward28"
  convolution_param {
    num_output: 64
    group: 64
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward29_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward28"
  top: "BatchNormBackward29"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward29_scale"
  type: "Scale"
  bottom: "BatchNormBackward29"
  top: "BatchNormBackward29"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward30"
  type: "ReLU"
  bottom: "BatchNormBackward29"
  top: "BatchNormBackward29"
}

# --- pointwise 1x1: 64 -> 64 ---
layer {
  name: "ConvNdBackward31"
  type: "Convolution"
  bottom: "BatchNormBackward29"
  top: "ConvNdBackward31"
  convolution_param {
    num_output: 64
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward32_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward31"
  top: "BatchNormBackward32"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward32_scale"
  type: "Scale"
  bottom: "BatchNormBackward32"
  top: "BatchNormBackward32"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward33"
  type: "ReLU"
  bottom: "BatchNormBackward32"
  top: "BatchNormBackward32"
}

# --- depthwise 3x3, stride 2 ---
layer {
  name: "ConvNdBackward34"
  type: "Convolution"
  bottom: "BatchNormBackward32"
  top: "ConvNdBackward34"
  convolution_param {
    num_output: 64
    group: 64
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 2
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward35_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward34"
  top: "BatchNormBackward35"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward35_scale"
  type: "Scale"
  bottom: "BatchNormBackward35"
  top: "BatchNormBackward35"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward36"
  type: "ReLU"
  bottom: "BatchNormBackward35"
  top: "BatchNormBackward35"
}

# --- pointwise 1x1: 64 -> 128 ---
layer {
  name: "ConvNdBackward37"
  type: "Convolution"
  bottom: "BatchNormBackward35"
  top: "ConvNdBackward37"
  convolution_param {
    num_output: 128
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward38_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward37"
  top: "BatchNormBackward38"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward38_scale"
  type: "Scale"
  bottom: "BatchNormBackward38"
  top: "BatchNormBackward38"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward39"
  type: "ReLU"
  bottom: "BatchNormBackward38"
  top: "BatchNormBackward38"
}

# --- 128-channel depthwise/pointwise stack (5 repeats) ---
layer {
  name: "ConvNdBackward40"
  type: "Convolution"
  bottom: "BatchNormBackward38"
  top: "ConvNdBackward40"
  convolution_param {
    num_output: 128
    group: 128
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward41_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward40"
  top: "BatchNormBackward41"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward41_scale"
  type: "Scale"
  bottom: "BatchNormBackward41"
  top: "BatchNormBackward41"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward42"
  type: "ReLU"
  bottom: "BatchNormBackward41"
  top: "BatchNormBackward41"
}
layer {
  name: "ConvNdBackward43"
  type: "Convolution"
  bottom: "BatchNormBackward41"
  top: "ConvNdBackward43"
  convolution_param {
    num_output: 128
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward44_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward43"
  top: "BatchNormBackward44"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward44_scale"
  type: "Scale"
  bottom: "BatchNormBackward44"
  top: "BatchNormBackward44"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward45"
  type: "ReLU"
  bottom: "BatchNormBackward44"
  top: "BatchNormBackward44"
}
layer {
  name: "ConvNdBackward46"
  type: "Convolution"
  bottom: "BatchNormBackward44"
  top: "ConvNdBackward46"
  convolution_param {
    num_output: 128
    group: 128
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward47_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward46"
  top: "BatchNormBackward47"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward47_scale"
  type: "Scale"
  bottom: "BatchNormBackward47"
  top: "BatchNormBackward47"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward48"
  type: "ReLU"
  bottom: "BatchNormBackward47"
  top: "BatchNormBackward47"
}
layer {
  name: "ConvNdBackward49"
  type: "Convolution"
  bottom: "BatchNormBackward47"
  top: "ConvNdBackward49"
  convolution_param {
    num_output: 128
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward50_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward49"
  top: "BatchNormBackward50"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward50_scale"
  type: "Scale"
  bottom: "BatchNormBackward50"
  top: "BatchNormBackward50"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward51"
  type: "ReLU"
  bottom: "BatchNormBackward50"
  top: "BatchNormBackward50"
}
layer {
  name: "ConvNdBackward52"
  type: "Convolution"
  bottom: "BatchNormBackward50"
  top: "ConvNdBackward52"
  convolution_param {
    num_output: 128
    group: 128
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward53_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward52"
  top: "BatchNormBackward53"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward53_scale"
  type: "Scale"
  bottom: "BatchNormBackward53"
  top: "BatchNormBackward53"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward54"
  type: "ReLU"
  bottom: "BatchNormBackward53"
  top: "BatchNormBackward53"
}
layer {
  name: "ConvNdBackward55"
  type: "Convolution"
  bottom: "BatchNormBackward53"
  top: "ConvNdBackward55"
  convolution_param {
    num_output: 128
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward56_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward55"
  top: "BatchNormBackward56"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward56_scale"
  type: "Scale"
  bottom: "BatchNormBackward56"
  top: "BatchNormBackward56"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward57"
  type: "ReLU"
  bottom: "BatchNormBackward56"
  top: "BatchNormBackward56"
}
layer {
  name: "ConvNdBackward58"
  type: "Convolution"
  bottom: "BatchNormBackward56"
  top: "ConvNdBackward58"
  convolution_param {
    num_output: 128
    group: 128
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward59_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward58"
  top: "BatchNormBackward59"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward59_scale"
  type: "Scale"
  bottom: "BatchNormBackward59"
  top: "BatchNormBackward59"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward60"
  type: "ReLU"
  bottom: "BatchNormBackward59"
  top: "BatchNormBackward59"
}
layer {
  name: "ConvNdBackward61"
  type: "Convolution"
  bottom: "BatchNormBackward59"
  top: "ConvNdBackward61"
  convolution_param {
    num_output: 128
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward62_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward61"
  top: "BatchNormBackward62"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward62_scale"
  type: "Scale"
  bottom: "BatchNormBackward62"
  top: "BatchNormBackward62"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward63"
  type: "ReLU"
  bottom: "BatchNormBackward62"
  top: "BatchNormBackward62"
}
layer {
  name: "ConvNdBackward64"
  type: "Convolution"
  bottom: "BatchNormBackward62"
  top: "ConvNdBackward64"
  convolution_param {
    num_output: 128
    group: 128
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward65_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward64"
  top: "BatchNormBackward65"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward65_scale"
  type: "Scale"
  bottom: "BatchNormBackward65"
  top: "BatchNormBackward65"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward66"
  type: "ReLU"
  bottom: "BatchNormBackward65"
  top: "BatchNormBackward65"
}
layer {
  name: "ConvNdBackward67"
  type: "Convolution"
  bottom: "BatchNormBackward65"
  top: "ConvNdBackward67"
  convolution_param {
    num_output: 128
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward68_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward67"
  top: "BatchNormBackward68"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward68_scale"
  type: "Scale"
  bottom: "BatchNormBackward68"
  top: "BatchNormBackward68"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward69"
  type: "ReLU"
  bottom: "BatchNormBackward68"
  top: "BatchNormBackward68"
}

# --- depthwise 3x3, stride 2 ---
layer {
  name: "ConvNdBackward70"
  type: "Convolution"
  bottom: "BatchNormBackward68"
  top: "ConvNdBackward70"
  convolution_param {
    num_output: 128
    group: 128
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 2
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward71_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward70"
  top: "BatchNormBackward71"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward71_scale"
  type: "Scale"
  bottom: "BatchNormBackward71"
  top: "BatchNormBackward71"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward72"
  type: "ReLU"
  bottom: "BatchNormBackward71"
  top: "BatchNormBackward71"
}

# --- pointwise 1x1: 128 -> 256 ---
layer {
  name: "ConvNdBackward73"
  type: "Convolution"
  bottom: "BatchNormBackward71"
  top: "ConvNdBackward73"
  convolution_param {
    num_output: 256
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward74_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward73"
  top: "BatchNormBackward74"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward74_scale"
  type: "Scale"
  bottom: "BatchNormBackward74"
  top: "BatchNormBackward74"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward75"
  type: "ReLU"
  bottom: "BatchNormBackward74"
  top: "BatchNormBackward74"
}

# --- depthwise 3x3 ---
layer {
  name: "ConvNdBackward76"
  type: "Convolution"
  bottom: "BatchNormBackward74"
  top: "ConvNdBackward76"
  convolution_param {
    num_output: 256
    group: 256
    pad_h: 1
    pad_w: 1
    kernel_h: 3
    kernel_w: 3
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward77_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward76"
  top: "BatchNormBackward77"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward77_scale"
  type: "Scale"
  bottom: "BatchNormBackward77"
  top: "BatchNormBackward77"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward78"
  type: "ReLU"
  bottom: "BatchNormBackward77"
  top: "BatchNormBackward77"
}

# --- pointwise 1x1: 256 -> 256 ---
layer {
  name: "ConvNdBackward79"
  type: "Convolution"
  bottom: "BatchNormBackward77"
  top: "ConvNdBackward79"
  convolution_param {
    num_output: 256
    group: 1
    pad_h: 0
    pad_w: 0
    kernel_h: 1
    kernel_w: 1
    stride: 1
    dilation: 1
    bias_term: false
  }
}
layer {
  name: "BatchNormBackward80_bn"
  type: "BatchNorm"
  bottom: "ConvNdBackward79"
  top: "BatchNormBackward80"
  batch_norm_param {
    use_global_stats: true
    eps: 1e-05
  }
}
layer {
  name: "BatchNormBackward80_scale"
  type: "Scale"
  bottom: "BatchNormBackward80"
  top: "BatchNormBackward80"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "ThresholdBackward81"
  type: "ReLU"
  bottom: "BatchNormBackward80"
  top: "BatchNormBackward80"
}

# --- head: global 8x8 average pool (5 stride-2 convs reduce 256 -> 8),
#     then 62-way fully connected classifier ---
layer {
  name: "AvgPool2dBackward82"
  type: "Pooling"
  bottom: "BatchNormBackward80"
  top: "AvgPool2dBackward82"
  pooling_param {
    pool: AVE
    kernel_size: 8
    stride: 8
    pad: 0
  }
}
layer {
  name: "AddmmBackward83"
  type: "InnerProduct"
  bottom: "AvgPool2dBackward82"
  top: "AddmmBackward83"
  inner_product_param {
    num_output: 62
  }
}