layer { name: "input" type: "Input" top: "data" input_param { shape { dim: 1 dim: 1 dim: 32 dim: 512 } } } layer { name: "power-data" type: "Power" bottom: "data" top: "0" power_param { scale: 0.0039215686274 shift: -0.67 } } layer { name: "326_Conv" type: "Convolution" bottom: "0" top: "326" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "327_BatchNormalization_bn" type: "BatchNorm" bottom: "326" top: "327" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "327_BatchNormalization" type: "Scale" bottom: "327" top: "327" scale_param { bias_term: true } } layer { name: "328_Relu" type: "ReLU" bottom: "327" top: "328" } layer { name: "329_Conv" type: "Convolution" bottom: "328" top: "329" convolution_param { num_output: 32 bias_term: false group: 32 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "330_BatchNormalization_bn" type: "BatchNorm" bottom: "329" top: "330" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "330_BatchNormalization" type: "Scale" bottom: "330" top: "330" scale_param { bias_term: true } } layer { name: "331_Relu" type: "ReLU" bottom: "330" top: "331" } layer { name: "332_Conv" type: "Convolution" bottom: "331" top: "332" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "333_BatchNormalization_bn" type: "BatchNorm" bottom: "332" top: "333" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "333_BatchNormalization" type: "Scale" bottom: "333" top: "333" scale_param { bias_term: true } } layer { name: "334_Conv" type: "Convolution" bottom: "333" top: "334" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "335_BatchNormalization_bn" type: "BatchNorm" bottom: "334" top: "335" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "335_BatchNormalization" type: "Scale" bottom: "335" top: "335" scale_param { bias_term: true } } layer { name: "336_Relu" type: "ReLU" bottom: "335" top: "336" } layer { name: "337_Conv" type: "Convolution" bottom: "336" top: "337" convolution_param { num_output: 96 bias_term: false group: 96 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "338_BatchNormalization_bn" type: "BatchNorm" bottom: "337" top: "338" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "338_BatchNormalization" type: "Scale" bottom: "338" top: "338" scale_param { bias_term: true } } layer { name: "339_Relu" type: "ReLU" bottom: "338" top: "339" } layer { name: "340_Conv" type: "Convolution" bottom: "339" top: "340" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "341_BatchNormalization_bn" type: "BatchNorm" bottom: "340" top: "341" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "341_BatchNormalization" type: "Scale" bottom: "341" top: "341" scale_param { bias_term: true } } layer { name: "342_Conv" type: "Convolution" bottom: "341" top: "342" convolution_param { num_output: 72 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 
1 dilation: 1 } } layer { name: "343_BatchNormalization_bn" type: "BatchNorm" bottom: "342" top: "343" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "343_BatchNormalization" type: "Scale" bottom: "343" top: "343" scale_param { bias_term: true } } layer { name: "344_Relu" type: "ReLU" bottom: "343" top: "344" } layer { name: "345_Conv" type: "Convolution" bottom: "344" top: "345" convolution_param { num_output: 72 bias_term: false group: 72 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "346_BatchNormalization_bn" type: "BatchNorm" bottom: "345" top: "346" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "346_BatchNormalization" type: "Scale" bottom: "346" top: "346" scale_param { bias_term: true } } layer { name: "347_Relu" type: "ReLU" bottom: "346" top: "347" } layer { name: "348_Conv" type: "Convolution" bottom: "347" top: "348" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "349_BatchNormalization_bn" type: "BatchNorm" bottom: "348" top: "349" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "349_BatchNormalization" type: "Scale" bottom: "349" top: "349" scale_param { bias_term: true } } layer { name: "350_Add" type: "Eltwise" bottom: "349" bottom: "341" top: "350" eltwise_param { operation: SUM } } layer { name: "351_Conv" type: "Convolution" bottom: "350" top: "351" convolution_param { num_output: 72 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "352_BatchNormalization_bn" type: "BatchNorm" bottom: "351" top: "352" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "352_BatchNormalization" type: "Scale" bottom: "352" top: "352" scale_param { bias_term: true } } layer { name: "353_Relu" type: "ReLU" bottom: "352" top: "353" } layer { name: "354_Conv" type: "Convolution" bottom: "353" top: "354" convolution_param { num_output: 72 bias_term: false group: 72 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "355_BatchNormalization_bn" type: "BatchNorm" bottom: "354" top: "355" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "355_BatchNormalization" type: "Scale" bottom: "355" top: "355" scale_param { bias_term: true } } layer { name: "356_Relu" type: "ReLU" bottom: "355" top: "356" } layer { name: "357_Conv" type: "Convolution" bottom: "356" top: "357" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "358_BatchNormalization_bn" type: "BatchNorm" bottom: "357" top: "358" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "358_BatchNormalization" type: "Scale" bottom: "358" top: "358" scale_param { bias_term: true } } layer { name: "359_Add" type: "Eltwise" bottom: "358" bottom: "350" top: "359" eltwise_param { operation: SUM } } layer { name: "360_Conv" type: "Convolution" bottom: "359" top: "360" convolution_param { num_output: 72 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "361_BatchNormalization_bn" type: "BatchNorm" bottom: "360" top: "361" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: 
"361_BatchNormalization" type: "Scale" bottom: "361" top: "361" scale_param { bias_term: true } } layer { name: "362_Relu" type: "ReLU" bottom: "361" top: "362" } layer { name: "363_Conv" type: "Convolution" bottom: "362" top: "363" convolution_param { num_output: 72 bias_term: false group: 72 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "364_BatchNormalization_bn" type: "BatchNorm" bottom: "363" top: "364" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "364_BatchNormalization" type: "Scale" bottom: "364" top: "364" scale_param { bias_term: true } } layer { name: "365_Relu" type: "ReLU" bottom: "364" top: "365" } layer { name: "366_Conv" type: "Convolution" bottom: "365" top: "366" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "367_BatchNormalization_bn" type: "BatchNorm" bottom: "366" top: "367" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "367_BatchNormalization" type: "Scale" bottom: "367" top: "367" scale_param { bias_term: true } } layer { name: "368_Conv" type: "Convolution" bottom: "367" top: "368" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "369_BatchNormalization_bn" type: "BatchNorm" bottom: "368" top: "369" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "369_BatchNormalization" type: "Scale" bottom: "369" top: "369" scale_param { bias_term: true } } layer { name: "370_Relu" type: "ReLU" bottom: "369" top: "370" } layer { name: "371_Conv" type: "Convolution" bottom: "370" top: "371" convolution_param { num_output: 96 bias_term: false group: 96 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "372_BatchNormalization_bn" type: "BatchNorm" bottom: "371" top: "372" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "372_BatchNormalization" type: "Scale" bottom: "372" top: "372" scale_param { bias_term: true } } layer { name: "373_Relu" type: "ReLU" bottom: "372" top: "373" } layer { name: "374_Conv" type: "Convolution" bottom: "373" top: "374" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "375_BatchNormalization_bn" type: "BatchNorm" bottom: "374" top: "375" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "375_BatchNormalization" type: "Scale" bottom: "375" top: "375" scale_param { bias_term: true } } layer { name: "376_Add" type: "Eltwise" bottom: "375" bottom: "367" top: "376" eltwise_param { operation: SUM } } layer { name: "377_Conv" type: "Convolution" bottom: "376" top: "377" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "378_BatchNormalization_bn" type: "BatchNorm" bottom: "377" top: "378" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "378_BatchNormalization" type: "Scale" bottom: "378" top: "378" scale_param { bias_term: true } } layer { name: "379_Relu" type: "ReLU" bottom: "378" top: "379" } layer { name: "380_Conv" type: "Convolution" bottom: "379" top: "380" convolution_param { num_output: 96 bias_term: false group: 96 pad_h: 2 
pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "381_BatchNormalization_bn" type: "BatchNorm" bottom: "380" top: "381" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "381_BatchNormalization" type: "Scale" bottom: "381" top: "381" scale_param { bias_term: true } } layer { name: "382_Relu" type: "ReLU" bottom: "381" top: "382" } layer { name: "383_Conv" type: "Convolution" bottom: "382" top: "383" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "384_BatchNormalization_bn" type: "BatchNorm" bottom: "383" top: "384" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "384_BatchNormalization" type: "Scale" bottom: "384" top: "384" scale_param { bias_term: true } } layer { name: "385_Add" type: "Eltwise" bottom: "384" bottom: "376" top: "385" eltwise_param { operation: SUM } } layer { name: "386_Conv" type: "Convolution" bottom: "385" top: "386" convolution_param { num_output: 192 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "387_BatchNormalization_bn" type: "BatchNorm" bottom: "386" top: "387" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "387_BatchNormalization" type: "Scale" bottom: "387" top: "387" scale_param { bias_term: true } } layer { name: "388_Relu" type: "ReLU" bottom: "387" top: "388" } layer { name: "389_Conv" type: "Convolution" bottom: "388" top: "389" convolution_param { num_output: 192 bias_term: false group: 192 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 2 stride_w: 1 dilation: 1 } } layer { name: "390_BatchNormalization_bn" type: "BatchNorm" bottom: "389" top: "390" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "390_BatchNormalization" type: "Scale" bottom: "390" top: "390" scale_param { bias_term: true } } layer { name: "391_Relu" type: "ReLU" bottom: "390" top: "391" } layer { name: "392_Conv" type: "Convolution" bottom: "391" top: "392" convolution_param { num_output: 48 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "393_BatchNormalization_bn" type: "BatchNorm" bottom: "392" top: "393" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "393_BatchNormalization" type: "Scale" bottom: "393" top: "393" scale_param { bias_term: true } } layer { name: "394_Conv" type: "Convolution" bottom: "393" top: "394" convolution_param { num_output: 288 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "395_BatchNormalization_bn" type: "BatchNorm" bottom: "394" top: "395" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "395_BatchNormalization" type: "Scale" bottom: "395" top: "395" scale_param { bias_term: true } } layer { name: "396_Relu" type: "ReLU" bottom: "395" top: "396" } layer { name: "397_Conv" type: "Convolution" bottom: "396" top: "397" convolution_param { num_output: 288 bias_term: false group: 288 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "398_BatchNormalization_bn" type: "BatchNorm" bottom: "397" top: "398" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "398_BatchNormalization" type: "Scale" bottom: "398" top: 
"398" scale_param { bias_term: true } } layer { name: "399_Relu" type: "ReLU" bottom: "398" top: "399" } layer { name: "400_Conv" type: "Convolution" bottom: "399" top: "400" convolution_param { num_output: 48 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "401_BatchNormalization_bn" type: "BatchNorm" bottom: "400" top: "401" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "401_BatchNormalization" type: "Scale" bottom: "401" top: "401" scale_param { bias_term: true } } layer { name: "402_Add" type: "Eltwise" bottom: "401" bottom: "393" top: "402" eltwise_param { operation: SUM } } layer { name: "403_Conv" type: "Convolution" bottom: "402" top: "403" convolution_param { num_output: 288 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "404_BatchNormalization_bn" type: "BatchNorm" bottom: "403" top: "404" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "404_BatchNormalization" type: "Scale" bottom: "404" top: "404" scale_param { bias_term: true } } layer { name: "405_Relu" type: "ReLU" bottom: "404" top: "405" } layer { name: "406_Conv" type: "Convolution" bottom: "405" top: "406" convolution_param { num_output: 288 bias_term: false group: 288 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "407_BatchNormalization_bn" type: "BatchNorm" bottom: "406" top: "407" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "407_BatchNormalization" type: "Scale" bottom: "407" top: "407" scale_param { bias_term: true } } layer { name: "408_Relu" type: "ReLU" bottom: "407" top: "408" } layer { name: "409_Conv" type: "Convolution" bottom: "408" top: "409" convolution_param { num_output: 48 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "410_BatchNormalization_bn" type: "BatchNorm" bottom: "409" top: "410" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "410_BatchNormalization" type: "Scale" bottom: "410" top: "410" scale_param { bias_term: true } } layer { name: "411_Add" type: "Eltwise" bottom: "410" bottom: "402" top: "411" eltwise_param { operation: SUM } } layer { name: "412_Conv" type: "Convolution" bottom: "411" top: "412" convolution_param { num_output: 288 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "413_BatchNormalization_bn" type: "BatchNorm" bottom: "412" top: "413" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "413_BatchNormalization" type: "Scale" bottom: "413" top: "413" scale_param { bias_term: true } } layer { name: "414_Relu" type: "ReLU" bottom: "413" top: "414" } layer { name: "415_Conv" type: "Convolution" bottom: "414" top: "415" convolution_param { num_output: 288 bias_term: false group: 288 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "416_BatchNormalization_bn" type: "BatchNorm" bottom: "415" top: "416" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "416_BatchNormalization" type: "Scale" bottom: "416" top: "416" scale_param { bias_term: true } } layer { name: "417_Relu" type: "ReLU" bottom: "416" top: "417" } layer { name: "418_Conv" type: "Convolution" bottom: "417" top: "418" 
convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "419_BatchNormalization_bn" type: "BatchNorm" bottom: "418" top: "419" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "419_BatchNormalization" type: "Scale" bottom: "419" top: "419" scale_param { bias_term: true } } layer { name: "420_Conv" type: "Convolution" bottom: "419" top: "420" convolution_param { num_output: 576 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "421_BatchNormalization_bn" type: "BatchNorm" bottom: "420" top: "421" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "421_BatchNormalization" type: "Scale" bottom: "421" top: "421" scale_param { bias_term: true } } layer { name: "422_Relu" type: "ReLU" bottom: "421" top: "422" } layer { name: "423_Conv" type: "Convolution" bottom: "422" top: "423" convolution_param { num_output: 576 bias_term: false group: 576 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "424_BatchNormalization_bn" type: "BatchNorm" bottom: "423" top: "424" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "424_BatchNormalization" type: "Scale" bottom: "424" top: "424" scale_param { bias_term: true } } layer { name: "425_Relu" type: "ReLU" bottom: "424" top: "425" } layer { name: "426_Conv" type: "Convolution" bottom: "425" top: "426" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "427_BatchNormalization_bn" type: "BatchNorm" bottom: "426" top: "427" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "427_BatchNormalization" type: "Scale" bottom: "427" top: "427" scale_param { bias_term: true } } layer { name: "428_Add" type: "Eltwise" bottom: "427" bottom: "419" top: "428" eltwise_param { operation: SUM } } layer { name: "429_Conv" type: "Convolution" bottom: "428" top: "429" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "430_BatchNormalization_bn" type: "BatchNorm" bottom: "429" top: "430" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "430_BatchNormalization" type: "Scale" bottom: "430" top: "430" scale_param { bias_term: true } } layer { name: "431_Relu" type: "ReLU" bottom: "430" top: "431" } layer { name: "432_Conv" type: "Convolution" bottom: "431" top: "432" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 2 stride_w: 1 dilation: 1 } } layer { name: "433_BatchNormalization_bn" type: "BatchNorm" bottom: "432" top: "433" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "433_BatchNormalization" type: "Scale" bottom: "433" top: "433" scale_param { bias_term: true } } layer { name: "434_Relu" type: "ReLU" bottom: "433" top: "434" } layer { name: "435_Conv" type: "Convolution" bottom: "434" top: "435" convolution_param { num_output: 128 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "436_BatchNormalization_bn" type: "BatchNorm" bottom: "435" top: "436" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } 
layer { name: "436_BatchNormalization" type: "Scale" bottom: "436" top: "436" scale_param { bias_term: true } } layer { name: "437_Conv" type: "Convolution" bottom: "436" top: "437" convolution_param { num_output: 512 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "438_BatchNormalization_bn" type: "BatchNorm" bottom: "437" top: "438" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "438_BatchNormalization" type: "Scale" bottom: "438" top: "438" scale_param { bias_term: true } } layer { name: "439_Relu" type: "ReLU" bottom: "438" top: "439" } layer { name: "440_Conv" type: "Convolution" bottom: "439" top: "440" convolution_param { num_output: 512 bias_term: false group: 512 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "441_BatchNormalization_bn" type: "BatchNorm" bottom: "440" top: "441" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "441_BatchNormalization" type: "Scale" bottom: "441" top: "441" scale_param { bias_term: true } } layer { name: "442_Relu" type: "ReLU" bottom: "441" top: "442" } layer { name: "443_Conv" type: "Convolution" bottom: "442" top: "443" convolution_param { num_output: 128 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "444_BatchNormalization_bn" type: "BatchNorm" bottom: "443" top: "444" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "444_BatchNormalization" type: "Scale" bottom: "444" top: "444" scale_param { bias_term: true } } layer { name: "445_Add" type: "Eltwise" bottom: "444" bottom: "436" top: "445" eltwise_param { operation: SUM } } layer { name: "446_Conv" type: "Convolution" bottom: "445" top: "446" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "447_BatchNormalization_bn" type: "BatchNorm" bottom: "446" top: "447" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "447_BatchNormalization" type: "Scale" bottom: "447" top: "447" scale_param { bias_term: true } } layer { name: "448_Relu" type: "ReLU" bottom: "447" top: "448" } layer { name: "449_Conv" type: "Convolution" bottom: "448" top: "449" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 0 pad_w: 1 kernel_h: 1 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "450_BatchNormalization_bn" type: "BatchNorm" bottom: "449" top: "450" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "450_BatchNormalization" type: "Scale" bottom: "450" top: "450" scale_param { bias_term: true } } layer { name: "451_Relu" type: "ReLU" bottom: "450" top: "451" } layer { name: "452_Conv" type: "Convolution" bottom: "451" top: "452" convolution_param { num_output: 152 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "453_BatchNormalization_bn" type: "BatchNorm" bottom: "452" top: "453" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "453_BatchNormalization" type: "Scale" bottom: "453" top: "453" scale_param { bias_term: true } } layer { name: "454_Conv" type: "Convolution" bottom: "453" top: "454" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 
stride_w: 1 dilation: 1 } } layer { name: "455_BatchNormalization_bn" type: "BatchNorm" bottom: "454" top: "455" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "455_BatchNormalization" type: "Scale" bottom: "455" top: "455" scale_param { bias_term: true } } layer { name: "456_Relu" type: "ReLU" bottom: "455" top: "456" } layer { name: "457_Conv" type: "Convolution" bottom: "456" top: "457" convolution_param { num_output: 96 bias_term: true group: 1 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "458_BatchNormalization_bn" type: "BatchNorm" bottom: "457" top: "458" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "458_BatchNormalization" type: "Scale" bottom: "458" top: "458" scale_param { bias_term: true } } layer { name: "459_Relu" type: "ReLU" bottom: "458" top: "459" } layer { name: "460_Conv" type: "Convolution" bottom: "456" top: "460" convolution_param { num_output: 96 bias_term: true group: 1 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "461_BatchNormalization_bn" type: "BatchNorm" bottom: "460" top: "461" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "461_BatchNormalization" type: "Scale" bottom: "461" top: "461" scale_param { bias_term: true } } layer { name: "462_Relu" type: "ReLU" bottom: "461" top: "462" } layer { name: "463_Sigmoid" type: "Sigmoid" bottom: "462" top: "463" } layer { name: "464_Mul" type: "Eltwise" bottom: "459" bottom: "463" top: "464" eltwise_param { operation: PROD } } layer { name: "465_Add" type: "Eltwise" bottom: "456" bottom: "464" top: "465" eltwise_param { operation: SUM } } layer { name: "466_Conv" type: "Convolution" bottom: "465" top: "466" convolution_param { num_output: 10 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "467_BatchNormalization_bn" type: "BatchNorm" bottom: "466" top: "467" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "467_BatchNormalization" type: "Scale" bottom: "467" top: "467" scale_param { bias_term: true } } layer { name: "468_Conv" type: "Convolution" bottom: "465" top: "468" convolution_param { num_output: 84 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "469_BatchNormalization_bn" type: "BatchNorm" bottom: "468" top: "469" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "469_BatchNormalization" type: "Scale" bottom: "469" top: "469" scale_param { bias_term: true } } layer { name: "470_Conv" type: "Convolution" bottom: "465" top: "470" convolution_param { num_output: 2275 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "471_BatchNormalization_bn" type: "BatchNorm" bottom: "470" top: "471" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "471_BatchNormalization" type: "Scale" bottom: "471" top: "471" scale_param { bias_term: true } } layer { name: "472_Conv" type: "Convolution" bottom: "465" top: "472" convolution_param { num_output: 1769 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "473_BatchNormalization_bn" type: "BatchNorm" bottom: "472" top: "473" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } 
layer { name: "473_BatchNormalization" type: "Scale" bottom: "473" top: "473" scale_param { bias_term: true } } layer { name: "474_Conv" type: "Convolution" bottom: "465" top: "474" convolution_param { num_output: 2469 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "475_BatchNormalization_bn" type: "BatchNorm" bottom: "474" top: "475" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "475_BatchNormalization" type: "Scale" bottom: "475" top: "475" scale_param { bias_term: true } } layer { name: "476_Conv" type: "Convolution" bottom: "465" top: "476" convolution_param { num_output: 1592 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "477_BatchNormalization_bn" type: "BatchNorm" bottom: "476" top: "477" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "477_BatchNormalization" type: "Scale" bottom: "477" top: "477" scale_param { bias_term: true } } layer { name: "478_Conv" type: "Convolution" bottom: "465" top: "478" convolution_param { num_output: 226 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "479_BatchNormalization_bn" type: "BatchNorm" bottom: "478" top: "479" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "479_BatchNormalization" type: "Scale" bottom: "479" top: "479" scale_param { bias_term: true } } layer { name: "480_Concat" type: "Concat" bottom: "467" bottom: "469" bottom: "471" bottom: "473" bottom: "475" bottom: "477" bottom: "479" top: "480" concat_param { axis: 1 } } layer { name:"ArgMax" type: "ArgMax" bottom: "480" top: "ArgMax" argmax_param { axis: 1 } }