layer { name: "input" type: "Input" top: "data" input_param { shape { dim: 1 dim: 1 dim: 32 dim: 512 } } } layer { name: "power-data" type: "Power" bottom: "data" top: "0" power_param { scale: 0.0039215686274 shift: -0.67 } } layer { name: "308_Conv" type: "Convolution" bottom: "0" top: "308" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "309_BatchNormalization_bn" type: "BatchNorm" bottom: "308" top: "309" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "309_BatchNormalization" type: "Scale" bottom: "309" top: "309" scale_param { bias_term: true } } layer { name: "310_Relu" type: "ReLU" bottom: "309" top: "310" } layer { name: "311_Conv" type: "Convolution" bottom: "310" top: "311" convolution_param { num_output: 32 bias_term: false group: 32 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "312_BatchNormalization_bn" type: "BatchNorm" bottom: "311" top: "312" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "312_BatchNormalization" type: "Scale" bottom: "312" top: "312" scale_param { bias_term: true } } layer { name: "313_Relu" type: "ReLU" bottom: "312" top: "313" } layer { name: "314_Conv" type: "Convolution" bottom: "313" top: "314" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "315_BatchNormalization_bn" type: "BatchNorm" bottom: "314" top: "315" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "315_BatchNormalization" type: "Scale" bottom: "315" top: "315" scale_param { bias_term: true } } layer { name: "316_Conv" type: "Convolution" bottom: "315" top: "316" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "317_BatchNormalization_bn" type: "BatchNorm" bottom: "316" top: "317" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "317_BatchNormalization" type: "Scale" bottom: "317" top: "317" scale_param { bias_term: true } } layer { name: "318_Relu" type: "ReLU" bottom: "317" top: "318" } layer { name: "319_Conv" type: "Convolution" bottom: "318" top: "319" convolution_param { num_output: 96 bias_term: false group: 96 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "320_BatchNormalization_bn" type: "BatchNorm" bottom: "319" top: "320" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "320_BatchNormalization" type: "Scale" bottom: "320" top: "320" scale_param { bias_term: true } } layer { name: "321_Relu" type: "ReLU" bottom: "320" top: "321" } layer { name: "322_Conv" type: "Convolution" bottom: "321" top: "322" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "323_BatchNormalization_bn" type: "BatchNorm" bottom: "322" top: "323" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "323_BatchNormalization" type: "Scale" bottom: "323" top: "323" scale_param { bias_term: true } } layer { name: "324_Conv" type: "Convolution" bottom: "323" top: "324" convolution_param { num_output: 72 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "325_BatchNormalization_bn" type: "BatchNorm" bottom: "324" top: "325" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "325_BatchNormalization" type: "Scale" bottom: "325" top: "325" scale_param { bias_term: true } } layer { name: "326_Relu" type: "ReLU" bottom: "325" top: "326" } layer { name: "327_Conv" type: "Convolution" bottom: "326" top: "327" convolution_param { num_output: 72 bias_term: false group: 72 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "328_BatchNormalization_bn" type: "BatchNorm" bottom: "327" top: "328" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "328_BatchNormalization" type: "Scale" bottom: "328" top: "328" scale_param { bias_term: true } } layer { name: "329_Relu" type: "ReLU" bottom: "328" top: "329" } layer { name: "330_Conv" type: "Convolution" bottom: "329" top: "330" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "331_BatchNormalization_bn" type: "BatchNorm" bottom: "330" top: "331" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "331_BatchNormalization" type: "Scale" bottom: "331" top: "331" scale_param { bias_term: true } } layer { name: "332_Add" type: "Eltwise" bottom: "331" bottom: "323" top: "332" eltwise_param { operation: SUM } } layer { name: "333_Conv" type: "Convolution" bottom: "332" top: "333" convolution_param { num_output: 72 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "334_BatchNormalization_bn" type: "BatchNorm" bottom: "333" top: "334" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "334_BatchNormalization" type: "Scale" bottom: "334" top: "334" scale_param { bias_term: true } } layer { name: "335_Relu" type: "ReLU" bottom: "334" top: "335" } layer { name: "336_Conv" type: "Convolution" bottom: "335" top: "336" convolution_param { num_output: 72 bias_term: false group: 72 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "337_BatchNormalization_bn" type: "BatchNorm" bottom: "336" top: "337" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "337_BatchNormalization" type: "Scale" bottom: "337" top: "337" scale_param { bias_term: true } } layer { name: "338_Relu" type: "ReLU" bottom: "337" top: "338" } layer { name: "339_Conv" type: "Convolution" bottom: "338" top: "339" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "340_BatchNormalization_bn" type: "BatchNorm" bottom: "339" top: "340" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "340_BatchNormalization" type: "Scale" bottom: "340" top: "340" scale_param { bias_term: true } } layer { name: "341_Add" type: "Eltwise" bottom: "340" bottom: "332" top: "341" eltwise_param { operation: SUM } } layer { name: "342_Conv" type: "Convolution" bottom: "341" top: "342" convolution_param { num_output: 72 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "343_BatchNormalization_bn" type: "BatchNorm" bottom: "342" top: "343" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "343_BatchNormalization" type: "Scale" bottom: "343" top: "343" scale_param { bias_term: true } } layer { name: "344_Relu" type: "ReLU" bottom: "343" top: "344" } layer { name: "345_Conv" type: "Convolution" bottom: "344" top: "345" convolution_param { num_output: 72 bias_term: false group: 72 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 2 stride_w: 1 dilation: 1 } } layer { name: "346_BatchNormalization_bn" type: "BatchNorm" bottom: "345" top: "346" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "346_BatchNormalization" type: "Scale" bottom: "346" top: "346" scale_param { bias_term: true } } layer { name: "347_Relu" type: "ReLU" bottom: "346" top: "347" } layer { name: "348_Conv" type: "Convolution" bottom: "347" top: "348" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "349_BatchNormalization_bn" type: "BatchNorm" bottom: "348" top: "349" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "349_BatchNormalization" type: "Scale" bottom: "349" top: "349" scale_param { bias_term: true } } layer { name: "350_Conv" type: "Convolution" bottom: "349" top: "350" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "351_BatchNormalization_bn" type: "BatchNorm" bottom: "350" top: "351" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "351_BatchNormalization" type: "Scale" bottom: "351" top: "351" scale_param { bias_term: true } } layer { name: "352_Relu" type: "ReLU" bottom: "351" top: "352" } layer { name: "353_Conv" type: "Convolution" bottom: "352" top: "353" convolution_param { num_output: 96 bias_term: false group: 96 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "354_BatchNormalization_bn" type: "BatchNorm" bottom: "353" top: "354" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "354_BatchNormalization" type: "Scale" bottom: "354" top: "354" scale_param { bias_term: true } } layer { name: "355_Relu" type: "ReLU" bottom: "354" top: "355" } layer { name: "356_Conv" type: "Convolution" bottom: "355" top: "356" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "357_BatchNormalization_bn" type: "BatchNorm" bottom: "356" top: "357" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "357_BatchNormalization" type: "Scale" bottom: "357" top: "357" scale_param { bias_term: true } } layer { name: "358_Add" type: "Eltwise" bottom: "357" bottom: "349" top: "358" eltwise_param { operation: SUM } } layer { name: "359_Conv" type: "Convolution" bottom: "358" top: "359" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "360_BatchNormalization_bn" type: "BatchNorm" bottom: "359" top: "360" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "360_BatchNormalization" type: "Scale" bottom: "360" top: "360" scale_param { bias_term: true } } layer { name: "361_Relu" type: "ReLU" bottom: "360" top: "361" } layer { name: "362_Conv" type: "Convolution" bottom: "361" top: "362" convolution_param { num_output: 96 bias_term: false group: 96 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "363_BatchNormalization_bn" type: "BatchNorm" bottom: "362" top: "363" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "363_BatchNormalization" type: "Scale" bottom: "363" top: "363" scale_param { bias_term: true } } layer { name: "364_Relu" type: "ReLU" bottom: "363" top: "364" } layer { name: "365_Conv" type: "Convolution" bottom: "364" top: "365" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "366_BatchNormalization_bn" type: "BatchNorm" bottom: "365" top: "366" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "366_BatchNormalization" type: "Scale" bottom: "366" top: "366" scale_param { bias_term: true } } layer { name: "367_Add" type: "Eltwise" bottom: "366" bottom: "358" top: "367" eltwise_param { operation: SUM } } layer { name: "368_Conv" type: "Convolution" bottom: "367" top: "368" convolution_param { num_output: 192 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "369_BatchNormalization_bn" type: "BatchNorm" bottom: "368" top: "369" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "369_BatchNormalization" type: "Scale" bottom: "369" top: "369" scale_param { bias_term: true } } layer { name: "370_Relu" type: "ReLU" bottom: "369" top: "370" } layer { name: "371_Conv" type: "Convolution" bottom: "370" top: "371" convolution_param { num_output: 192 bias_term: false group: 192 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 2 stride_w: 1 dilation: 1 } } layer { name: "372_BatchNormalization_bn" type: "BatchNorm" bottom: "371" top: "372" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "372_BatchNormalization" type: "Scale" bottom: "372" top: "372" scale_param { bias_term: true } } layer { name: "373_Relu" type: "ReLU" bottom: "372" top: "373" } layer { name: "374_Conv" type: "Convolution" bottom: "373" top: "374" convolution_param { num_output: 48 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "375_BatchNormalization_bn" type: "BatchNorm" bottom: "374" top: "375" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "375_BatchNormalization" type: "Scale" bottom: "375" top: "375" scale_param { bias_term: true } } layer { name: "376_Conv" type: "Convolution" bottom: "375" top: "376" convolution_param { num_output: 288 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "377_BatchNormalization_bn" type: "BatchNorm" bottom: "376" top: "377" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "377_BatchNormalization" type: "Scale" bottom: "377" top: "377" scale_param { bias_term: true } } layer { name: "378_Relu" type: "ReLU" bottom: "377" top: "378" } layer { name: "379_Conv" type: "Convolution" bottom: "378" top: "379" convolution_param { num_output: 288 bias_term: false group: 288 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "380_BatchNormalization_bn" type: "BatchNorm" bottom: "379" top: "380" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "380_BatchNormalization" type: "Scale" bottom: "380" top: "380" scale_param { bias_term: true } } layer { name: "381_Relu" type: "ReLU" bottom: "380" top: "381" } layer { name: "382_Conv" type: "Convolution" bottom: "381" top: "382" convolution_param { num_output: 48 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "383_BatchNormalization_bn" type: "BatchNorm" bottom: "382" top: "383" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "383_BatchNormalization" type: "Scale" bottom: "383" top: "383" scale_param { bias_term: true } } layer { name: "384_Add" type: "Eltwise" bottom: "383" bottom: "375" top: "384" eltwise_param { operation: SUM } } layer { name: "385_Conv" type: "Convolution" bottom: "384" top: "385" convolution_param { num_output: 288 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "386_BatchNormalization_bn" type: "BatchNorm" bottom: "385" top: "386" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "386_BatchNormalization" type: "Scale" bottom: "386" top: "386" scale_param { bias_term: true } } layer { name: "387_Relu" type: "ReLU" bottom: "386" top: "387" } layer { name: "388_Conv" type: "Convolution" bottom: "387" top: "388" convolution_param { num_output: 288 bias_term: false group: 288 pad_h: 2 pad_w: 2 kernel_h: 5 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "389_BatchNormalization_bn" type: "BatchNorm" bottom: "388" top: "389" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "389_BatchNormalization" type: "Scale" bottom: "389" top: "389" scale_param { bias_term: true } } layer { name: "390_Relu" type: "ReLU" bottom: "389" top: "390" } layer { name: "391_Conv" type: "Convolution" bottom: "390" top: "391" convolution_param { num_output: 48 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "392_BatchNormalization_bn" type: "BatchNorm" bottom: "391" top: "392" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "392_BatchNormalization" type: "Scale" bottom: "392" top: "392" scale_param { bias_term: true } } layer { name: "393_Add" type: "Eltwise" bottom: "392" bottom: "384" top: "393" eltwise_param { operation: SUM } } layer { name: "394_Conv" type: "Convolution" bottom: "393" top: "394" convolution_param { num_output: 288 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "395_BatchNormalization_bn" type: "BatchNorm" bottom: "394" top: "395" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "395_BatchNormalization" type: "Scale" bottom: "395" top: "395" scale_param { bias_term: true } } layer { name: "396_Relu" type: "ReLU" bottom: "395" top: "396" } layer { name: "397_Conv" type: "Convolution" bottom: "396" top: "397" convolution_param { num_output: 288 bias_term: false group: 288 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "398_BatchNormalization_bn" type: "BatchNorm" bottom: "397" top: "398" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "398_BatchNormalization" type: "Scale" bottom: "398" top: "398" scale_param { bias_term: true } } layer { name: "399_Relu" type: "ReLU" bottom: "398" top: "399" } layer { name: "400_Conv" type: "Convolution" bottom: "399" top: "400" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "401_BatchNormalization_bn" type: "BatchNorm" bottom: "400" top: "401" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "401_BatchNormalization" type: "Scale" bottom: "401" top: "401" scale_param { bias_term: true } } layer { name: "402_Conv" type: "Convolution" bottom: "401" top: "402" convolution_param { num_output: 576 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "403_BatchNormalization_bn" type: "BatchNorm" bottom: "402" top: "403" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "403_BatchNormalization" type: "Scale" bottom: "403" top: "403" scale_param { bias_term: true } } layer { name: "404_Relu" type: "ReLU" bottom: "403" top: "404" } layer { name: "405_Conv" type: "Convolution" bottom: "404" top: "405" convolution_param { num_output: 576 bias_term: false group: 576 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "406_BatchNormalization_bn" type: "BatchNorm" bottom: "405" top: "406" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "406_BatchNormalization" type: "Scale" bottom: "406" top: "406" scale_param { bias_term: true } } layer { name: "407_Relu" type: "ReLU" bottom: "406" top: "407" } layer { name: "408_Conv" type: "Convolution" bottom: "407" top: "408" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "409_BatchNormalization_bn" type: "BatchNorm" bottom: "408" top: "409" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "409_BatchNormalization" type: "Scale" bottom: "409" top: "409" scale_param { bias_term: true } } layer { name: "410_Add" type: "Eltwise" bottom: "409" bottom: "401" top: "410" eltwise_param { operation: SUM } } layer { name: "411_Conv" type: "Convolution" bottom: "410" top: "411" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "412_BatchNormalization_bn" type: "BatchNorm" bottom: "411" top: "412" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "412_BatchNormalization" type: "Scale" bottom: "412" top: "412" scale_param { bias_term: true } } layer { name: "413_Relu" type: "ReLU" bottom: "412" top: "413" } layer { name: "414_Conv" type: "Convolution" bottom: "413" top: "414" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 2 stride_w: 1 dilation: 1 } } layer { name: "415_BatchNormalization_bn" type: "BatchNorm" bottom: "414" top: "415" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "415_BatchNormalization" type: "Scale" bottom: "415" top: "415" scale_param { bias_term: true } } layer { name: "416_Relu" type: "ReLU" bottom: "415" top: "416" } layer { name: "417_Conv" type: "Convolution" bottom: "416" top: "417" convolution_param { num_output: 128 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "418_BatchNormalization_bn" type: "BatchNorm" bottom: "417" top: "418" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "418_BatchNormalization" type: "Scale" bottom: "418" top: "418" scale_param { bias_term: true } } layer { name: "419_Conv" type: "Convolution" bottom: "418" top: "419" convolution_param { num_output: 512 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "420_BatchNormalization_bn" type: "BatchNorm" bottom: "419" top: "420" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "420_BatchNormalization" type: "Scale" bottom: "420" top: "420" scale_param { bias_term: true } } layer { name: "421_Relu" type: "ReLU" bottom: "420" top: "421" } layer { name: "422_Conv" type: "Convolution" bottom: "421" top: "422" convolution_param { num_output: 512 bias_term: false group: 512 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "423_BatchNormalization_bn" type: "BatchNorm" bottom: "422" top: "423" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "423_BatchNormalization" type: "Scale" bottom: "423" top: "423" scale_param { bias_term: true } } layer { name: "424_Relu" type: "ReLU" bottom: "423" top: "424" } layer { name: "425_Conv" type: "Convolution" bottom: "424" top: "425" convolution_param { num_output: 128 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "426_BatchNormalization_bn" type: "BatchNorm" bottom: "425" top: "426" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "426_BatchNormalization" type: "Scale" bottom: "426" top: "426" scale_param { bias_term: true } } layer { name: "427_Add" type: "Eltwise" bottom: "426" bottom: "418" top: "427" eltwise_param { operation: SUM } } layer { name: "428_Conv" type: "Convolution" bottom: "427" top: "428" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "429_BatchNormalization_bn" type: "BatchNorm" bottom: "428" top: "429" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "429_BatchNormalization" type: "Scale" bottom: "429" top: "429" scale_param { bias_term: true } } layer { name: "430_Relu" type: "ReLU" bottom: "429" top: "430" } layer { name: "431_Conv" type: "Convolution" bottom: "430" top: "431" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 0 pad_w: 1 kernel_h: 1 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "432_BatchNormalization_bn" type: "BatchNorm" bottom: "431" top: "432" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "432_BatchNormalization" type: "Scale" bottom: "432" top: "432" scale_param { bias_term: true } } layer { name: "433_Relu" type: "ReLU" bottom: "432" top: "433" } layer { name: "434_Conv" type: "Convolution" bottom: "433" top: "434" convolution_param { num_output: 152 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "435_BatchNormalization_bn" type: "BatchNorm" bottom: "434" top: "435" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "435_BatchNormalization" type: "Scale" bottom: "435" top: "435" scale_param { bias_term: true } } layer { name: "436_Conv" type: "Convolution" bottom: "435" top: "436" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "437_BatchNormalization_bn" type: "BatchNorm" bottom: "436" top: "437" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "437_BatchNormalization" type: "Scale" bottom: "437" top: "437" scale_param { bias_term: true } } layer { name: "438_Relu" type: "ReLU" bottom: "437" top: "438" } layer { name: "439_Conv" type: "Convolution" bottom: "438" top: "439" convolution_param { num_output: 96 bias_term: true group: 1 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "440_BatchNormalization_bn" type: "BatchNorm" bottom: "439" top: "440" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "440_BatchNormalization" type: "Scale" bottom: "440" top: "440" scale_param { bias_term: true } } layer { name: "441_Relu" type: "ReLU" bottom: "440" top: "441" } layer { name: "442_Conv" type: "Convolution" bottom: "438" top: "442" convolution_param { num_output: 96 bias_term: true group: 1 pad_h: 0 pad_w: 2 kernel_h: 1 kernel_w: 5 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "443_BatchNormalization_bn" type: "BatchNorm" bottom: "442" top: "443" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "443_BatchNormalization" type: "Scale" bottom: "443" top: "443" scale_param { bias_term: true } } layer { name: "444_Relu" type: "ReLU" bottom: "443" top: "444" } layer { name: "445_Sigmoid" type: "Sigmoid" bottom: "444" top: "445" } layer { name: "446_Mul" type: "Eltwise" bottom: "441" bottom: "445" top: "446" eltwise_param { operation: PROD } } layer { name: "447_Add" type: "Eltwise" bottom: "438" bottom: "446" top: "447" eltwise_param { operation: SUM } } layer { name: "448_Conv" type: "Convolution" bottom: "447" top: "448" convolution_param { num_output: 10 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "449_BatchNormalization_bn" type: "BatchNorm" bottom: "448" top: "449" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "449_BatchNormalization" type: "Scale" bottom: "449" top: "449" scale_param { bias_term: true } } layer { name: "450_Conv" type: "Convolution" bottom: "447" top: "450" convolution_param { num_output: 1 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "451_BatchNormalization_bn" type: "BatchNorm" bottom: "450" top: "451" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "451_BatchNormalization" type: "Scale" bottom: "451" top: "451" scale_param { bias_term: true } } layer { name: "452_Conv" type: "Convolution" bottom: "447" top: "452" convolution_param { num_output: 1 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "453_BatchNormalization_bn" type: "BatchNorm" bottom: "452" top: "453" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "453_BatchNormalization" type: "Scale" bottom: "453" top: "453" scale_param { bias_term: true } } layer { name: "454_Conv" type: "Convolution" bottom: "447" top: "454" convolution_param { num_output: 1 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "455_BatchNormalization_bn" type: "BatchNorm" bottom: "454" top: "455" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "455_BatchNormalization" type: "Scale" bottom: "455" top: "455" scale_param { bias_term: true } } layer { name: "456_Concat" type: "Concat" bottom: "449" bottom: "451" bottom: "453" bottom: "455" top: "456" concat_param { axis: 1 } } layer { name:"ArgMax" type: "ArgMax" bottom: "456" top: "ArgMax" argmax_param { axis: 1 } }