# Caffe prototxt (protobuf text format) network definition — not YAML, despite
# any file-extension hint. Machine-generated (numeric layer/blob names such as
# "319_Conv" suggest an ONNX->Caffe conversion; the numbers are converter node
# indices, and gaps like the missing "469" are harmless naming artifacts).
#
# Topology (as visible below):
#   - Input: 1 x 3 x 224 x 224.
#   - Backbone: repeated Conv -> BatchNorm -> Scale [-> ReLU] stanzas matching
#     the MobileNetV2 inverted-residual pattern: 1x1 expansion conv + ReLU,
#     3x3 depthwise conv (group == num_output) + ReLU, then a linear 1x1
#     projection conv (no ReLU). Stride-1, same-channel blocks are joined by
#     Eltwise SUM residual adds (layers 343, 360, 369, 386, 395, 404, 421,
#     430, 447, 456).
#   - Every BatchNorm sets use_global_stats: true (inference mode — uses the
#     stored moving mean/variance) and is immediately followed by a Scale
#     layer with bias_term: true; this pair is Caffe's idiomatic split of a
#     full batch-norm into normalization + learned affine (gamma/beta).
#   - eps: 9.99999974738e-06 is simply 1e-5 rendered through float32; do not
#     "clean it up" by hand — it must match the weights file.
#   - Head: 1x1 conv to 1280 channels + ReLU, 7x7/7 average pool (reduces the
#     7x7 feature map to 1x1 at 224 input), Reshape to (-1, 1280), then three
#     parallel InnerProduct heads of 410, 113 and 1 outputs, each followed by
#     a Sigmoid (471->474, 472->475, 473->476).
#     NOTE(review): sigmoid-per-head implies independent multi-label /
#     attribute outputs rather than softmax classification — presumably two
#     attribute groups plus one scalar score; confirm against the consumer.
#
# All convolutions use bias_term: false (the following BatchNorm/Scale pair
# absorbs the bias). Layer definitions below are intentionally left
# byte-identical to the original export so they stay in sync with the
# companion .caffemodel; the physical line breaks fall mid-statement, which
# is legal in protobuf text format.
layer { name: "input.1" type: "Input" top: "input.1" input_param { shape { dim: 1 dim: 3 dim: 224 dim: 224 } } } layer { name: "319_Conv" type: "Convolution" bottom: "input.1" top: "319" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "320_BatchNormalization_bn" type: "BatchNorm" bottom: "319" top: "320" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "320_BatchNormalization" type: "Scale" bottom: "320" top: "320" scale_param { bias_term: true } } layer { name: "321_Relu" type: "ReLU" bottom: "320" top: "321" } layer { name: "322_Conv" type: "Convolution" bottom: "321" top: "322" convolution_param { num_output: 32 bias_term: false group: 32 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "323_BatchNormalization_bn" type: "BatchNorm" bottom: "322" top: "323" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "323_BatchNormalization" type: "Scale" bottom: "323" top: "323" scale_param { bias_term: true } } layer { name: "324_Relu" type: "ReLU" bottom: "323" top: "324" } layer { name: "325_Conv" type: "Convolution" bottom: "324" top: "325" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "326_BatchNormalization_bn" type: "BatchNorm" bottom: "325" top: "326" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "326_BatchNormalization" type: "Scale" bottom: "326" top: "326" scale_param { bias_term: true } } layer { name: "327_Conv" type: "Convolution" bottom: "326" top: "327" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "328_BatchNormalization_bn" type: "BatchNorm" bottom: "327" top: "328" batch_norm_param 
{ use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "328_BatchNormalization" type: "Scale" bottom: "328" top: "328" scale_param { bias_term: true } } layer { name: "329_Relu" type: "ReLU" bottom: "328" top: "329" } layer { name: "330_Conv" type: "Convolution" bottom: "329" top: "330" convolution_param { num_output: 96 bias_term: false group: 96 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "331_BatchNormalization_bn" type: "BatchNorm" bottom: "330" top: "331" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "331_BatchNormalization" type: "Scale" bottom: "331" top: "331" scale_param { bias_term: true } } layer { name: "332_Relu" type: "ReLU" bottom: "331" top: "332" } layer { name: "333_Conv" type: "Convolution" bottom: "332" top: "333" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "334_BatchNormalization_bn" type: "BatchNorm" bottom: "333" top: "334" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "334_BatchNormalization" type: "Scale" bottom: "334" top: "334" scale_param { bias_term: true } } layer { name: "335_Conv" type: "Convolution" bottom: "334" top: "335" convolution_param { num_output: 144 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "336_BatchNormalization_bn" type: "BatchNorm" bottom: "335" top: "336" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "336_BatchNormalization" type: "Scale" bottom: "336" top: "336" scale_param { bias_term: true } } layer { name: "337_Relu" type: "ReLU" bottom: "336" top: "337" } layer { name: "338_Conv" type: "Convolution" bottom: "337" top: "338" convolution_param { num_output: 144 bias_term: false group: 144 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 
dilation: 1 } } layer { name: "339_BatchNormalization_bn" type: "BatchNorm" bottom: "338" top: "339" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "339_BatchNormalization" type: "Scale" bottom: "339" top: "339" scale_param { bias_term: true } } layer { name: "340_Relu" type: "ReLU" bottom: "339" top: "340" } layer { name: "341_Conv" type: "Convolution" bottom: "340" top: "341" convolution_param { num_output: 24 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "342_BatchNormalization_bn" type: "BatchNorm" bottom: "341" top: "342" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "342_BatchNormalization" type: "Scale" bottom: "342" top: "342" scale_param { bias_term: true } } layer { name: "343_Add" type: "Eltwise" bottom: "334" bottom: "342" top: "343" eltwise_param { operation: SUM } } layer { name: "344_Conv" type: "Convolution" bottom: "343" top: "344" convolution_param { num_output: 144 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "345_BatchNormalization_bn" type: "BatchNorm" bottom: "344" top: "345" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "345_BatchNormalization" type: "Scale" bottom: "345" top: "345" scale_param { bias_term: true } } layer { name: "346_Relu" type: "ReLU" bottom: "345" top: "346" } layer { name: "347_Conv" type: "Convolution" bottom: "346" top: "347" convolution_param { num_output: 144 bias_term: false group: 144 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "348_BatchNormalization_bn" type: "BatchNorm" bottom: "347" top: "348" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "348_BatchNormalization" type: "Scale" bottom: "348" top: "348" scale_param { bias_term: true } } layer { name: "349_Relu" type: 
"ReLU" bottom: "348" top: "349" } layer { name: "350_Conv" type: "Convolution" bottom: "349" top: "350" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "351_BatchNormalization_bn" type: "BatchNorm" bottom: "350" top: "351" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "351_BatchNormalization" type: "Scale" bottom: "351" top: "351" scale_param { bias_term: true } } layer { name: "352_Conv" type: "Convolution" bottom: "351" top: "352" convolution_param { num_output: 192 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "353_BatchNormalization_bn" type: "BatchNorm" bottom: "352" top: "353" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "353_BatchNormalization" type: "Scale" bottom: "353" top: "353" scale_param { bias_term: true } } layer { name: "354_Relu" type: "ReLU" bottom: "353" top: "354" } layer { name: "355_Conv" type: "Convolution" bottom: "354" top: "355" convolution_param { num_output: 192 bias_term: false group: 192 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "356_BatchNormalization_bn" type: "BatchNorm" bottom: "355" top: "356" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "356_BatchNormalization" type: "Scale" bottom: "356" top: "356" scale_param { bias_term: true } } layer { name: "357_Relu" type: "ReLU" bottom: "356" top: "357" } layer { name: "358_Conv" type: "Convolution" bottom: "357" top: "358" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "359_BatchNormalization_bn" type: "BatchNorm" bottom: "358" top: "359" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: 
"359_BatchNormalization" type: "Scale" bottom: "359" top: "359" scale_param { bias_term: true } } layer { name: "360_Add" type: "Eltwise" bottom: "351" bottom: "359" top: "360" eltwise_param { operation: SUM } } layer { name: "361_Conv" type: "Convolution" bottom: "360" top: "361" convolution_param { num_output: 192 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "362_BatchNormalization_bn" type: "BatchNorm" bottom: "361" top: "362" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "362_BatchNormalization" type: "Scale" bottom: "362" top: "362" scale_param { bias_term: true } } layer { name: "363_Relu" type: "ReLU" bottom: "362" top: "363" } layer { name: "364_Conv" type: "Convolution" bottom: "363" top: "364" convolution_param { num_output: 192 bias_term: false group: 192 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "365_BatchNormalization_bn" type: "BatchNorm" bottom: "364" top: "365" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "365_BatchNormalization" type: "Scale" bottom: "365" top: "365" scale_param { bias_term: true } } layer { name: "366_Relu" type: "ReLU" bottom: "365" top: "366" } layer { name: "367_Conv" type: "Convolution" bottom: "366" top: "367" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "368_BatchNormalization_bn" type: "BatchNorm" bottom: "367" top: "368" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "368_BatchNormalization" type: "Scale" bottom: "368" top: "368" scale_param { bias_term: true } } layer { name: "369_Add" type: "Eltwise" bottom: "360" bottom: "368" top: "369" eltwise_param { operation: SUM } } layer { name: "370_Conv" type: "Convolution" bottom: "369" top: "370" convolution_param { num_output: 
192 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "371_BatchNormalization_bn" type: "BatchNorm" bottom: "370" top: "371" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "371_BatchNormalization" type: "Scale" bottom: "371" top: "371" scale_param { bias_term: true } } layer { name: "372_Relu" type: "ReLU" bottom: "371" top: "372" } layer { name: "373_Conv" type: "Convolution" bottom: "372" top: "373" convolution_param { num_output: 192 bias_term: false group: 192 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "374_BatchNormalization_bn" type: "BatchNorm" bottom: "373" top: "374" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "374_BatchNormalization" type: "Scale" bottom: "374" top: "374" scale_param { bias_term: true } } layer { name: "375_Relu" type: "ReLU" bottom: "374" top: "375" } layer { name: "376_Conv" type: "Convolution" bottom: "375" top: "376" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "377_BatchNormalization_bn" type: "BatchNorm" bottom: "376" top: "377" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "377_BatchNormalization" type: "Scale" bottom: "377" top: "377" scale_param { bias_term: true } } layer { name: "378_Conv" type: "Convolution" bottom: "377" top: "378" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "379_BatchNormalization_bn" type: "BatchNorm" bottom: "378" top: "379" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "379_BatchNormalization" type: "Scale" bottom: "379" top: "379" scale_param { bias_term: true } } layer { name: "380_Relu" type: "ReLU" bottom: 
"379" top: "380" } layer { name: "381_Conv" type: "Convolution" bottom: "380" top: "381" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "382_BatchNormalization_bn" type: "BatchNorm" bottom: "381" top: "382" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "382_BatchNormalization" type: "Scale" bottom: "382" top: "382" scale_param { bias_term: true } } layer { name: "383_Relu" type: "ReLU" bottom: "382" top: "383" } layer { name: "384_Conv" type: "Convolution" bottom: "383" top: "384" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "385_BatchNormalization_bn" type: "BatchNorm" bottom: "384" top: "385" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "385_BatchNormalization" type: "Scale" bottom: "385" top: "385" scale_param { bias_term: true } } layer { name: "386_Add" type: "Eltwise" bottom: "377" bottom: "385" top: "386" eltwise_param { operation: SUM } } layer { name: "387_Conv" type: "Convolution" bottom: "386" top: "387" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "388_BatchNormalization_bn" type: "BatchNorm" bottom: "387" top: "388" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "388_BatchNormalization" type: "Scale" bottom: "388" top: "388" scale_param { bias_term: true } } layer { name: "389_Relu" type: "ReLU" bottom: "388" top: "389" } layer { name: "390_Conv" type: "Convolution" bottom: "389" top: "390" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "391_BatchNormalization_bn" type: "BatchNorm" bottom: "390" top: 
"391" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "391_BatchNormalization" type: "Scale" bottom: "391" top: "391" scale_param { bias_term: true } } layer { name: "392_Relu" type: "ReLU" bottom: "391" top: "392" } layer { name: "393_Conv" type: "Convolution" bottom: "392" top: "393" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "394_BatchNormalization_bn" type: "BatchNorm" bottom: "393" top: "394" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "394_BatchNormalization" type: "Scale" bottom: "394" top: "394" scale_param { bias_term: true } } layer { name: "395_Add" type: "Eltwise" bottom: "386" bottom: "394" top: "395" eltwise_param { operation: SUM } } layer { name: "396_Conv" type: "Convolution" bottom: "395" top: "396" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "397_BatchNormalization_bn" type: "BatchNorm" bottom: "396" top: "397" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "397_BatchNormalization" type: "Scale" bottom: "397" top: "397" scale_param { bias_term: true } } layer { name: "398_Relu" type: "ReLU" bottom: "397" top: "398" } layer { name: "399_Conv" type: "Convolution" bottom: "398" top: "399" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "400_BatchNormalization_bn" type: "BatchNorm" bottom: "399" top: "400" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "400_BatchNormalization" type: "Scale" bottom: "400" top: "400" scale_param { bias_term: true } } layer { name: "401_Relu" type: "ReLU" bottom: "400" top: "401" } layer { name: "402_Conv" type: "Convolution" bottom: "401" 
top: "402" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "403_BatchNormalization_bn" type: "BatchNorm" bottom: "402" top: "403" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "403_BatchNormalization" type: "Scale" bottom: "403" top: "403" scale_param { bias_term: true } } layer { name: "404_Add" type: "Eltwise" bottom: "395" bottom: "403" top: "404" eltwise_param { operation: SUM } } layer { name: "405_Conv" type: "Convolution" bottom: "404" top: "405" convolution_param { num_output: 384 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "406_BatchNormalization_bn" type: "BatchNorm" bottom: "405" top: "406" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "406_BatchNormalization" type: "Scale" bottom: "406" top: "406" scale_param { bias_term: true } } layer { name: "407_Relu" type: "ReLU" bottom: "406" top: "407" } layer { name: "408_Conv" type: "Convolution" bottom: "407" top: "408" convolution_param { num_output: 384 bias_term: false group: 384 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "409_BatchNormalization_bn" type: "BatchNorm" bottom: "408" top: "409" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "409_BatchNormalization" type: "Scale" bottom: "409" top: "409" scale_param { bias_term: true } } layer { name: "410_Relu" type: "ReLU" bottom: "409" top: "410" } layer { name: "411_Conv" type: "Convolution" bottom: "410" top: "411" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "412_BatchNormalization_bn" type: "BatchNorm" bottom: "411" top: "412" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer 
{ name: "412_BatchNormalization" type: "Scale" bottom: "412" top: "412" scale_param { bias_term: true } } layer { name: "413_Conv" type: "Convolution" bottom: "412" top: "413" convolution_param { num_output: 576 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "414_BatchNormalization_bn" type: "BatchNorm" bottom: "413" top: "414" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "414_BatchNormalization" type: "Scale" bottom: "414" top: "414" scale_param { bias_term: true } } layer { name: "415_Relu" type: "ReLU" bottom: "414" top: "415" } layer { name: "416_Conv" type: "Convolution" bottom: "415" top: "416" convolution_param { num_output: 576 bias_term: false group: 576 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "417_BatchNormalization_bn" type: "BatchNorm" bottom: "416" top: "417" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "417_BatchNormalization" type: "Scale" bottom: "417" top: "417" scale_param { bias_term: true } } layer { name: "418_Relu" type: "ReLU" bottom: "417" top: "418" } layer { name: "419_Conv" type: "Convolution" bottom: "418" top: "419" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "420_BatchNormalization_bn" type: "BatchNorm" bottom: "419" top: "420" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "420_BatchNormalization" type: "Scale" bottom: "420" top: "420" scale_param { bias_term: true } } layer { name: "421_Add" type: "Eltwise" bottom: "412" bottom: "420" top: "421" eltwise_param { operation: SUM } } layer { name: "422_Conv" type: "Convolution" bottom: "421" top: "422" convolution_param { num_output: 576 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 
1 } } layer { name: "423_BatchNormalization_bn" type: "BatchNorm" bottom: "422" top: "423" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "423_BatchNormalization" type: "Scale" bottom: "423" top: "423" scale_param { bias_term: true } } layer { name: "424_Relu" type: "ReLU" bottom: "423" top: "424" } layer { name: "425_Conv" type: "Convolution" bottom: "424" top: "425" convolution_param { num_output: 576 bias_term: false group: 576 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "426_BatchNormalization_bn" type: "BatchNorm" bottom: "425" top: "426" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "426_BatchNormalization" type: "Scale" bottom: "426" top: "426" scale_param { bias_term: true } } layer { name: "427_Relu" type: "ReLU" bottom: "426" top: "427" } layer { name: "428_Conv" type: "Convolution" bottom: "427" top: "428" convolution_param { num_output: 96 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "429_BatchNormalization_bn" type: "BatchNorm" bottom: "428" top: "429" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "429_BatchNormalization" type: "Scale" bottom: "429" top: "429" scale_param { bias_term: true } } layer { name: "430_Add" type: "Eltwise" bottom: "421" bottom: "429" top: "430" eltwise_param { operation: SUM } } layer { name: "431_Conv" type: "Convolution" bottom: "430" top: "431" convolution_param { num_output: 576 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "432_BatchNormalization_bn" type: "BatchNorm" bottom: "431" top: "432" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "432_BatchNormalization" type: "Scale" bottom: "432" top: "432" scale_param { bias_term: true } } layer { name: "433_Relu" type: "ReLU" 
bottom: "432" top: "433" } layer { name: "434_Conv" type: "Convolution" bottom: "433" top: "434" convolution_param { num_output: 576 bias_term: false group: 576 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "435_BatchNormalization_bn" type: "BatchNorm" bottom: "434" top: "435" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "435_BatchNormalization" type: "Scale" bottom: "435" top: "435" scale_param { bias_term: true } } layer { name: "436_Relu" type: "ReLU" bottom: "435" top: "436" } layer { name: "437_Conv" type: "Convolution" bottom: "436" top: "437" convolution_param { num_output: 160 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "438_BatchNormalization_bn" type: "BatchNorm" bottom: "437" top: "438" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "438_BatchNormalization" type: "Scale" bottom: "438" top: "438" scale_param { bias_term: true } } layer { name: "439_Conv" type: "Convolution" bottom: "438" top: "439" convolution_param { num_output: 960 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "440_BatchNormalization_bn" type: "BatchNorm" bottom: "439" top: "440" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "440_BatchNormalization" type: "Scale" bottom: "440" top: "440" scale_param { bias_term: true } } layer { name: "441_Relu" type: "ReLU" bottom: "440" top: "441" } layer { name: "442_Conv" type: "Convolution" bottom: "441" top: "442" convolution_param { num_output: 960 bias_term: false group: 960 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "443_BatchNormalization_bn" type: "BatchNorm" bottom: "442" top: "443" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: 
"443_BatchNormalization" type: "Scale" bottom: "443" top: "443" scale_param { bias_term: true } } layer { name: "444_Relu" type: "ReLU" bottom: "443" top: "444" } layer { name: "445_Conv" type: "Convolution" bottom: "444" top: "445" convolution_param { num_output: 160 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "446_BatchNormalization_bn" type: "BatchNorm" bottom: "445" top: "446" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "446_BatchNormalization" type: "Scale" bottom: "446" top: "446" scale_param { bias_term: true } } layer { name: "447_Add" type: "Eltwise" bottom: "438" bottom: "446" top: "447" eltwise_param { operation: SUM } } layer { name: "448_Conv" type: "Convolution" bottom: "447" top: "448" convolution_param { num_output: 960 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "449_BatchNormalization_bn" type: "BatchNorm" bottom: "448" top: "449" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "449_BatchNormalization" type: "Scale" bottom: "449" top: "449" scale_param { bias_term: true } } layer { name: "450_Relu" type: "ReLU" bottom: "449" top: "450" } layer { name: "451_Conv" type: "Convolution" bottom: "450" top: "451" convolution_param { num_output: 960 bias_term: false group: 960 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "452_BatchNormalization_bn" type: "BatchNorm" bottom: "451" top: "452" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "452_BatchNormalization" type: "Scale" bottom: "452" top: "452" scale_param { bias_term: true } } layer { name: "453_Relu" type: "ReLU" bottom: "452" top: "453" } layer { name: "454_Conv" type: "Convolution" bottom: "453" top: "454" convolution_param { num_output: 160 bias_term: false group: 1 pad_h: 0 pad_w: 0 
kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "455_BatchNormalization_bn" type: "BatchNorm" bottom: "454" top: "455" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "455_BatchNormalization" type: "Scale" bottom: "455" top: "455" scale_param { bias_term: true } } layer { name: "456_Add" type: "Eltwise" bottom: "447" bottom: "455" top: "456" eltwise_param { operation: SUM } } layer { name: "457_Conv" type: "Convolution" bottom: "456" top: "457" convolution_param { num_output: 960 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "458_BatchNormalization_bn" type: "BatchNorm" bottom: "457" top: "458" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "458_BatchNormalization" type: "Scale" bottom: "458" top: "458" scale_param { bias_term: true } } layer { name: "459_Relu" type: "ReLU" bottom: "458" top: "459" } layer { name: "460_Conv" type: "Convolution" bottom: "459" top: "460" convolution_param { num_output: 960 bias_term: false group: 960 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "461_BatchNormalization_bn" type: "BatchNorm" bottom: "460" top: "461" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "461_BatchNormalization" type: "Scale" bottom: "461" top: "461" scale_param { bias_term: true } } layer { name: "462_Relu" type: "ReLU" bottom: "461" top: "462" } layer { name: "463_Conv" type: "Convolution" bottom: "462" top: "463" convolution_param { num_output: 320 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "464_BatchNormalization_bn" type: "BatchNorm" bottom: "463" top: "464" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "464_BatchNormalization" type: "Scale" bottom: "464" top: "464" scale_param { 
bias_term: true } } layer { name: "465_Conv" type: "Convolution" bottom: "464" top: "465" convolution_param { num_output: 1280 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "466_BatchNormalization_bn" type: "BatchNorm" bottom: "465" top: "466" batch_norm_param { use_global_stats: true eps: 9.99999974738e-06 } } layer { name: "466_BatchNormalization" type: "Scale" bottom: "466" top: "466" scale_param { bias_term: true } } layer { name: "467_Relu" type: "ReLU" bottom: "466" top: "467" } layer { name: "468_AveragePool" type: "Pooling" bottom: "467" top: "468" pooling_param { pool: AVE kernel_h: 7 kernel_w: 7 stride_h: 7 stride_w: 7 pad_h: 0 pad_w: 0 } } layer { name: "470_Reshape" type: "Reshape" bottom: "468" top: "470" reshape_param { shape { dim: -1 dim: 1280 } } } layer { name: "471_Gemm" type: "InnerProduct" bottom: "470" top: "471" inner_product_param { num_output: 410 bias_term: true } } layer { name: "472_Gemm" type: "InnerProduct" bottom: "470" top: "472" inner_product_param { num_output: 113 bias_term: true } } layer { name: "473_Gemm" type: "InnerProduct" bottom: "470" top: "473" inner_product_param { num_output: 1 bias_term: true } } layer { name: "474_Sigmoid" type: "Sigmoid" bottom: "471" top: "474" } layer { name: "475_Sigmoid" type: "Sigmoid" bottom: "472" top: "475" } layer { name: "476_Sigmoid" type: "Sigmoid" bottom: "473" top: "476" }