layer { name: "input" type: "Input" top: "input" input_param { shape { dim: 1 dim: 3 dim: 400 dim: 400 } } } layer { name: "245" type: "Convolution" bottom: "input" top: "245" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "246_bn" type: "BatchNorm" bottom: "245" top: "246" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "246" type: "Scale" bottom: "246" top: "246" scale_param { bias_term: true } } layer { name: "247" type: "ReLU" bottom: "246" top: "247" } layer { name: "248" type: "Convolution" bottom: "247" top: "248" convolution_param { num_output: 16 bias_term: false group: 16 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "249_bn" type: "BatchNorm" bottom: "248" top: "249" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "249" type: "Scale" bottom: "249" top: "249" scale_param { bias_term: true } } layer { name: "250" type: "ReLU" bottom: "249" top: "250" } layer { name: "251" type: "Convolution" bottom: "250" top: "251" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "252_bn" type: "BatchNorm" bottom: "251" top: "252" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "252" type: "Scale" bottom: "252" top: "252" scale_param { bias_term: true } } layer { name: "253" type: "ReLU" bottom: "252" top: "253" } layer { name: "254" type: "Convolution" bottom: "253" top: "254" convolution_param { num_output: 32 bias_term: false group: 32 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "255_bn" type: "BatchNorm" bottom: "254" top: "255" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "255" type: "Scale" bottom: "255" top: "255" scale_param { bias_term: true } } layer { name: "256" type: "ReLU" bottom: "255" top: "256" } layer { name: "257" type: "Convolution" bottom: "256" top: "257" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "258_bn" type: "BatchNorm" bottom: "257" top: "258" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "258" type: "Scale" bottom: "258" top: "258" scale_param { bias_term: true } } layer { name: "259" type: "ReLU" bottom: "258" top: "259" } layer { name: "260" type: "Convolution" bottom: "259" top: "260" convolution_param { num_output: 32 bias_term: false group: 32 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "261_bn" type: "BatchNorm" bottom: "260" top: "261" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "261" type: "Scale" bottom: "261" top: "261" scale_param { bias_term: true } } layer { name: "262" type: "ReLU" bottom: "261" top: "262" } layer { name: "263" type: "Convolution" bottom: "262" top: "263" convolution_param { num_output: 32 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "264_bn" type: "BatchNorm" bottom: "263" top: "264" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "264" type: "Scale" bottom: "264" top: "264" scale_param { bias_term: true } } layer { name: "265" type: "ReLU" bottom: "264" top: "265" } layer { name: "266" type: "Eltwise" bottom: "259" bottom: "265" top: "266" eltwise_param { operation: SUM } } layer { name: "267" type: "Convolution" bottom: "266" top: "267" convolution_param { num_output: 32 bias_term: false group: 32 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "268_bn" type: "BatchNorm" bottom: "267" top: "268" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "268" type: "Scale" bottom: "268" top: "268" scale_param { bias_term: true } } layer { name: "269" type: "ReLU" bottom: "268" top: "269" } layer { name: "270" type: "Convolution" bottom: "269" top: "270" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "271_bn" type: "BatchNorm" bottom: "270" top: "271" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "271" type: "Scale" bottom: "271" top: "271" scale_param { bias_term: true } } layer { name: "272" type: "ReLU" bottom: "271" top: "272" } layer { name: "273" type: "Convolution" bottom: "272" top: "273" convolution_param { num_output: 64 bias_term: false group: 64 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "274_bn" type: "BatchNorm" bottom: "273" top: "274" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "274" type: "Scale" bottom: "274" top: "274" scale_param { bias_term: true } } layer { name: "275" type: "ReLU" bottom: "274" top: "275" } layer { name: "276" type: "Convolution" bottom: "275" top: "276" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "277_bn" type: "BatchNorm" bottom: "276" top: "277" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "277" type: "Scale" bottom: "277" top: "277" scale_param { bias_term: true } } layer { name: "278" type: "ReLU" bottom: "277" top: "278" } layer { name: "279" type: "Eltwise" bottom: "272" bottom: "278" top: "279" eltwise_param { operation: SUM } } layer { name: "280" type: "Convolution" bottom: "279" top: "280" convolution_param { num_output: 64 bias_term: false group: 64 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "281_bn" type: "BatchNorm" bottom: "280" top: "281" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "281" type: "Scale" bottom: "281" top: "281" scale_param { bias_term: true } } layer { name: "282" type: "ReLU" bottom: "281" top: "282" } layer { name: "283" type: "Convolution" bottom: "282" top: "283" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "284_bn" type: "BatchNorm" bottom: "283" top: "284" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "284" type: "Scale" bottom: "284" top: "284" scale_param { bias_term: true } } layer { name: "285" type: "ReLU" bottom: "284" top: "285" } layer { name: "286" type: "Eltwise" bottom: "279" bottom: "285" top: "286" eltwise_param { operation: SUM } } layer { name: "287" type: "Convolution" bottom: "286" top: "287" convolution_param { num_output: 8 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "288_bn" type: "BatchNorm" bottom: "287" top: "288" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "288" type: "Scale" bottom: "288" top: "288" scale_param { bias_term: true } } layer { name: "289" type: "Convolution" bottom: "288" top: "289" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "290_bn" type: "BatchNorm" bottom: "289" top: "290" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "290" type: "Scale" bottom: "290" top: "290" scale_param { bias_term: true } } layer { name: "291" type: "ReLU" bottom: "290" top: "291" } layer { name: "292" type: "Convolution" bottom: "291" top: "292" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 2 pad_w: 2 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 2 } } layer { name: "293_bn" type: "BatchNorm" bottom: "292" top: "293" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "293" type: "Scale" bottom: "293" top: "293" scale_param { bias_term: true } } layer { name: "294" type: "Convolution" bottom: "286" top: "294" convolution_param { num_output: 8 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "295_bn" type: "BatchNorm" bottom: "294" top: "295" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "295" type: "Scale" bottom: "295" top: "295" scale_param { bias_term: true } } layer { name: "296" type: "Convolution" bottom: "295" top: "296" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "297_bn" type: "BatchNorm" bottom: "296" top: "297" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "297" type: "Scale" bottom: "297" top: "297" scale_param { bias_term: true } } layer { name: "298" type: "ReLU" bottom: "297" top: "298" } layer { name: "299" type: "Convolution" bottom: "298" top: "299" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 3 pad_w: 3 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 3 } } layer { name: "300_bn" type: "BatchNorm" bottom: "299" top: "300" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "300" type: "Scale" bottom: "300" top: "300" scale_param { bias_term: true } } layer { name: "301" type: "Convolution" bottom: "286" top: "301" convolution_param { num_output: 8 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "302_bn" type: "BatchNorm" bottom: "301" top: "302" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "302" type: "Scale" bottom: "302" top: "302" scale_param { bias_term: true } } layer { name: "303" type: "Convolution" bottom: "302" top: "303" convolution_param { num_output: 12 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "304_bn" type: "BatchNorm" bottom: "303" top: "304" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "304" type: "Scale" bottom: "304" top: "304" scale_param { bias_term: true } } layer { name: "305" type: "ReLU" bottom: "304" top: "305" } layer { name: "306" type: "Convolution" bottom: "305" top: "306" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "307_bn" type: "BatchNorm" bottom: "306" top: "307" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "307" type: "Scale" bottom: "307" top: "307" scale_param { bias_term: true } } layer { name: "308" type: "ReLU" bottom: "307" top: "308" } layer { name: "309" type: "Convolution" bottom: "308" top: "309" convolution_param { num_output: 16 bias_term: false group: 1 pad_h: 5 pad_w: 5 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 5 } } layer { name: "310_bn" type: "BatchNorm" bottom: "309" top: "310" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "310" type: "Scale" bottom: "310" top: "310" scale_param { bias_term: true } } layer { name: "311" type: "Concat" bottom: "293" bottom: "300" bottom: "310" top: "311" concat_param { axis: 1 } } layer { name: "312" type: "Convolution" bottom: "311" top: "312" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "313_bn" type: "BatchNorm" bottom: "312" top: "313" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "313" type: "Scale" bottom: "313" top: "313" scale_param { bias_term: true } } layer { name: "314" type: "Convolution" bottom: "286" top: "314" convolution_param { num_output: 64 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "315_bn" type: "BatchNorm" bottom: "314" top: "315" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "315" type: "Scale" bottom: "315" top: "315" scale_param { bias_term: true } } layer { name: "316" type: "Eltwise" bottom: "313" bottom: "315" top: "316" eltwise_param { operation: SUM } } layer { name: "317" type: "ReLU" bottom: "316" top: "317" } layer { name: "318" type: "Convolution" bottom: "317" top: "318" convolution_param { num_output: 64 bias_term: true group: 64 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "319" type: "ReLU" bottom: "318" top: "319" } layer { name: "320" type: "Convolution" bottom: "319" top: "320" convolution_param { num_output: 6 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "321" type: "Permute" bottom: "320" top: "321" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "331" type: "Reshape" bottom: "321" top: "331" reshape_param { shape { dim: 1 dim: -1 dim: 2 } } } layer { name: "332" type: "Convolution" bottom: "317" top: "332" convolution_param { num_output: 64 bias_term: true group: 64 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "333" type: "ReLU" bottom: "332" top: "333" } layer { name: "334" type: "Convolution" bottom: "333" top: "334" convolution_param { num_output: 12 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "335" type: "Permute" bottom: "334" top: "335" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "345" type: "Reshape" bottom: "335" top: "345" reshape_param { shape { dim: 1 dim: -1 dim: 4 } } } layer { name: "346" type: "Convolution" bottom: "317" top: "346" convolution_param { num_output: 64 bias_term: false group: 64 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "347_bn" type: "BatchNorm" bottom: "346" top: "347" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "347" type: "Scale" bottom: "347" top: "347" scale_param { bias_term: true } } layer { name: "348" type: "ReLU" bottom: "347" top: "348" } layer { name: "349" type: "Convolution" bottom: "348" top: "349" convolution_param { num_output: 128 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "350_bn" type: "BatchNorm" bottom: "349" top: "350" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "350" type: "Scale" bottom: "350" top: "350" scale_param { bias_term: true } } layer { name: "351" type: "ReLU" bottom: "350" top: "351" } layer { name: "352" type: "Convolution" bottom: "351" top: "352" convolution_param { num_output: 128 bias_term: false group: 128 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "353_bn" type: "BatchNorm" bottom: "352" top: "353" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "353" type: "Scale" bottom: "353" top: "353" scale_param { bias_term: true } } layer { name: "354" type: "ReLU" bottom: "353" top: "354" } layer { name: "355" type: "Convolution" bottom: "354" top: "355" convolution_param { num_output: 128 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "356_bn" type: "BatchNorm" bottom: "355" top: "356" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "356" type: "Scale" bottom: "356" top: "356" scale_param { bias_term: true } } layer { name: "357" type: "ReLU" bottom: "356" top: "357" } layer { name: "358" type: "Eltwise" bottom: "351" bottom: "357" top: "358" eltwise_param { operation: SUM } } layer { name: "359" type: "Convolution" bottom: "358" top: "359" convolution_param { num_output: 128 bias_term: false group: 128 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "360_bn" type: "BatchNorm" bottom: "359" top: "360" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "360" type: "Scale" bottom: "360" top: "360" scale_param { bias_term: true } } layer { name: "361" type: "ReLU" bottom: "360" top: "361" } layer { name: "362" type: "Convolution" bottom: "361" top: "362" convolution_param { num_output: 128 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "363_bn" type: "BatchNorm" bottom: "362" top: "363" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "363" type: "Scale" bottom: "363" top: "363" scale_param { bias_term: true } } layer { name: "364" type: "ReLU" bottom: "363" top: "364" } layer { name: "365" type: "Eltwise" bottom: "358" bottom: "364" top: "365" eltwise_param { operation: SUM } } layer { name: "366" type: "Convolution" bottom: "365" top: "366" convolution_param { num_output: 128 bias_term: true group: 128 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "367" type: "ReLU" bottom: "366" top: "367" } layer { name: "368" type: "Convolution" bottom: "367" top: "368" convolution_param { num_output: 6 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "369" type: "Permute" bottom: "368" top: "369" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "379" type: "Reshape" bottom: "369" top: "379" reshape_param { shape { dim: 1 dim: -1 dim: 2 } } } layer { name: "380" type: "Convolution" bottom: "365" top: "380" convolution_param { num_output: 128 bias_term: true group: 128 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "381" type: "ReLU" bottom: "380" top: "381" } layer { name: "382" type: "Convolution" bottom: "381" top: "382" convolution_param { num_output: 12 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "383" type: "Permute" bottom: "382" top: "383" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "393" type: "Reshape" bottom: "383" top: "393" reshape_param { shape { dim: 1 dim: -1 dim: 4 } } } layer { name: "394" type: "Convolution" bottom: "365" top: "394" convolution_param { num_output: 128 bias_term: false group: 128 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "395_bn" type: "BatchNorm" bottom: "394" top: "395" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "395" type: "Scale" bottom: "395" top: "395" scale_param { bias_term: true } } layer { name: "396" type: "ReLU" bottom: "395" top: "396" } layer { name: "397" type: "Convolution" bottom: "396" top: "397" convolution_param { num_output: 256 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "398_bn" type: "BatchNorm" bottom: "397" top: "398" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "398" type: "Scale" bottom: "398" top: "398" scale_param { bias_term: true } } layer { name: "399" type: "ReLU" bottom: "398" top: "399" } layer { name: "400" type: "Convolution" bottom: "399" top: "400" convolution_param { num_output: 256 bias_term: false group: 256 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "401_bn" type: "BatchNorm" bottom: "400" top: "401" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "401" type: "Scale" bottom: "401" top: "401" scale_param { bias_term: true } } layer { name: "402" type: "ReLU" bottom: "401" top: "402" } layer { name: "403" type: "Convolution" bottom: "402" top: "403" convolution_param { num_output: 256 bias_term: false group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "404_bn" type: "BatchNorm" bottom: "403" top: "404" batch_norm_param { use_global_stats: true eps: 9.999999747378752e-06 } } layer { name: "404" type: "Scale" bottom: "404" top: "404" scale_param { bias_term: true } } layer { name: "405" type: "ReLU" bottom: "404" top: "405" } layer { name: "406" type: "Eltwise" bottom: "399" bottom: "405" top: "406" eltwise_param { operation: SUM } } layer { name: "407" type: "Convolution" bottom: "406" top: "407" convolution_param { num_output: 256 bias_term: true group: 256 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "408" type: "ReLU" bottom: "407" top: "408" } layer { name: "409" type: "Convolution" bottom: "408" top: "409" convolution_param { num_output: 6 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "410" type: "Permute" bottom: "409" top: "410" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "420" type: "Reshape" bottom: "410" top: "420" reshape_param { shape { dim: 1 dim: -1 dim: 2 } } } layer { name: "421" type: "Convolution" bottom: "406" top: "421" convolution_param { num_output: 256 bias_term: true group: 256 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "422" type: "ReLU" bottom: "421" top: "422" } layer { name: "423" type: "Convolution" bottom: "422" top: "423" convolution_param { num_output: 12 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "424" type: "Permute" bottom: "423" top: "424" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "434" type: "Reshape" bottom: "424" top: "434" reshape_param { shape { dim: 1 dim: -1 dim: 4 } } } layer { name: "435" type: "Convolution" bottom: "406" top: "435" convolution_param { num_output: 64 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "436" type: "ReLU" bottom: "435" top: "436" } layer { name: "437" type: "Convolution" bottom: "436" top: "437" convolution_param { num_output: 64 bias_term: true group: 64 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 2 stride_w: 2 dilation: 1 } } layer { name: "438" type: "ReLU" bottom: "437" top: "438" } layer { name: "439" type: "Convolution" bottom: "438" top: "439" convolution_param { num_output: 256 bias_term: true group: 1 pad_h: 0 pad_w: 0 kernel_h: 1 kernel_w: 1 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "440" type: "ReLU" bottom: "439" top: "440" } layer { name: "441" type: "Convolution" bottom: "440" top: "441" convolution_param { num_output: 6 bias_term: true group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "442" type: "Permute" bottom: "441" top: "442" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "452" type: "Reshape" bottom: "442" top: "452" reshape_param { shape { dim: 1 dim: -1 dim: 2 } } } layer { name: "453" type: "Convolution" bottom: "440" top: "453" convolution_param { num_output: 12 bias_term: true group: 1 pad_h: 1 pad_w: 1 kernel_h: 3 kernel_w: 3 stride_h: 1 stride_w: 1 dilation: 1 } } layer { name: "454" type: "Permute" bottom: "453" top: "454" permute_param { order: 0 order: 2 order: 3 order: 1 } } layer { name: "464" type: "Reshape" bottom: "454" top: "464" reshape_param { shape { dim: 1 dim: -1 dim: 4 } } } layer { name: "465" type: "Concat" bottom: "331" bottom: "379" bottom: "420" bottom: "452" top: "465" concat_param { axis: 1 } } layer { name: "boxes" type: "Concat" bottom: "345" bottom: "393" bottom: "434" bottom: "464" top: "boxes" concat_param { axis: 1 } } layer { name: "scores" type: "Softmax" bottom: "465" top: "scores" softmax_param { axis: 2 } }