Commit 996462f

authored Dec 17, 2016
update resnext-50
1 parent b539a73 commit 996462f

File tree: 1 file changed, +2474 −0 lines changed

ResNeXt-50-deploy.prototxt

name: "ResNet"
layer {
  name: "data"
  type: "Input"
  top: "data"
  input_param { shape: { dim: 1 dim: 3 dim: 224 dim: 224 } }
}

layer {
  name: "bn_data"
  type: "BatchNorm"
  bottom: "data"
  top: "data"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_bn_data"
  bottom: "data"
  top: "data"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "conv0"
  type: "Convolution"
  bottom: "data"
  top: "conv0"
  convolution_param {
    num_output: 64
    kernel_size: 7
    stride: 2
    pad: 3
    bias_term: false
  }
}

layer {
  name: "bn0"
  type: "BatchNorm"
  bottom: "conv0"
  top: "conv0"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_bn0"
  bottom: "conv0"
  top: "conv0"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "relu0"
  type: "ReLU"
  bottom: "conv0"
  top: "conv0"
}

layer {
  name: "pooling0"
  type: "Pooling"
  bottom: "conv0"
  top: "pooling0"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}

layer {
  name: "stage1_unit1_conv1"
  type: "Convolution"
  bottom: "pooling0"
  top: "stage1_unit1_conv1"
  convolution_param {
    num_output: 128
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage1_unit1_bn1"
  type: "BatchNorm"
  bottom: "stage1_unit1_conv1"
  top: "stage1_unit1_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit1_bn1"
  bottom: "stage1_unit1_conv1"
  top: "stage1_unit1_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit1_relu1"
  type: "ReLU"
  bottom: "stage1_unit1_conv1"
  top: "stage1_unit1_conv1"
}

layer {
  name: "stage1_unit1_conv2"
  type: "Convolution"
  bottom: "stage1_unit1_conv1"
  top: "stage1_unit1_conv2"
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage1_unit1_bn2"
  type: "BatchNorm"
  bottom: "stage1_unit1_conv2"
  top: "stage1_unit1_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit1_bn2"
  bottom: "stage1_unit1_conv2"
  top: "stage1_unit1_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit1_relu2"
  type: "ReLU"
  bottom: "stage1_unit1_conv2"
  top: "stage1_unit1_conv2"
}

layer {
  name: "stage1_unit1_conv3"
  type: "Convolution"
  bottom: "stage1_unit1_conv2"
  top: "stage1_unit1_conv3"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage1_unit1_bn3"
  type: "BatchNorm"
  bottom: "stage1_unit1_conv3"
  top: "stage1_unit1_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit1_bn3"
  bottom: "stage1_unit1_conv3"
  top: "stage1_unit1_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit1_sc"
  type: "Convolution"
  bottom: "pooling0"
  top: "stage1_unit1_sc"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage1_unit1_sc_bn"
  type: "BatchNorm"
  bottom: "stage1_unit1_sc"
  top: "stage1_unit1_sc"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit1_sc_bn"
  bottom: "stage1_unit1_sc"
  top: "stage1_unit1_sc"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit1_plus"
  type: "Eltwise"
  bottom: "stage1_unit1_sc"
  bottom: "stage1_unit1_conv3"
  top: "stage1_unit1_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage1_unit1_relu"
  type: "ReLU"
  bottom: "stage1_unit1_plus"
  top: "stage1_unit1_plus"
}

layer {
  name: "stage1_unit2_conv1"
  type: "Convolution"
  bottom: "stage1_unit1_plus"
  top: "stage1_unit2_conv1"
  convolution_param {
    num_output: 128
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage1_unit2_bn1"
  type: "BatchNorm"
  bottom: "stage1_unit2_conv1"
  top: "stage1_unit2_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit2_bn1"
  bottom: "stage1_unit2_conv1"
  top: "stage1_unit2_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit2_relu1"
  type: "ReLU"
  bottom: "stage1_unit2_conv1"
  top: "stage1_unit2_conv1"
}

layer {
  name: "stage1_unit2_conv2"
  type: "Convolution"
  bottom: "stage1_unit2_conv1"
  top: "stage1_unit2_conv2"
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage1_unit2_bn2"
  type: "BatchNorm"
  bottom: "stage1_unit2_conv2"
  top: "stage1_unit2_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit2_bn2"
  bottom: "stage1_unit2_conv2"
  top: "stage1_unit2_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit2_relu2"
  type: "ReLU"
  bottom: "stage1_unit2_conv2"
  top: "stage1_unit2_conv2"
}

layer {
  name: "stage1_unit2_conv3"
  type: "Convolution"
  bottom: "stage1_unit2_conv2"
  top: "stage1_unit2_conv3"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage1_unit2_bn3"
  type: "BatchNorm"
  bottom: "stage1_unit2_conv3"
  top: "stage1_unit2_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit2_bn3"
  bottom: "stage1_unit2_conv3"
  top: "stage1_unit2_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit2_plus"
  type: "Eltwise"
  bottom: "stage1_unit1_plus"
  bottom: "stage1_unit2_conv3"
  top: "stage1_unit2_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage1_unit2_relu"
  type: "ReLU"
  bottom: "stage1_unit2_plus"
  top: "stage1_unit2_plus"
}

layer {
  name: "stage1_unit3_conv1"
  type: "Convolution"
  bottom: "stage1_unit2_plus"
  top: "stage1_unit3_conv1"
  convolution_param {
    num_output: 128
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage1_unit3_bn1"
  type: "BatchNorm"
  bottom: "stage1_unit3_conv1"
  top: "stage1_unit3_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit3_bn1"
  bottom: "stage1_unit3_conv1"
  top: "stage1_unit3_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit3_relu1"
  type: "ReLU"
  bottom: "stage1_unit3_conv1"
  top: "stage1_unit3_conv1"
}

layer {
  name: "stage1_unit3_conv2"
  type: "Convolution"
  bottom: "stage1_unit3_conv1"
  top: "stage1_unit3_conv2"
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage1_unit3_bn2"
  type: "BatchNorm"
  bottom: "stage1_unit3_conv2"
  top: "stage1_unit3_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit3_bn2"
  bottom: "stage1_unit3_conv2"
  top: "stage1_unit3_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit3_relu2"
  type: "ReLU"
  bottom: "stage1_unit3_conv2"
  top: "stage1_unit3_conv2"
}

layer {
  name: "stage1_unit3_conv3"
  type: "Convolution"
  bottom: "stage1_unit3_conv2"
  top: "stage1_unit3_conv3"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage1_unit3_bn3"
  type: "BatchNorm"
  bottom: "stage1_unit3_conv3"
  top: "stage1_unit3_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage1_unit3_bn3"
  bottom: "stage1_unit3_conv3"
  top: "stage1_unit3_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage1_unit3_plus"
  type: "Eltwise"
  bottom: "stage1_unit2_plus"
  bottom: "stage1_unit3_conv3"
  top: "stage1_unit3_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage1_unit3_relu"
  type: "ReLU"
  bottom: "stage1_unit3_plus"
  top: "stage1_unit3_plus"
}

layer {
  name: "stage2_unit1_conv1"
  type: "Convolution"
  bottom: "stage1_unit3_plus"
  top: "stage2_unit1_conv1"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit1_bn1"
  type: "BatchNorm"
  bottom: "stage2_unit1_conv1"
  top: "stage2_unit1_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit1_bn1"
  bottom: "stage2_unit1_conv1"
  top: "stage2_unit1_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit1_relu1"
  type: "ReLU"
  bottom: "stage2_unit1_conv1"
  top: "stage2_unit1_conv1"
}

layer {
  name: "stage2_unit1_conv2"
  type: "Convolution"
  bottom: "stage2_unit1_conv1"
  top: "stage2_unit1_conv2"
  convolution_param {
    num_output: 256
    kernel_size: 3
    stride: 2
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage2_unit1_bn2"
  type: "BatchNorm"
  bottom: "stage2_unit1_conv2"
  top: "stage2_unit1_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit1_bn2"
  bottom: "stage2_unit1_conv2"
  top: "stage2_unit1_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit1_relu2"
  type: "ReLU"
  bottom: "stage2_unit1_conv2"
  top: "stage2_unit1_conv2"
}

layer {
  name: "stage2_unit1_conv3"
  type: "Convolution"
  bottom: "stage2_unit1_conv2"
  top: "stage2_unit1_conv3"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit1_bn3"
  type: "BatchNorm"
  bottom: "stage2_unit1_conv3"
  top: "stage2_unit1_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit1_bn3"
  bottom: "stage2_unit1_conv3"
  top: "stage2_unit1_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit1_sc"
  type: "Convolution"
  bottom: "stage1_unit3_plus"
  top: "stage2_unit1_sc"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 2
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit1_sc_bn"
  type: "BatchNorm"
  bottom: "stage2_unit1_sc"
  top: "stage2_unit1_sc"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit1_sc_bn"
  bottom: "stage2_unit1_sc"
  top: "stage2_unit1_sc"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit1_plus"
  type: "Eltwise"
  bottom: "stage2_unit1_sc"
  bottom: "stage2_unit1_conv3"
  top: "stage2_unit1_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage2_unit1_relu"
  type: "ReLU"
  bottom: "stage2_unit1_plus"
  top: "stage2_unit1_plus"
}

layer {
  name: "stage2_unit2_conv1"
  type: "Convolution"
  bottom: "stage2_unit1_plus"
  top: "stage2_unit2_conv1"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit2_bn1"
  type: "BatchNorm"
  bottom: "stage2_unit2_conv1"
  top: "stage2_unit2_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit2_bn1"
  bottom: "stage2_unit2_conv1"
  top: "stage2_unit2_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit2_relu1"
  type: "ReLU"
  bottom: "stage2_unit2_conv1"
  top: "stage2_unit2_conv1"
}

layer {
  name: "stage2_unit2_conv2"
  type: "Convolution"
  bottom: "stage2_unit2_conv1"
  top: "stage2_unit2_conv2"
  convolution_param {
    num_output: 256
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage2_unit2_bn2"
  type: "BatchNorm"
  bottom: "stage2_unit2_conv2"
  top: "stage2_unit2_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit2_bn2"
  bottom: "stage2_unit2_conv2"
  top: "stage2_unit2_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit2_relu2"
  type: "ReLU"
  bottom: "stage2_unit2_conv2"
  top: "stage2_unit2_conv2"
}

layer {
  name: "stage2_unit2_conv3"
  type: "Convolution"
  bottom: "stage2_unit2_conv2"
  top: "stage2_unit2_conv3"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit2_bn3"
  type: "BatchNorm"
  bottom: "stage2_unit2_conv3"
  top: "stage2_unit2_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit2_bn3"
  bottom: "stage2_unit2_conv3"
  top: "stage2_unit2_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit2_plus"
  type: "Eltwise"
  bottom: "stage2_unit1_plus"
  bottom: "stage2_unit2_conv3"
  top: "stage2_unit2_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage2_unit2_relu"
  type: "ReLU"
  bottom: "stage2_unit2_plus"
  top: "stage2_unit2_plus"
}

layer {
  name: "stage2_unit3_conv1"
  type: "Convolution"
  bottom: "stage2_unit2_plus"
  top: "stage2_unit3_conv1"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit3_bn1"
  type: "BatchNorm"
  bottom: "stage2_unit3_conv1"
  top: "stage2_unit3_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit3_bn1"
  bottom: "stage2_unit3_conv1"
  top: "stage2_unit3_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit3_relu1"
  type: "ReLU"
  bottom: "stage2_unit3_conv1"
  top: "stage2_unit3_conv1"
}

layer {
  name: "stage2_unit3_conv2"
  type: "Convolution"
  bottom: "stage2_unit3_conv1"
  top: "stage2_unit3_conv2"
  convolution_param {
    num_output: 256
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage2_unit3_bn2"
  type: "BatchNorm"
  bottom: "stage2_unit3_conv2"
  top: "stage2_unit3_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit3_bn2"
  bottom: "stage2_unit3_conv2"
  top: "stage2_unit3_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit3_relu2"
  type: "ReLU"
  bottom: "stage2_unit3_conv2"
  top: "stage2_unit3_conv2"
}

layer {
  name: "stage2_unit3_conv3"
  type: "Convolution"
  bottom: "stage2_unit3_conv2"
  top: "stage2_unit3_conv3"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit3_bn3"
  type: "BatchNorm"
  bottom: "stage2_unit3_conv3"
  top: "stage2_unit3_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit3_bn3"
  bottom: "stage2_unit3_conv3"
  top: "stage2_unit3_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit3_plus"
  type: "Eltwise"
  bottom: "stage2_unit2_plus"
  bottom: "stage2_unit3_conv3"
  top: "stage2_unit3_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage2_unit3_relu"
  type: "ReLU"
  bottom: "stage2_unit3_plus"
  top: "stage2_unit3_plus"
}

layer {
  name: "stage2_unit4_conv1"
  type: "Convolution"
  bottom: "stage2_unit3_plus"
  top: "stage2_unit4_conv1"
  convolution_param {
    num_output: 256
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit4_bn1"
  type: "BatchNorm"
  bottom: "stage2_unit4_conv1"
  top: "stage2_unit4_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit4_bn1"
  bottom: "stage2_unit4_conv1"
  top: "stage2_unit4_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit4_relu1"
  type: "ReLU"
  bottom: "stage2_unit4_conv1"
  top: "stage2_unit4_conv1"
}

layer {
  name: "stage2_unit4_conv2"
  type: "Convolution"
  bottom: "stage2_unit4_conv1"
  top: "stage2_unit4_conv2"
  convolution_param {
    num_output: 256
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage2_unit4_bn2"
  type: "BatchNorm"
  bottom: "stage2_unit4_conv2"
  top: "stage2_unit4_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit4_bn2"
  bottom: "stage2_unit4_conv2"
  top: "stage2_unit4_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit4_relu2"
  type: "ReLU"
  bottom: "stage2_unit4_conv2"
  top: "stage2_unit4_conv2"
}

layer {
  name: "stage2_unit4_conv3"
  type: "Convolution"
  bottom: "stage2_unit4_conv2"
  top: "stage2_unit4_conv3"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage2_unit4_bn3"
  type: "BatchNorm"
  bottom: "stage2_unit4_conv3"
  top: "stage2_unit4_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage2_unit4_bn3"
  bottom: "stage2_unit4_conv3"
  top: "stage2_unit4_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage2_unit4_plus"
  type: "Eltwise"
  bottom: "stage2_unit3_plus"
  bottom: "stage2_unit4_conv3"
  top: "stage2_unit4_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage2_unit4_relu"
  type: "ReLU"
  bottom: "stage2_unit4_plus"
  top: "stage2_unit4_plus"
}

layer {
  name: "stage3_unit1_conv1"
  type: "Convolution"
  bottom: "stage2_unit4_plus"
  top: "stage3_unit1_conv1"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit1_bn1"
  type: "BatchNorm"
  bottom: "stage3_unit1_conv1"
  top: "stage3_unit1_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit1_bn1"
  bottom: "stage3_unit1_conv1"
  top: "stage3_unit1_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit1_relu1"
  type: "ReLU"
  bottom: "stage3_unit1_conv1"
  top: "stage3_unit1_conv1"
}

layer {
  name: "stage3_unit1_conv2"
  type: "Convolution"
  bottom: "stage3_unit1_conv1"
  top: "stage3_unit1_conv2"
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 2
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage3_unit1_bn2"
  type: "BatchNorm"
  bottom: "stage3_unit1_conv2"
  top: "stage3_unit1_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit1_bn2"
  bottom: "stage3_unit1_conv2"
  top: "stage3_unit1_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit1_relu2"
  type: "ReLU"
  bottom: "stage3_unit1_conv2"
  top: "stage3_unit1_conv2"
}

layer {
  name: "stage3_unit1_conv3"
  type: "Convolution"
  bottom: "stage3_unit1_conv2"
  top: "stage3_unit1_conv3"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit1_bn3"
  type: "BatchNorm"
  bottom: "stage3_unit1_conv3"
  top: "stage3_unit1_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit1_bn3"
  bottom: "stage3_unit1_conv3"
  top: "stage3_unit1_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit1_sc"
  type: "Convolution"
  bottom: "stage2_unit4_plus"
  top: "stage3_unit1_sc"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 2
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit1_sc_bn"
  type: "BatchNorm"
  bottom: "stage3_unit1_sc"
  top: "stage3_unit1_sc"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit1_sc_bn"
  bottom: "stage3_unit1_sc"
  top: "stage3_unit1_sc"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit1_plus"
  type: "Eltwise"
  bottom: "stage3_unit1_sc"
  bottom: "stage3_unit1_conv3"
  top: "stage3_unit1_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage3_unit1_relu"
  type: "ReLU"
  bottom: "stage3_unit1_plus"
  top: "stage3_unit1_plus"
}

layer {
  name: "stage3_unit2_conv1"
  type: "Convolution"
  bottom: "stage3_unit1_plus"
  top: "stage3_unit2_conv1"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit2_bn1"
  type: "BatchNorm"
  bottom: "stage3_unit2_conv1"
  top: "stage3_unit2_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit2_bn1"
  bottom: "stage3_unit2_conv1"
  top: "stage3_unit2_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit2_relu1"
  type: "ReLU"
  bottom: "stage3_unit2_conv1"
  top: "stage3_unit2_conv1"
}

layer {
  name: "stage3_unit2_conv2"
  type: "Convolution"
  bottom: "stage3_unit2_conv1"
  top: "stage3_unit2_conv2"
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage3_unit2_bn2"
  type: "BatchNorm"
  bottom: "stage3_unit2_conv2"
  top: "stage3_unit2_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit2_bn2"
  bottom: "stage3_unit2_conv2"
  top: "stage3_unit2_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit2_relu2"
  type: "ReLU"
  bottom: "stage3_unit2_conv2"
  top: "stage3_unit2_conv2"
}

layer {
  name: "stage3_unit2_conv3"
  type: "Convolution"
  bottom: "stage3_unit2_conv2"
  top: "stage3_unit2_conv3"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit2_bn3"
  type: "BatchNorm"
  bottom: "stage3_unit2_conv3"
  top: "stage3_unit2_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit2_bn3"
  bottom: "stage3_unit2_conv3"
  top: "stage3_unit2_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit2_plus"
  type: "Eltwise"
  bottom: "stage3_unit1_plus"
  bottom: "stage3_unit2_conv3"
  top: "stage3_unit2_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage3_unit2_relu"
  type: "ReLU"
  bottom: "stage3_unit2_plus"
  top: "stage3_unit2_plus"
}

layer {
  name: "stage3_unit3_conv1"
  type: "Convolution"
  bottom: "stage3_unit2_plus"
  top: "stage3_unit3_conv1"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit3_bn1"
  type: "BatchNorm"
  bottom: "stage3_unit3_conv1"
  top: "stage3_unit3_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit3_bn1"
  bottom: "stage3_unit3_conv1"
  top: "stage3_unit3_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit3_relu1"
  type: "ReLU"
  bottom: "stage3_unit3_conv1"
  top: "stage3_unit3_conv1"
}

layer {
  name: "stage3_unit3_conv2"
  type: "Convolution"
  bottom: "stage3_unit3_conv1"
  top: "stage3_unit3_conv2"
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage3_unit3_bn2"
  type: "BatchNorm"
  bottom: "stage3_unit3_conv2"
  top: "stage3_unit3_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit3_bn2"
  bottom: "stage3_unit3_conv2"
  top: "stage3_unit3_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit3_relu2"
  type: "ReLU"
  bottom: "stage3_unit3_conv2"
  top: "stage3_unit3_conv2"
}

layer {
  name: "stage3_unit3_conv3"
  type: "Convolution"
  bottom: "stage3_unit3_conv2"
  top: "stage3_unit3_conv3"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit3_bn3"
  type: "BatchNorm"
  bottom: "stage3_unit3_conv3"
  top: "stage3_unit3_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit3_bn3"
  bottom: "stage3_unit3_conv3"
  top: "stage3_unit3_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit3_plus"
  type: "Eltwise"
  bottom: "stage3_unit2_plus"
  bottom: "stage3_unit3_conv3"
  top: "stage3_unit3_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage3_unit3_relu"
  type: "ReLU"
  bottom: "stage3_unit3_plus"
  top: "stage3_unit3_plus"
}

layer {
  name: "stage3_unit4_conv1"
  type: "Convolution"
  bottom: "stage3_unit3_plus"
  top: "stage3_unit4_conv1"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit4_bn1"
  type: "BatchNorm"
  bottom: "stage3_unit4_conv1"
  top: "stage3_unit4_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit4_bn1"
  bottom: "stage3_unit4_conv1"
  top: "stage3_unit4_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit4_relu1"
  type: "ReLU"
  bottom: "stage3_unit4_conv1"
  top: "stage3_unit4_conv1"
}

layer {
  name: "stage3_unit4_conv2"
  type: "Convolution"
  bottom: "stage3_unit4_conv1"
  top: "stage3_unit4_conv2"
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage3_unit4_bn2"
  type: "BatchNorm"
  bottom: "stage3_unit4_conv2"
  top: "stage3_unit4_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit4_bn2"
  bottom: "stage3_unit4_conv2"
  top: "stage3_unit4_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit4_relu2"
  type: "ReLU"
  bottom: "stage3_unit4_conv2"
  top: "stage3_unit4_conv2"
}

layer {
  name: "stage3_unit4_conv3"
  type: "Convolution"
  bottom: "stage3_unit4_conv2"
  top: "stage3_unit4_conv3"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit4_bn3"
  type: "BatchNorm"
  bottom: "stage3_unit4_conv3"
  top: "stage3_unit4_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit4_bn3"
  bottom: "stage3_unit4_conv3"
  top: "stage3_unit4_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit4_plus"
  type: "Eltwise"
  bottom: "stage3_unit3_plus"
  bottom: "stage3_unit4_conv3"
  top: "stage3_unit4_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage3_unit4_relu"
  type: "ReLU"
  bottom: "stage3_unit4_plus"
  top: "stage3_unit4_plus"
}

layer {
  name: "stage3_unit5_conv1"
  type: "Convolution"
  bottom: "stage3_unit4_plus"
  top: "stage3_unit5_conv1"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit5_bn1"
  type: "BatchNorm"
  bottom: "stage3_unit5_conv1"
  top: "stage3_unit5_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit5_bn1"
  bottom: "stage3_unit5_conv1"
  top: "stage3_unit5_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit5_relu1"
  type: "ReLU"
  bottom: "stage3_unit5_conv1"
  top: "stage3_unit5_conv1"
}

layer {
  name: "stage3_unit5_conv2"
  type: "Convolution"
  bottom: "stage3_unit5_conv1"
  top: "stage3_unit5_conv2"
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage3_unit5_bn2"
  type: "BatchNorm"
  bottom: "stage3_unit5_conv2"
  top: "stage3_unit5_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit5_bn2"
  bottom: "stage3_unit5_conv2"
  top: "stage3_unit5_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit5_relu2"
  type: "ReLU"
  bottom: "stage3_unit5_conv2"
  top: "stage3_unit5_conv2"
}

layer {
  name: "stage3_unit5_conv3"
  type: "Convolution"
  bottom: "stage3_unit5_conv2"
  top: "stage3_unit5_conv3"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit5_bn3"
  type: "BatchNorm"
  bottom: "stage3_unit5_conv3"
  top: "stage3_unit5_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit5_bn3"
  bottom: "stage3_unit5_conv3"
  top: "stage3_unit5_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit5_plus"
  type: "Eltwise"
  bottom: "stage3_unit4_plus"
  bottom: "stage3_unit5_conv3"
  top: "stage3_unit5_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage3_unit5_relu"
  type: "ReLU"
  bottom: "stage3_unit5_plus"
  top: "stage3_unit5_plus"
}

layer {
  name: "stage3_unit6_conv1"
  type: "Convolution"
  bottom: "stage3_unit5_plus"
  top: "stage3_unit6_conv1"
  convolution_param {
    num_output: 512
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit6_bn1"
  type: "BatchNorm"
  bottom: "stage3_unit6_conv1"
  top: "stage3_unit6_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit6_bn1"
  bottom: "stage3_unit6_conv1"
  top: "stage3_unit6_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit6_relu1"
  type: "ReLU"
  bottom: "stage3_unit6_conv1"
  top: "stage3_unit6_conv1"
}

layer {
  name: "stage3_unit6_conv2"
  type: "Convolution"
  bottom: "stage3_unit6_conv1"
  top: "stage3_unit6_conv2"
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage3_unit6_bn2"
  type: "BatchNorm"
  bottom: "stage3_unit6_conv2"
  top: "stage3_unit6_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit6_bn2"
  bottom: "stage3_unit6_conv2"
  top: "stage3_unit6_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit6_relu2"
  type: "ReLU"
  bottom: "stage3_unit6_conv2"
  top: "stage3_unit6_conv2"
}

layer {
  name: "stage3_unit6_conv3"
  type: "Convolution"
  bottom: "stage3_unit6_conv2"
  top: "stage3_unit6_conv3"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage3_unit6_bn3"
  type: "BatchNorm"
  bottom: "stage3_unit6_conv3"
  top: "stage3_unit6_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage3_unit6_bn3"
  bottom: "stage3_unit6_conv3"
  top: "stage3_unit6_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage3_unit6_plus"
  type: "Eltwise"
  bottom: "stage3_unit5_plus"
  bottom: "stage3_unit6_conv3"
  top: "stage3_unit6_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage3_unit6_relu"
  type: "ReLU"
  bottom: "stage3_unit6_plus"
  top: "stage3_unit6_plus"
}

layer {
  name: "stage4_unit1_conv1"
  type: "Convolution"
  bottom: "stage3_unit6_plus"
  top: "stage4_unit1_conv1"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage4_unit1_bn1"
  type: "BatchNorm"
  bottom: "stage4_unit1_conv1"
  top: "stage4_unit1_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit1_bn1"
  bottom: "stage4_unit1_conv1"
  top: "stage4_unit1_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit1_relu1"
  type: "ReLU"
  bottom: "stage4_unit1_conv1"
  top: "stage4_unit1_conv1"
}

layer {
  name: "stage4_unit1_conv2"
  type: "Convolution"
  bottom: "stage4_unit1_conv1"
  top: "stage4_unit1_conv2"
  convolution_param {
    num_output: 1024
    kernel_size: 3
    stride: 2
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage4_unit1_bn2"
  type: "BatchNorm"
  bottom: "stage4_unit1_conv2"
  top: "stage4_unit1_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit1_bn2"
  bottom: "stage4_unit1_conv2"
  top: "stage4_unit1_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit1_relu2"
  type: "ReLU"
  bottom: "stage4_unit1_conv2"
  top: "stage4_unit1_conv2"
}

layer {
  name: "stage4_unit1_conv3"
  type: "Convolution"
  bottom: "stage4_unit1_conv2"
  top: "stage4_unit1_conv3"
  convolution_param {
    num_output: 2048
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage4_unit1_bn3"
  type: "BatchNorm"
  bottom: "stage4_unit1_conv3"
  top: "stage4_unit1_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit1_bn3"
  bottom: "stage4_unit1_conv3"
  top: "stage4_unit1_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit1_sc"
  type: "Convolution"
  bottom: "stage3_unit6_plus"
  top: "stage4_unit1_sc"
  convolution_param {
    num_output: 2048
    kernel_size: 1
    stride: 2
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage4_unit1_sc_bn"
  type: "BatchNorm"
  bottom: "stage4_unit1_sc"
  top: "stage4_unit1_sc"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit1_sc_bn"
  bottom: "stage4_unit1_sc"
  top: "stage4_unit1_sc"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit1_plus"
  type: "Eltwise"
  bottom: "stage4_unit1_sc"
  bottom: "stage4_unit1_conv3"
  top: "stage4_unit1_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage4_unit1_relu"
  type: "ReLU"
  bottom: "stage4_unit1_plus"
  top: "stage4_unit1_plus"
}

layer {
  name: "stage4_unit2_conv1"
  type: "Convolution"
  bottom: "stage4_unit1_plus"
  top: "stage4_unit2_conv1"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage4_unit2_bn1"
  type: "BatchNorm"
  bottom: "stage4_unit2_conv1"
  top: "stage4_unit2_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit2_bn1"
  bottom: "stage4_unit2_conv1"
  top: "stage4_unit2_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit2_relu1"
  type: "ReLU"
  bottom: "stage4_unit2_conv1"
  top: "stage4_unit2_conv1"
}

layer {
  name: "stage4_unit2_conv2"
  type: "Convolution"
  bottom: "stage4_unit2_conv1"
  top: "stage4_unit2_conv2"
  convolution_param {
    num_output: 1024
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage4_unit2_bn2"
  type: "BatchNorm"
  bottom: "stage4_unit2_conv2"
  top: "stage4_unit2_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit2_bn2"
  bottom: "stage4_unit2_conv2"
  top: "stage4_unit2_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit2_relu2"
  type: "ReLU"
  bottom: "stage4_unit2_conv2"
  top: "stage4_unit2_conv2"
}

layer {
  name: "stage4_unit2_conv3"
  type: "Convolution"
  bottom: "stage4_unit2_conv2"
  top: "stage4_unit2_conv3"
  convolution_param {
    num_output: 2048
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage4_unit2_bn3"
  type: "BatchNorm"
  bottom: "stage4_unit2_conv3"
  top: "stage4_unit2_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit2_bn3"
  bottom: "stage4_unit2_conv3"
  top: "stage4_unit2_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit2_plus"
  type: "Eltwise"
  bottom: "stage4_unit1_plus"
  bottom: "stage4_unit2_conv3"
  top: "stage4_unit2_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage4_unit2_relu"
  type: "ReLU"
  bottom: "stage4_unit2_plus"
  top: "stage4_unit2_plus"
}

layer {
  name: "stage4_unit3_conv1"
  type: "Convolution"
  bottom: "stage4_unit2_plus"
  top: "stage4_unit3_conv1"
  convolution_param {
    num_output: 1024
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage4_unit3_bn1"
  type: "BatchNorm"
  bottom: "stage4_unit3_conv1"
  top: "stage4_unit3_conv1"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit3_bn1"
  bottom: "stage4_unit3_conv1"
  top: "stage4_unit3_conv1"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit3_relu1"
  type: "ReLU"
  bottom: "stage4_unit3_conv1"
  top: "stage4_unit3_conv1"
}

layer {
  name: "stage4_unit3_conv2"
  type: "Convolution"
  bottom: "stage4_unit3_conv1"
  top: "stage4_unit3_conv2"
  convolution_param {
    num_output: 1024
    kernel_size: 3
    stride: 1
    group: 32
    pad: 1
    bias_term: false
  }
}

layer {
  name: "stage4_unit3_bn2"
  type: "BatchNorm"
  bottom: "stage4_unit3_conv2"
  top: "stage4_unit3_conv2"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit3_bn2"
  bottom: "stage4_unit3_conv2"
  top: "stage4_unit3_conv2"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit3_relu2"
  type: "ReLU"
  bottom: "stage4_unit3_conv2"
  top: "stage4_unit3_conv2"
}

layer {
  name: "stage4_unit3_conv3"
  type: "Convolution"
  bottom: "stage4_unit3_conv2"
  top: "stage4_unit3_conv3"
  convolution_param {
    num_output: 2048
    kernel_size: 1
    stride: 1
    pad: 0
    bias_term: false
  }
}

layer {
  name: "stage4_unit3_bn3"
  type: "BatchNorm"
  bottom: "stage4_unit3_conv3"
  top: "stage4_unit3_conv3"
  batch_norm_param {
    use_global_stats: true
    eps: 2e-5
  }
}

layer {
  name: "scale_stage4_unit3_bn3"
  bottom: "stage4_unit3_conv3"
  top: "stage4_unit3_conv3"
  type: "Scale"
  scale_param {
    bias_term: true
  }
}

layer {
  name: "stage4_unit3_plus"
  type: "Eltwise"
  bottom: "stage4_unit2_plus"
  bottom: "stage4_unit3_conv3"
  top: "stage4_unit3_plus"
  eltwise_param {
    operation: SUM
  }
}

layer {
  name: "stage4_unit3_relu"
  type: "ReLU"
  bottom: "stage4_unit3_plus"
  top: "stage4_unit3_plus"
}

layer {
  name: "pool1"
  type: "Pooling"
  bottom: "stage4_unit3_plus"
  top: "pool1"
  pooling_param {
    global_pooling: true
    pool: AVE
  }
}

layer {
  name: "fc1"
  type: "InnerProduct"
  bottom: "pool1"
  top: "fc1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 1000
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}

layer {
  name: "prob"
  type: "Softmax"
  bottom: "fc1"
  top: "prob"
}

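For reference, here is a minimal pycaffe sketch of running this deploy network; it is not part of the commit. The group: 32 fields on every 3x3 convolution are the ResNeXt cardinality, which is what distinguishes this definition from a plain ResNet-50. Assumptions are marked below: the weights filename is hypothetical, and the random input stands in for a real image; actual use must apply whatever preprocessing (BGR channel order, resize/crop to 224x224, mean handling) the model was trained with.

import numpy as np
import caffe

caffe.set_mode_cpu()
# Load the network definition above together with trained weights.
# "resnext-50.caffemodel" is a hypothetical filename.
net = caffe.Net('ResNeXt-50-deploy.prototxt', 'resnext-50.caffemodel', caffe.TEST)

# Dummy input matching the declared input shape: 1 x 3 x 224 x 224.
net.blobs['data'].data[...] = np.random.rand(1, 3, 224, 224).astype(np.float32)

out = net.forward()  # runs through to the final "prob" Softmax layer
top5 = out['prob'][0].argsort()[::-1][:5]
print('top-5 class indices:', top5)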