layer { name: "input_1" type: "Input" top: "input_1" input_param { shape { dim: 1 dim: 3 dim: 512 dim: 1024 } } } layer { name: "conv0_0" type: "Convolution" bottom: "input_1" top: "conv0_0" convolution_param { num_output: 16 bias_term: true pad: 3 kernel_size: 7 stride: 1 } } layer { name: "conv0_0_relu" type: "ReLU" bottom: "conv0_0" top: "conv0_0_relu" } layer { name: "conv0_2" type: "Convolution" bottom: "conv0_0_relu" top: "conv0_2" convolution_param { num_output: 16 bias_term: true pad: 2 kernel_size: 5 stride: 1 } } layer { name: "conv0_2_relu" type: "ReLU" bottom: "conv0_2" top: "conv0_2_relu" } layer { name: "conv0_4" type: "Convolution" bottom: "conv0_2_relu" top: "conv0_4" convolution_param { num_output: 32 bias_term: true pad: 1 kernel_size: 3 stride: 2 } } layer { name: "conv0_4_relu" type: "ReLU" bottom: "conv0_4" top: "conv0_4_relu" } layer { name: "conv1_0" type: "Convolution" bottom: "conv0_4_relu" top: "conv1_0" convolution_param { num_output: 32 bias_term: true pad: 2 kernel_size: 5 stride: 1 } } layer { name: "conv1_0_relu" type: "ReLU" bottom: "conv1_0" top: "conv1_0_relu" } layer { name: "conv1_2" type: "Convolution" bottom: "conv1_0_relu" top: "conv1_2" convolution_param { num_output: 32 bias_term: true pad: 1 kernel_size: 3 stride: 1 } } layer { name: "conv1_2_relu" type: "ReLU" bottom: "conv1_2" top: "conv1_2_relu" } layer { name: "conv1_3" type: "Convolution" bottom: "conv1_2_relu" top: "conv1_3" convolution_param { num_output: 32 bias_term: true pad: 1 kernel_size: 3 stride: 1 } } layer { name: "conv1_3_relu" type: "ReLU" bottom: "conv1_3" top: "conv1_3_relu" } layer { name: "conv1_4" type: "Convolution" bottom: "conv1_3_relu" top: "conv1_4" convolution_param { num_output: 64 bias_term: true pad: 1 kernel_size: 3 stride: 1 } } layer { name: "conv1_4_relu" type: "ReLU" bottom: "conv1_4" top: "conv1_4_relu" } layer { name: "inception1_t0_0" type: "Convolution" bottom: "conv1_4_relu" top: "inception1_t0_0" convolution_param { num_output: 48 bias_term: true kernel_size: 1 stride: 1 } } layer { name: "inception1_t1_0" type: "Convolution" bottom: "conv1_4_relu" top: "inception1_t1_0" convolution_param { num_output: 48 bias_term: true kernel_size: 1 stride: 1 } } layer { name: "inception1_t0_0_relu" type: "ReLU" bottom: "inception1_t0_0" top: "inception1_t0_0_relu" } layer { name: "inception1_t1_0_relu" type: "ReLU" bottom: "inception1_t1_0" top: "inception1_t1_0_relu" } layer { name: "inception1_t3_0" type: "Pooling" bottom: "conv1_4_relu" top: "inception1_t3_0" pooling_param { pool: MAX kernel_size: 2 stride: 2 } } layer { name: "inception1_t0_1" type: "Convolution" bottom: "inception1_t0_0_relu" top: "inception1_t0_1" convolution_param { num_output: 48 bias_term: true pad: 1 kernel_size: 3 stride: 2 } } layer { name: "inception1_t1_1" type: "Convolution" bottom: "inception1_t1_0_relu" top: "inception1_t1_1" convolution_param { num_output: 48 bias_term: true pad: 2 kernel_size: 5 stride: 2 } } layer { name: "inception1_t3_1" type: "Convolution" bottom: "inception1_t3_0" top: "inception1_t3_1" convolution_param { num_output: 48 bias_term: true kernel_size: 1 stride: 1 } } layer { name: "inception1_t0_1_relu" type: "ReLU" bottom: "inception1_t0_1" top: "inception1_t0_1_relu" } layer { name: "inception1_t1_1_relu" type: "ReLU" bottom: "inception1_t1_1" top: "inception1_t1_1_relu" } layer { name: "inception1_t3_1_relu" type: "ReLU" bottom: "inception1_t3_1" top: "inception1_t3_1_relu" } layer { name: "inception1concat" type: "Concat" bottom: "inception1_t0_1_relu" bottom: 
"inception1_t1_1_relu" bottom: "inception1_t3_1_relu" top: "inception1concat" } layer { name: "inception1_dim_red" type: "Convolution" bottom: "inception1concat" top: "inception1_dim_red" convolution_param { num_output: 64 bias_term: true kernel_size: 1 stride: 1 } } layer { name: "inception1_dim_red_relu" type: "ReLU" bottom: "inception1_dim_red" top: "inception1_dim_red_relu" } layer { name: "conv2_0" type: "Convolution" bottom: "inception1_dim_red_relu" top: "conv2_0" convolution_param { num_output: 64 bias_term: true pad: 1 kernel_size: 3 stride: 1 } } layer { name: "conv2_0_relu" type: "ReLU" bottom: "conv2_0" top: "conv2_0_relu" } layer { name: "conv2_1" type: "Convolution" bottom: "conv2_0_relu" top: "conv2_1" convolution_param { num_output: 64 bias_term: true pad: 1 kernel_size: 3 stride: 1 } } layer { name: "conv2_1_relu" type: "ReLU" bottom: "conv2_1" top: "conv2_1_relu" } layer { name: "conv2_3" type: "Convolution" bottom: "conv2_1_relu" top: "conv2_3" convolution_param { num_output: 96 bias_term: true pad: 1 kernel_size: 3 stride: 1 } } layer { name: "conv2_3_relu" type: "ReLU" bottom: "conv2_3" top: "conv2_3_relu" } layer { name: "inception2_t0_0" type: "Convolution" bottom: "conv2_3_relu" top: "inception2_t0_0" convolution_param { num_output: 72 bias_term: true kernel_size: 1 stride: 1 } } layer { name: "inception2_t1_0" type: "Convolution" bottom: "conv2_3_relu" top: "inception2_t1_0" convolution_param { num_output: 72 bias_term: true kernel_size: 1 stride: 1 } } layer { name: "inception2_t0_0_relu" type: "ReLU" bottom: "inception2_t0_0" top: "inception2_t0_0_relu" } layer { name: "inception2_t1_0_relu" type: "ReLU" bottom: "inception2_t1_0" top: "inception2_t1_0_relu" } layer { name: "inception2_t3_0" type: "Pooling" bottom: "conv2_3_relu" top: "inception2_t3_0" pooling_param { pool: MAX kernel_size: 2 stride: 2 } } layer { name: "inception2_t0_1" type: "Convolution" bottom: "inception2_t0_0_relu" top: "inception2_t0_1" convolution_param { num_output: 72 bias_term: true pad: 1 kernel_size: 3 stride: 2 } } layer { name: "inception2_t1_1" type: "Convolution" bottom: "inception2_t1_0_relu" top: "inception2_t1_1" convolution_param { num_output: 72 bias_term: true pad: 2 kernel_size: 5 stride: 2 } } layer { name: "inception2_t3_1" type: "Convolution" bottom: "inception2_t3_0" top: "inception2_t3_1" convolution_param { num_output: 72 bias_term: true kernel_size: 1 stride: 1 } } layer { name: "inception2_t0_1_relu" type: "ReLU" bottom: "inception2_t0_1" top: "inception2_t0_1_relu" } layer { name: "inception2_t1_1_relu" type: "ReLU" bottom: "inception2_t1_1" top: "inception2_t1_1_relu" } layer { name: "inception2_t3_1_relu" type: "ReLU" bottom: "inception2_t3_1" top: "inception2_t3_1_relu" } layer { name: "inception2concat" type: "Concat" bottom: "inception2_t0_1_relu" bottom: "inception2_t1_1_relu" bottom: "inception2_t3_1_relu" top: "inception2concat" }