Revisions

  1. @graphific revised this gist Aug 23, 2015. 1 changed file with 9 additions and 46 deletions (the blobs_lr fields that appear throughout this diff are discussed in a note after the listings below).
    55 changes: 9 additions & 46 deletions VGG_ILSVRC_19_layers_train_val.prototxt
    @@ -1,3 +1,4 @@
    VGG_ILSVRC_19_layers_train_val.prototxt
    name: "VGG_ILSVRC_19_layers"
    layers {
    name: "data"
    @@ -50,9 +51,7 @@ layers {
    num_output: 64
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv1_1"
    @@ -70,8 +69,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv1_2"
    @@ -99,9 +96,7 @@ layers {
    num_output: 128
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv2_1"
    @@ -119,8 +114,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv2_2"
    @@ -149,8 +142,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_1"
    @@ -168,8 +159,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_2"
    @@ -187,8 +176,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_3"
    @@ -206,8 +193,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_4"
    @@ -235,9 +220,7 @@ layers {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv4_1"
    @@ -255,8 +238,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv4_2"
    @@ -273,9 +254,7 @@ layers {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv4_3"
    @@ -292,9 +271,7 @@ layers {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv4_4"
    @@ -323,8 +300,6 @@ layers {
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv5_1"
    @@ -341,9 +316,7 @@ layers {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv5_2"
    @@ -360,9 +333,7 @@ layers {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv5_3"
    @@ -379,9 +350,7 @@ layers {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    }
    layers {
    bottom: "conv5_4"
    @@ -408,8 +377,6 @@ layers {
    inner_product_param {
    num_output: 4096
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "fc6"
    @@ -434,8 +401,6 @@ layers {
    inner_product_param {
    num_output: 4096
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "fc7"
    @@ -460,8 +425,6 @@ layers {
    inner_product_param {
    num_output: 1000
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    name: "loss"
  2. @graphific created this gist Aug 20, 2015 (a sketch of a solver that could drive this file follows the listing).
    494 changes: 494 additions & 0 deletions VGG_ILSVRC_19_layers_train_val.prototxt
    @@ -0,0 +1,494 @@
    name: "VGG_ILSVRC_19_layers"
    layers {
    name: "data"
    type: DATA
    include {
    phase: TRAIN
    }
    transform_param {
    crop_size: 224
    mean_value: 104
    mean_value: 117
    mean_value: 123
    mirror: true
    }
    data_param {
    source: "data/ilsvrc12/ilsvrc12_train_lmdb"
    batch_size: 64
    backend: LMDB
    }
    top: "data"
    top: "label"
    }
    layers {
    name: "data"
    type: DATA
    include {
    phase: TEST
    }
    transform_param {
    crop_size: 224
    mean_value: 104
    mean_value: 117
    mean_value: 123
    mirror: false
    }
    data_param {
    source: "data/ilsvrc12/ilsvrc12_val_lmdb"
    batch_size: 50
    backend: LMDB
    }
    top: "data"
    top: "label"
    }
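    # Both data layers feed 224x224 crops with the per-channel mean values 104/117/123
    # (the usual Caffe BGR ImageNet means) subtracted; only the TRAIN phase mirrors.
    # In the conv stack below every 3x3 convolution uses pad 1, so spatial size is
    # preserved, and each 2x2 stride-2 MAX pool halves it: 224 -> 112 -> 56 -> 28 -> 14 -> 7.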
    layers {
    bottom: "data"
    top: "conv1_1"
    name: "conv1_1"
    type: CONVOLUTION
    convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv1_1"
    top: "conv1_1"
    name: "relu1_1"
    type: RELU
    }
    layers {
    bottom: "conv1_1"
    top: "conv1_2"
    name: "conv1_2"
    type: CONVOLUTION
    convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv1_2"
    top: "conv1_2"
    name: "relu1_2"
    type: RELU
    }
    layers {
    bottom: "conv1_2"
    top: "pool1"
    name: "pool1"
    type: POOLING
    pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    }
    }
    layers {
    bottom: "pool1"
    top: "conv2_1"
    name: "conv2_1"
    type: CONVOLUTION
    convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv2_1"
    top: "conv2_1"
    name: "relu2_1"
    type: RELU
    }
    layers {
    bottom: "conv2_1"
    top: "conv2_2"
    name: "conv2_2"
    type: CONVOLUTION
    convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv2_2"
    top: "conv2_2"
    name: "relu2_2"
    type: RELU
    }
    layers {
    bottom: "conv2_2"
    top: "pool2"
    name: "pool2"
    type: POOLING
    pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    }
    }
    layers {
    bottom: "pool2"
    top: "conv3_1"
    name: "conv3_1"
    type: CONVOLUTION
    convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_1"
    top: "conv3_1"
    name: "relu3_1"
    type: RELU
    }
    layers {
    bottom: "conv3_1"
    top: "conv3_2"
    name: "conv3_2"
    type: CONVOLUTION
    convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_2"
    top: "conv3_2"
    name: "relu3_2"
    type: RELU
    }
    layers {
    bottom: "conv3_2"
    top: "conv3_3"
    name: "conv3_3"
    type: CONVOLUTION
    convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_3"
    top: "conv3_3"
    name: "relu3_3"
    type: RELU
    }
    layers {
    bottom: "conv3_3"
    top: "conv3_4"
    name: "conv3_4"
    type: CONVOLUTION
    convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv3_4"
    top: "conv3_4"
    name: "relu3_4"
    type: RELU
    }
    layers {
    bottom: "conv3_4"
    top: "pool3"
    name: "pool3"
    type: POOLING
    pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    }
    }
    layers {
    bottom: "pool3"
    top: "conv4_1"
    name: "conv4_1"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv4_1"
    top: "conv4_1"
    name: "relu4_1"
    type: RELU
    }
    layers {
    bottom: "conv4_1"
    top: "conv4_2"
    name: "conv4_2"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv4_2"
    top: "conv4_2"
    name: "relu4_2"
    type: RELU
    }
    layers {
    bottom: "conv4_2"
    top: "conv4_3"
    name: "conv4_3"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv4_3"
    top: "conv4_3"
    name: "relu4_3"
    type: RELU
    }
    layers {
    bottom: "conv4_3"
    top: "conv4_4"
    name: "conv4_4"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv4_4"
    top: "conv4_4"
    name: "relu4_4"
    type: RELU
    }
    layers {
    bottom: "conv4_4"
    top: "pool4"
    name: "pool4"
    type: POOLING
    pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    }
    }
    layers {
    bottom: "pool4"
    top: "conv5_1"
    name: "conv5_1"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv5_1"
    top: "conv5_1"
    name: "relu5_1"
    type: RELU
    }
    layers {
    bottom: "conv5_1"
    top: "conv5_2"
    name: "conv5_2"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv5_2"
    top: "conv5_2"
    name: "relu5_2"
    type: RELU
    }
    layers {
    bottom: "conv5_2"
    top: "conv5_3"
    name: "conv5_3"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv5_3"
    top: "conv5_3"
    name: "relu5_3"
    type: RELU
    }
    layers {
    bottom: "conv5_3"
    top: "conv5_4"
    name: "conv5_4"
    type: CONVOLUTION
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "conv5_4"
    top: "conv5_4"
    name: "relu5_4"
    type: RELU
    }
    layers {
    bottom: "conv5_4"
    top: "pool5"
    name: "pool5"
    type: POOLING
    pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
    }
    }
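    # With a 224x224 input, pool5 is 512 x 7 x 7 = 25088 values, so fc6 below is a
    # 25088 -> 4096 inner product (25088 * 4096 = 102,760,448 weights plus biases),
    # by far the largest parameter block in the network.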
    layers {
    bottom: "pool5"
    top: "fc6"
    name: "fc6"
    type: INNER_PRODUCT
    inner_product_param {
    num_output: 4096
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "fc6"
    top: "fc6"
    name: "relu6"
    type: RELU
    }
    layers {
    bottom: "fc6"
    top: "fc6"
    name: "drop6"
    type: DROPOUT
    dropout_param {
    dropout_ratio: 0.5
    }
    }
    layers {
    bottom: "fc6"
    top: "fc7"
    name: "fc7"
    type: INNER_PRODUCT
    inner_product_param {
    num_output: 4096
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    bottom: "fc7"
    top: "fc7"
    name: "relu7"
    type: RELU
    }
    layers {
    bottom: "fc7"
    top: "fc7"
    name: "drop7"
    type: DROPOUT
    dropout_param {
    dropout_ratio: 0.5
    }
    }
    layers {
    name: "fc8"
    bottom: "fc7"
    top: "fc8"
    type: INNER_PRODUCT
    inner_product_param {
    num_output: 1000
    }
    blobs_lr: 0
    blobs_lr: 0
    }
    layers {
    name: "loss"
    type: SOFTMAX_LOSS
    bottom: "fc8"
    bottom: "label"
    top: "loss/loss"
    }
    layers {
    name: "accuracy/top1"
    type: ACCURACY
    bottom: "fc8"
    bottom: "label"
    top: "accuracy@1"
    include: { phase: TEST }
    accuracy_param {
    top_k: 1
    }
    }
    layers {
    name: "accuracy/top5"
    type: ACCURACY
    bottom: "fc8"
    bottom: "label"
    top: "accuracy@5"
    include: { phase: TEST }
    accuracy_param {
    top_k: 5
    }
    }
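
Notes

A note on the blobs_lr fields touched by the Aug 23 revision: in the old V1 "layers" syntax used in this gist, blobs_lr gives per-blob learning-rate multipliers (the first entry for the weights, the second for the bias), so blobs_lr: 0 freezes that blob, while omitting the field falls back to the default multiplier of 1. In current Caffe layer syntax the same thing is written with param { lr_mult } blocks. A minimal sketch of conv1_1 in the post-upgrade syntax, frozen as in the original file (an equivalent rewrite, not part of the gist):

    layer {
      name: "conv1_1"
      type: "Convolution"
      bottom: "data"
      top: "conv1_1"
      param { lr_mult: 0 decay_mult: 0 }   # weight blob: no learning-rate or weight-decay updates
      param { lr_mult: 0 decay_mult: 0 }   # bias blob frozen the same way
      convolution_param {
        num_output: 64
        pad: 1
        kernel_size: 3
      }
    }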
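
The gist only defines the network; to train or fine-tune it, Caffe also needs a solver file that points at this prototxt. A minimal sketch of such a solver.prototxt, with illustrative hyperparameters (the paths and values here are assumptions, not part of the gist):

    net: "VGG_ILSVRC_19_layers_train_val.prototxt"
    test_iter: 1000              # 1000 TEST batches of 50 cover the 50,000 ILSVRC validation images
    test_interval: 5000
    base_lr: 0.001
    lr_policy: "step"
    gamma: 0.1
    stepsize: 20000
    momentum: 0.9
    weight_decay: 0.0005
    display: 20
    max_iter: 100000
    snapshot: 10000
    snapshot_prefix: "snapshots/vgg19"
    solver_mode: GPU

Training could then be launched with the stock tool, e.g. caffe train --solver=solver.prototxt --weights=VGG_ILSVRC_19_layers.caffemodel to start from the released VGG-19 weights.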