  • prototxt files for VGG-19 and VGG-16
    VGG-16:
    prototxt URL: https://gist.github.com/ksimonyan/3785162f95cd2d5fee77#file-readme-md
    caffemodel URL: http://www.robots.ox.ac.uk/~vgg/software/very_deep/caffe/VGG_ILSVRC_16_layers.caffemodel

    VGG-19:
    prototxt URL: https://gist.github.com/ksimonyan/3785162f95cd2d5fee77#file-readme-md
    caffemodel URL: http://www.robots.ox.ac.uk/~vgg/software/very_deep/caffe/VGG_ILSVRC_19_layers.caffemodel
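    As a quick sanity check after downloading, the weights can be loaded through pycaffe. This is only a sketch: the file names below are assumptions and should be replaced with wherever the deploy prototxt and caffemodel from the links above were saved.

    import caffe

    # Placeholder paths: point these at the downloaded files.
    prototxt = "VGG_ILSVRC_16_layers_deploy.prototxt"
    weights = "VGG_ILSVRC_16_layers.caffemodel"

    caffe.set_mode_cpu()  # or caffe.set_mode_gpu()
    net = caffe.Net(prototxt, weights, caffe.TEST)

    # Print every blob's shape to confirm the architecture was read correctly.
    for name, blob in net.blobs.items():
        print(name, blob.data.shape)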

    VGG_19.prototxt file (the 19-layer network, modified here for fine-tuning):

    name: "VGG_ILSVRC_19_layer"
    
    layer {
    name: "data"
    type: "ImageData"
    top: "data"
    top: "label"
    include {
    phase: TRAIN
    }
    
    image_data_param {
    batch_size: 12
    source: "../../fine_tuning_data/HAT_fineTuning_data/train_data_fineTuning.txt"
    root_folder: "../../fine_tuning_data/HAT_fineTuning_data/train_data/"
    }
    }
    
    layer {
    name: "data"
    type: "ImageData"
    top: "data"
    top: "label"
    include {
    phase: TEST
    }
    transform_param {
    mirror: false
    }
    image_data_param {
    batch_size: 10
    source: "../../fine_tuning_data/HAT_fineTuning_data/test_data_fineTuning.txt"
    root_folder: "../../fine_tuning_data/HAT_fineTuning_data/test_data/"
    }
    }
    
    layer {
    bottom:"data" 
    top:"conv1_1" 
    name:"conv1_1" 
    type:"Convolution" 
    convolution_param {
    num_output:64 
    pad:1
    kernel_size:3 
    }
    }
    layer {
    bottom:"conv1_1" 
    top:"conv1_1" 
    name:"relu1_1" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv1_1" 
    top:"conv1_2" 
    name:"conv1_2" 
    type:"Convolution" 
    convolution_param {
    num_output:64 
    pad:1
    kernel_size:3
    }
    }
    layer {
    bottom:"conv1_2" 
    top:"conv1_2" 
    name:"relu1_2" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv1_2" 
    top:"pool1" 
    name:"pool1" 
    type:"Pooling" 
    pooling_param {
    pool:MAX 
    kernel_size:2
    stride:2 
    }
    }
    layer {
    bottom:"pool1" 
    top:"conv2_1" 
    name:"conv2_1" 
    type:"Convolution" 
    convolution_param {
    num_output:128
    pad:1
    kernel_size:3
    }
    }
    layer {
    bottom:"conv2_1" 
    top:"conv2_1" 
    name:"relu2_1" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv2_1" 
    top:"conv2_2" 
    name:"conv2_2" 
    type:"Convolution" 
    convolution_param {
    num_output:128 
    pad:1
    kernel_size:3
    }
    }
    layer {
    bottom:"conv2_2" 
    top:"conv2_2" 
    name:"relu2_2" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv2_2" 
    top:"pool2" 
    name:"pool2" 
    type:"Pooling" 
    pooling_param {
    pool:MAX
    kernel_size:2 
    stride:2 
    }
    }
    layer {
    bottom:"pool2" 
    top:"conv3_1" 
    name: "conv3_1"
    type:"Convolution" 
    convolution_param {
    num_output:256 
    pad:1
    kernel_size:3
    }
    }
    layer {
    bottom:"conv3_1" 
    top:"conv3_1" 
    name:"relu3_1" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv3_1" 
    top:"conv3_2" 
    name:"conv3_2" 
    type:"Convolution" 
    convolution_param {
    num_output:256
    pad:1
    kernel_size:3
    }
    }
    layer {
    bottom:"conv3_2" 
    top:"conv3_2" 
    name:"relu3_2" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv3_2" 
    top:"conv3_3" 
    name:"conv3_3" 
    type:"Convolution" 
    convolution_param {
    num_output:256 
    pad:1 
    kernel_size:3
    }
    }
    layer {
    bottom:"conv3_3" 
    top:"conv3_3"
    name:"relu3_3" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv3_3" 
    top:"conv3_4" 
    name:"conv3_4" 
    type:"Convolution" 
    convolution_param {
    num_output:256
    pad:1
    kernel_size:3
    }
    }
    layer {
    bottom:"conv3_4" 
    top:"conv3_4" 
    name:"relu3_4" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv3_4" 
    top:"pool3" 
    name:"pool3" 
    type:"Pooling" 
    pooling_param {
    pool:MAX 
    kernel_size: 2
    stride: 2
    }
    }
    layer {
    bottom:"pool3" 
    top:"conv4_1" 
    name:"conv4_1" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv4_1" 
    top:"conv4_1" 
    name:"relu4_1" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv4_1" 
    top:"conv4_2" 
    name:"conv4_2" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv4_2" 
    top:"conv4_2" 
    name:"relu4_2" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv4_2" 
    top:"conv4_3" 
    name:"conv4_3" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv4_3" 
    top:"conv4_3" 
    name:"relu4_3" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv4_3" 
    top:"conv4_4" 
    name:"conv4_4" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv4_4" 
    top:"conv4_4" 
    name:"relu4_4" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv4_4" 
    top:"pool4" 
    name:"pool4" 
    type:"Pooling" 
    pooling_param {
    pool:MAX
    kernel_size: 2
    stride: 2
    }
    }
    layer {
    bottom:"pool4" 
    top:"conv5_1" 
    name:"conv5_1" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv5_1" 
    top:"conv5_1" 
    name:"relu5_1" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv5_1" 
    top:"conv5_2" 
    name:"conv5_2" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv5_2" 
    top:"conv5_2" 
    name:"relu5_2" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv5_2" 
    top:"conv5_3" 
    name:"conv5_3" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv5_3" 
    top:"conv5_3" 
    name:"relu5_3" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv5_3" 
    top:"conv5_4" 
    name:"conv5_4" 
    type:"Convolution" 
    convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    }
    }
    layer {
    bottom:"conv5_4" 
    top:"conv5_4" 
    name:"relu5_4" 
    type:"ReLU" 
    }
    layer {
    bottom:"conv5_4" 
    top:"pool5" 
    name:"pool5" 
    type:"Pooling" 
    pooling_param {
    pool:MAX 
    kernel_size: 2
    stride: 2
    }
    }
    layer {
    bottom:"pool5" 
    top:"fc6_" 
    name:"fc6_" 
    type:"InnerProduct" 
    inner_product_param {
    num_output: 4096
    }
    }
    layer {
    bottom:"fc6_" 
    top:"fc6_" 
    name:"relu6" 
    type:"ReLU" 
    }
    layer {
    bottom:"fc6_" 
    top:"fc6_" 
    name:"drop6" 
    type:"Dropout" 
    dropout_param {
    dropout_ratio: 0.5
    }
    }
    layer {
    bottom:"fc6_" 
    top:"fc7" 
    name:"fc7" 
    type:"InnerProduct" 
    inner_product_param {
    num_output: 4096
    }
    }
    layer {
    bottom:"fc7" 
    top:"fc7" 
    name:"relu7" 
    type:"ReLU" 
    }
    layer {
    bottom:"fc7" 
    top:"fc7" 
    name:"drop7" 
    type:"Dropout" 
    dropout_param {
    dropout_ratio: 0.5
    }
    }
    layer {
    bottom:"fc7" 
    top:"fc8_" 
    name:"fc8_" 
    type:"InnerProduct" 
    inner_product_param {
    num_output: 43
    }
    }
    
    layer {
    name: "sigmoid"
    type: "Sigmoid"
    bottom: "fc8_"
    top: "fc8_"
    }
    
    layer {
    name: "accuracy"
    type: "Accuracy"
    bottom: "fc8_"
    bottom: "label"
    top: "accuracy"
    include {
    phase: TEST
    }
    }
    
    layer {
    name: "loss"
    type: "EuclideanLoss"
    bottom: "fc8_"
    bottom: "label"
    top: "loss"
    }
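    To actually run fine-tuning with a train/test definition like the one above, a solver prototxt is paired with the downloaded ILSVRC weights. The author's solver is not shown, so the sketch below assumes a placeholder solver file name. Note that the renamed layers ("fc6_", "fc8_") deliberately do not match any layer in the pretrained caffemodel, so they are left with their initial values while all other layers are copied by name.

    import caffe

    caffe.set_mode_gpu()  # fall back to caffe.set_mode_cpu() if no GPU is available

    # "solver_vgg19_finetune.prototxt" is a placeholder; its `net` field must point
    # at the train/test prototxt above and it must define lr, momentum, max_iter, etc.
    solver = caffe.SGDSolver("solver_vgg19_finetune.prototxt")

    # Copy pretrained weights by layer name; layers absent from the caffemodel
    # (such as the 43-output "fc8_") keep their initial, untrained values.
    solver.net.copy_from("VGG_ILSVRC_19_layers.caffemodel")

    solver.solve()  # equivalent to: caffe train -solver ... -weights ...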
    

      

    name: "VGG_ILSVRC_16_layer"
    layers {
    name: "data"
    type: IMAGE_DATA
    top: "data"
    top: "label"
    include {
    phase: TRAIN
    }
    
    image_data_param {
    batch_size: 80
    source: "/home/wangxiao/SUN397_part/selected_sun/Sun-100/Sun_100_Labeled_Train_0.5_.txt"
    root_folder: "/home/wangxiao/SUN397_part/selected_sun/Sun-100/train_image_sun_256_256/"
    new_height: 224
    new_width: 224
    }
    }
    
    layers {
    name: "data"
    type: IMAGE_DATA
    top: "data"
    top: "label"
    include {
    phase: TEST
    }
    transform_param {
    mirror: false
    }
    image_data_param {
    batch_size: 10
    source: "/home/wangxiao/SUN397_part/selected_sun/Sun-100/Sun_100_Test_0.5_.txt"
    root_folder: "/home/wangxiao/SUN397_part/selected_sun/Sun-100/test_image_sun_227_227/"
    new_height: 224
    new_width: 224
    }
    }
    
    layers {
      bottom: "data"
      top: "conv1_1"
      name: "conv1_1"
      type: CONVOLUTION
      convolution_param {
        num_output: 64
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv1_1"
      top: "conv1_1"
      name: "relu1_1"
      type: RELU
    }
    layers {
      bottom: "conv1_1"
      top: "conv1_2"
      name: "conv1_2"
      type: CONVOLUTION
      convolution_param {
        num_output: 64
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv1_2"
      top: "conv1_2"
      name: "relu1_2"
      type: RELU
    }
    layers {
      bottom: "conv1_2"
      top: "pool1"
      name: "pool1"
      type: POOLING
      pooling_param {
        pool: MAX
        kernel_size: 2
        stride: 2
      }
    }
    layers {
      bottom: "pool1"
      top: "conv2_1"
      name: "conv2_1"
      type: CONVOLUTION
      convolution_param {
        num_output: 128
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv2_1"
      top: "conv2_1"
      name: "relu2_1"
      type: RELU
    }
    layers {
      bottom: "conv2_1"
      top: "conv2_2"
      name: "conv2_2"
      type: CONVOLUTION
      convolution_param {
        num_output: 128
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv2_2"
      top: "conv2_2"
      name: "relu2_2"
      type: RELU
    }
    layers {
      bottom: "conv2_2"
      top: "pool2"
      name: "pool2"
      type: POOLING
      pooling_param {
        pool: MAX
        kernel_size: 2
        stride: 2
      }
    }
    layers {
      bottom: "pool2"
      top: "conv3_1"
      name: "conv3_1"
      type: CONVOLUTION
      convolution_param {
        num_output: 256
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv3_1"
      top: "conv3_1"
      name: "relu3_1"
      type: RELU
    }
    layers {
      bottom: "conv3_1"
      top: "conv3_2"
      name: "conv3_2"
      type: CONVOLUTION
      convolution_param {
        num_output: 256
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv3_2"
      top: "conv3_2"
      name: "relu3_2"
      type: RELU
    }
    layers {
      bottom: "conv3_2"
      top: "conv3_3"
      name: "conv3_3"
      type: CONVOLUTION
      convolution_param {
        num_output: 256
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv3_3"
      top: "conv3_3"
      name: "relu3_3"
      type: RELU
    }
    layers {
      bottom: "conv3_3"
      top: "pool3"
      name: "pool3"
      type: POOLING
      pooling_param {
        pool: MAX
        kernel_size: 2
        stride: 2
      }
    }
    layers {
      bottom: "pool3"
      top: "conv4_1"
      name: "conv4_1"
      type: CONVOLUTION
      convolution_param {
        num_output: 512
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv4_1"
      top: "conv4_1"
      name: "relu4_1"
      type: RELU
    }
    layers {
      bottom: "conv4_1"
      top: "conv4_2"
      name: "conv4_2"
      type: CONVOLUTION
      convolution_param {
        num_output: 512
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv4_2"
      top: "conv4_2"
      name: "relu4_2"
      type: RELU
    }
    layers {
      bottom: "conv4_2"
      top: "conv4_3"
      name: "conv4_3"
      type: CONVOLUTION
      convolution_param {
        num_output: 512
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv4_3"
      top: "conv4_3"
      name: "relu4_3"
      type: RELU
    }
    layers {
      bottom: "conv4_3"
      top: "pool4"
      name: "pool4"
      type: POOLING
      pooling_param {
        pool: MAX
        kernel_size: 2
        stride: 2
      }
    }
    layers {
      bottom: "pool4"
      top: "conv5_1"
      name: "conv5_1"
      type: CONVOLUTION
      convolution_param {
        num_output: 512
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv5_1"
      top: "conv5_1"
      name: "relu5_1"
      type: RELU
    }
    layers {
      bottom: "conv5_1"
      top: "conv5_2"
      name: "conv5_2"
      type: CONVOLUTION
      convolution_param {
        num_output: 512
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv5_2"
      top: "conv5_2"
      name: "relu5_2"
      type: RELU
    }
    layers {
      bottom: "conv5_2"
      top: "conv5_3"
      name: "conv5_3"
      type: CONVOLUTION
      convolution_param {
        num_output: 512
        pad: 1
        kernel_size: 3
      }
    }
    layers {
      bottom: "conv5_3"
      top: "conv5_3"
      name: "relu5_3"
      type: RELU
    }
    layers {
      bottom: "conv5_3"
      top: "pool5"
      name: "pool5"
      type: POOLING
      pooling_param {
        pool: MAX
        kernel_size: 2
        stride: 2
      }
    }
    layers {
      bottom: "pool5"
      top: "fc6"
      name: "fc6"
      type: INNER_PRODUCT
      inner_product_param {
        num_output: 4096
      }
    }
    layers {
      bottom: "fc6"
      top: "fc6"
      name: "relu6"
      type: RELU
    }
    layers {
      bottom: "fc6"
      top: "fc6"
      name: "drop6"
      type: DROPOUT
      dropout_param {
        dropout_ratio: 0.5
      }
    }
    layers {
      bottom: "fc6"
      top: "fc7"
      name: "fc7"
      type: INNER_PRODUCT
      inner_product_param {
        num_output: 4096
      }
    }
    layers {
      bottom: "fc7"
      top: "fc7"
      name: "relu7"
      type: RELU
    }
    layers {
      bottom: "fc7"
      top: "fc7"
      name: "drop7"
      type: DROPOUT
      dropout_param {
        dropout_ratio: 0.5
      }
    }
    layers {
      bottom: "fc7"
      top: "fc8_"
      name: "fc8_"
      type: INNER_PRODUCT
      inner_product_param {
        num_output: 88
      }
    }
    layers {
      name: "accuracy"
      type: ACCURACY
      bottom: "fc8_"
      bottom: "label"
      top: "accuracy"
      include {
        phase: TEST
      }
    }
    layers{
      name: "loss"
      type: SOFTMAX_LOSS
      bottom: "fc8_"
      bottom: "label"
      top: "loss"
    }
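    The VGG-16 definition above uses the old Caffe syntax (`layers` blocks with enum types such as CONVOLUTION), while the VGG-19 one earlier uses the current `layer` / string-type syntax. Recent Caffe builds still parse the old format and upgrade it in memory; it can also be rewritten on disk with the bundled upgrade tool. The file names below are placeholders:

    # Convert an old-format prototxt to the current layer syntax
    ./build/tools/upgrade_net_proto_text VGG_16_old_format.prototxt VGG_16_upgraded.prototxt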
    

      

  • Original post: https://www.cnblogs.com/wangxiaocvpr/p/5712800.html