  • How do you implement the VGG16 network with libtorch?

    Reference (layer-by-layer visualization): https://ethereon.github.io/netscope/#/preset/vgg-16

    Writing the network is simply a matter of following that diagram layer by layer.

    Paper: https://arxiv.org/pdf/1409.1556.pdf

    #include <torch/torch.h>

    // Define a new Module implementing VGG16 (configuration D from the paper).
    struct Net : torch::nn::Module {
    	Net() {
    		// Register every layer so its parameters are tracked by the module
    		// (needed for optimizer->parameters(), serialization and .to(device)).
    		conv1_1 = register_module("conv1_1", torch::nn::Conv2d(torch::nn::Conv2dOptions(3, 64, { 3,3 }).padding(1)));
    		conv1_2 = register_module("conv1_2", torch::nn::Conv2d(torch::nn::Conv2dOptions(64, 64, { 3,3 }).padding(1)));
    		conv2_1 = register_module("conv2_1", torch::nn::Conv2d(torch::nn::Conv2dOptions(64, 128, { 3,3 }).padding(1)));
    		conv2_2 = register_module("conv2_2", torch::nn::Conv2d(torch::nn::Conv2dOptions(128, 128, { 3,3 }).padding(1)));
    		conv3_1 = register_module("conv3_1", torch::nn::Conv2d(torch::nn::Conv2dOptions(128, 256, { 3,3 }).padding(1)));
    		conv3_2 = register_module("conv3_2", torch::nn::Conv2d(torch::nn::Conv2dOptions(256, 256, { 3,3 }).padding(1)));
    		conv3_3 = register_module("conv3_3", torch::nn::Conv2d(torch::nn::Conv2dOptions(256, 256, { 3,3 }).padding(1)));
    		conv4_1 = register_module("conv4_1", torch::nn::Conv2d(torch::nn::Conv2dOptions(256, 512, { 3,3 }).padding(1)));
    		conv4_2 = register_module("conv4_2", torch::nn::Conv2d(torch::nn::Conv2dOptions(512, 512, { 3,3 }).padding(1)));
    		conv4_3 = register_module("conv4_3", torch::nn::Conv2d(torch::nn::Conv2dOptions(512, 512, { 3,3 }).padding(1)));
    		conv5_1 = register_module("conv5_1", torch::nn::Conv2d(torch::nn::Conv2dOptions(512, 512, { 3,3 }).padding(1)));
    		conv5_2 = register_module("conv5_2", torch::nn::Conv2d(torch::nn::Conv2dOptions(512, 512, { 3,3 }).padding(1)));
    		conv5_3 = register_module("conv5_3", torch::nn::Conv2d(torch::nn::Conv2dOptions(512, 512, { 3,3 }).padding(1)));

    		fc1 = register_module("fc1", torch::nn::Linear(512 * 7 * 7, 4096));
    		fc2 = register_module("fc2", torch::nn::Linear(4096, 4096));
    		fc3 = register_module("fc3", torch::nn::Linear(4096, 1000));
    	}
    
    	// Implement the Net's algorithm.
    	torch::Tensor forward(torch::Tensor x) {
    		x = conv1_1->forward(x);
    		x = torch::relu(x);
    		x = conv1_2->forward(x);
    		x = torch::relu(x);
    		x = torch::max_pool2d(x, { 2,2 }, { 2,2 });
    
    		x = conv2_1->forward(x);
    		x = torch::relu(x);
    		x = conv2_2->forward(x);
    		x = torch::relu(x);
    		x = torch::max_pool2d(x, { 2,2 }, { 2,2 });
    
    		x = conv3_1->forward(x);
    		x = torch::relu(x);
    		x = conv3_2->forward(x);
    		x = torch::relu(x);
    		x = conv3_3->forward(x);
    		x = torch::relu(x);
    		x = torch::max_pool2d(x, { 2,2 }, { 2,2 });
    
    		x = conv4_1->forward(x);
    		x = torch::relu(x);
    		x = conv4_2->forward(x);
    		x = torch::relu(x);
    		x = conv4_3->forward(x);
    		x = torch::relu(x);
    		x = torch::max_pool2d(x, { 2,2 }, { 2,2 });
    
    		x = conv5_1->forward(x);
    		x = torch::relu(x);
    		x = conv5_2->forward(x);
    		x = torch::relu(x);
    		x = conv5_3->forward(x);
    		x = torch::relu(x);
    		x = torch::max_pool2d(x, { 2,2 }, { 2,2 });
    
    		x = x.view({ x.size(0), -1 }); // flatten: 512 * 7 * 7 = 25088 features (assumes 224x224 input)
    
    		x = fc1->forward(x);
    		x = torch::relu(x);
    		x = torch::dropout(x, 0.5, is_training());
    
    		x = fc2->forward(x);
    		x = torch::relu(x);
    		x = torch::dropout(x, 0.5, is_training());
    
    		x = fc3->forward(x);
    
    		x = torch::log_softmax(x, 1); // log-probabilities over the 1000 classes
    
    		return x;
    	}
    
    	// Use one of many "standard library" modules.
    	torch::nn::Conv2d conv1_1{ nullptr };
    	torch::nn::Conv2d conv1_2{ nullptr };
    	torch::nn::Conv2d conv2_1{ nullptr };
    	torch::nn::Conv2d conv2_2{ nullptr };
    	torch::nn::Conv2d conv3_1{ nullptr };
    	torch::nn::Conv2d conv3_2{ nullptr };
    	torch::nn::Conv2d conv3_3{ nullptr };
    	torch::nn::Conv2d conv4_1{ nullptr };
    	torch::nn::Conv2d conv4_2{ nullptr };
    	torch::nn::Conv2d conv4_3{ nullptr };
    	torch::nn::Conv2d conv5_1{ nullptr };
    	torch::nn::Conv2d conv5_2{ nullptr };
    	torch::nn::Conv2d conv5_3{ nullptr };
    	torch::nn::Linear fc1{ nullptr };
    	torch::nn::Linear fc2{ nullptr };
    	torch::nn::Linear fc3{ nullptr };
    };
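
    The following is a minimal usage sketch, not from the original post: it assumes the struct above plus <torch/torch.h> and <iostream> are available, builds a Net, pushes one random 3x224x224 image through it, and runs a single backward pass. The 224x224 resolution is assumed because the 512*7*7 flatten in forward() only matches that input size, and nll_loss is used because forward() ends in log_softmax.

    int main() {
    	// Instantiate the network and move it to the GPU when one is available.
    	auto net = std::make_shared<Net>();
    	torch::Device device(torch::cuda::is_available() ? torch::kCUDA : torch::kCPU);
    	net->to(device);

    	// VGG16 expects 3x224x224 inputs; other sizes break the 512*7*7 flatten.
    	auto input = torch::randn({ 1, 3, 224, 224 }, device);
    	auto output = net->forward(input);
    	std::cout << output.sizes() << std::endl; // expected: [1, 1000]

    	// The log_softmax output pairs with nll_loss; a dummy target of class 0 is used here.
    	auto target = torch::zeros({ 1 }, torch::TensorOptions().dtype(torch::kLong).device(device));
    	auto loss = torch::nll_loss(output, target);
    	loss.backward(); // gradients flow because every layer was registered with register_module
    	return 0;
    }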
    
  • Original article: https://www.cnblogs.com/cheungxiongwei/p/10714974.html