  • Task 3: Implementing Logistic Regression in PyTorch


    1. Basic PyTorch implementation

    import torch
    from torch.autograd import Variable

    torch.manual_seed(2)
    x_data = Variable(torch.Tensor([[1.0], [2.0], [3.0], [4.0]]))
    y_data = Variable(torch.Tensor([[0.0], [0.0], [1.0], [1.0]]))

    # Initialize the parameters
    w = Variable(torch.Tensor([-1]), requires_grad=True)
    b = Variable(torch.Tensor([0]), requires_grad=True)
    epochs = 100
    costs = []
    lr = 0.1
    print("before training, predict of x = 1.5 is:")
    print("y_pred = ", float(w.data * 1.5 + b.data > 0))

    # Train the model
    for epoch in range(epochs):
        # Forward pass and loss
        A = 1 / (1 + torch.exp(-(w * x_data + b)))  # logistic (sigmoid) function
        J = -torch.mean(y_data * torch.log(A) + (1 - y_data) * torch.log(1 - A))  # binary cross-entropy loss
        # J = -torch.mean(y_data * torch.log(A) + (1 - y_data) * torch.log(1 - A)) + alpha * w ** 2
        # L2 regularization: add the squared L2 norm of w to the loss (a fuller sketch follows this block)
        costs.append(J.data)
        J.backward()  # automatic back-propagation

        # Update the parameters and reset the gradients
        w.data = w.data - lr * w.grad.data
        w.grad.data.zero_()
        b.data = b.data - lr * b.grad.data
        b.grad.data.zero_()

    print("after training, predict of x = 1.5 is:")
    print("y_pred =", float(w.data * 1.5 + b.data > 0))
    print(w.data, b.data)
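
    The commented-out loss in the block above hints at L2 regularization. Below is a minimal sketch of that variant; the penalty coefficient alpha is a hypothetical value not defined in the original code, and only the loss line inside the training loop changes:

    alpha = 0.01  # hypothetical L2 penalty strength (not part of the original code)

    for epoch in range(epochs):
        A = 1 / (1 + torch.exp(-(w * x_data + b)))
        # binary cross-entropy plus an L2 penalty on the weight
        J = -torch.mean(y_data * torch.log(A) + (1 - y_data) * torch.log(1 - A)) + alpha * torch.sum(w ** 2)
        J.backward()

        w.data = w.data - lr * w.grad.data
        w.grad.data.zero_()
        b.data = b.data - lr * b.grad.data
        b.grad.data.zero_()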


    2. Implementing logistic regression with a PyTorch class, defining the network structure with torch.nn.Module

    import torch
    from torch.autograd import Variable

    x_data = Variable(torch.Tensor([[0.6], [1.0], [3.5], [4.0]]))
    y_data = Variable(torch.Tensor([[0.], [0.], [1.], [1.]]))

    class Model(torch.nn.Module):
        def __init__(self):
            super(Model, self).__init__()
            self.linear = torch.nn.Linear(1, 1)    # one input feature -> one output
            self.sigmoid = torch.nn.Sigmoid()      # sigmoid squashes the output to (0, 1)

        def forward(self, x):
            y_pred = self.sigmoid(self.linear(x))
            return y_pred


    model = Model()

    criterion = torch.nn.BCELoss(reduction='mean')              # binary cross-entropy loss (size_average is deprecated)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)    # stochastic gradient descent

    for epoch in range(500):
        # Forward pass
        y_pred = model(x_data)

        loss = criterion(y_pred, y_data)
        if epoch % 20 == 0:
            print(epoch, loss.item())

        # Zero the gradients
        optimizer.zero_grad()
        # Back-propagate
        loss.backward()
        # Update the weights
        optimizer.step()

    hour_var = Variable(torch.Tensor([[0.5]]))
    print("predict (after training)", 0.5, model(hour_var).item())
    hour_var = Variable(torch.Tensor([[7.0]]))
    print("predict (after training)", 7.0, model(hour_var).item())

    Reference: https://blog.csdn.net/ZZQsAI/article/details/90216593

  • Original post: https://www.cnblogs.com/NPC-assange/p/11336736.html