  • PyTorch exercises

    1. Fit y = sin(x) with gradient descent: a cubic polynomial is fitted to sin(x) with hand-written NumPy gradient descent (the gradient formulas used in the loop are derived after the code).

    import math

    import numpy as np

    np.random.seed(2)


    if __name__ == '__main__':
        # Sample 2000 evenly spaced points on [-pi, pi] and their sine values.
        x = np.linspace(-math.pi, math.pi, 2000)
        y = np.sin(x)
    
        # Randomly initialize the coefficients of the cubic polynomial
        # y_pred = a + b*x + c*x^2 + d*x^3.
        a = np.random.randn()
        b = np.random.randn()
        c = np.random.randn()
        d = np.random.randn()
    
        learning_rate = 1e-6
        for t in range(10000):
            y_pred = a + b * x + c * x ** 2 + d * x ** 3
            loss = np.square(y_pred - y).sum()
            if t % 100 == 99:
                print(t, loss)
            # Backpropagate by hand: d(loss)/d(y_pred) = 2 * (y_pred - y),
            # then apply the chain rule for each coefficient.
            grad_y_pred = 2.0 * (y_pred - y)
            grad_a = grad_y_pred.sum()
            grad_b = (grad_y_pred * x).sum()
            grad_c = (grad_y_pred * x ** 2).sum()
            grad_d = (grad_y_pred * x ** 3).sum()
    
            a -= learning_rate * grad_a
            b -= learning_rate * grad_b
            c -= learning_rate * grad_c
            d -= learning_rate * grad_d
    
        print(f'Result: y = {a} + {b} x + {c} x^2 + {d} x^3')
    
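    The gradient formulas used in the loop above follow from differentiating the summed squared error. With y_pred = a + b x + c x^2 + d x^3 and L = sum_i (y_pred_i - y_i)^2, differentiation gives:

    \frac{\partial L}{\partial a} = \sum_i 2\,(\hat{y}_i - y_i)
    \frac{\partial L}{\partial b} = \sum_i 2\,(\hat{y}_i - y_i)\,x_i
    \frac{\partial L}{\partial c} = \sum_i 2\,(\hat{y}_i - y_i)\,x_i^{2}
    \frac{\partial L}{\partial d} = \sum_i 2\,(\hat{y}_i - y_i)\,x_i^{3}

    These are exactly the quantities grad_a, grad_b, grad_c, and grad_d computed before each learning-rate step.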

    2. Use PyTorch autograd: the same cubic fit, but the gradients are computed by loss.backward() instead of by hand.

    import math

    import torch

    torch.manual_seed(2)


    if __name__ == '__main__':
        dtype = torch.float
        # Sample 2000 evenly spaced points on [-pi, pi] and their sine values.
        x = torch.linspace(-math.pi, math.pi, 2000, dtype=dtype)
        y = torch.sin(x)
    
        # Scalar coefficients with requires_grad=True so autograd tracks operations on them.
        a = torch.randn((), dtype=dtype, requires_grad=True)
        b = torch.randn((), dtype=dtype, requires_grad=True)
        c = torch.randn((), dtype=dtype, requires_grad=True)
        d = torch.randn((), dtype=dtype, requires_grad=True)
    
        learning_rate = 1e-6
        for t in range(10000):
            y_pred = a + b * x + c * x ** 2 + d * x ** 3
            loss = (y_pred - y).pow(2).sum()
            if t % 100 == 99:
                print(t, loss.item())
            # Autograd computes d(loss)/d(a), ..., d(loss)/d(d) and stores them in .grad.
            loss.backward()
            # Update the coefficients outside of autograd tracking, then clear the gradients.
            with torch.no_grad():
                a -= learning_rate * a.grad
                b -= learning_rate * b.grad
                c -= learning_rate * c.grad
                d -= learning_rate * d.grad
    
                a.grad = None
                b.grad = None
                c.grad = None
                d.grad = None
    
        print(f'Result: y = {a.item()} + {b.item()} x + {c.item()} x^2 + {d.item()} x^3')
    
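    The same autograd loop can run on a GPU simply by creating the tensors on a CUDA device; a minimal sketch, assuming a CUDA-capable PyTorch build (it falls back to the CPU otherwise):

    import math

    import torch

    # Pick the GPU when one is available, otherwise stay on the CPU.
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    dtype = torch.float

    x = torch.linspace(-math.pi, math.pi, 2000, device=device, dtype=dtype)
    y = torch.sin(x)
    a = torch.randn((), device=device, dtype=dtype, requires_grad=True)
    # b, c, d are created the same way; the rest of the training loop is unchanged,
    # only the tensor placement differs.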

    3. Predict with a neural network: a torch.nn.Sequential model with a single Linear layer learns the polynomial coefficients, and torch.nn.MSELoss computes the loss.

    import math

    import torch
    
    
    
    
    if __name__ == '__main__':
        device = 'cpu'
        dtype = torch.float
        x = torch.linspace(-math.pi, math.pi, 2000, device=device, dtype=dtype)
        y = torch.sin(x)
        # Design matrix xx has shape (2000, 3); its columns are x, x^2, x^3.
        p = torch.tensor([1, 2, 3])
        xx = x.unsqueeze(-1).pow(p)
        # Linear(3, 1) learns the three polynomial coefficients plus a bias;
        # Flatten(0, 1) turns the (2000, 1) output into a (2000,) vector matching y.
        model = torch.nn.Sequential(
            torch.nn.Linear(3, 1),
            torch.nn.Flatten(0, 1)
        )
        loss_fn = torch.nn.MSELoss(reduction='sum')
    
        learning_rate = 1e-6
    
        for t in range(2000):
            y_pred = model(xx)
            loss = loss_fn(y_pred, y)
            if t % 100 == 99:
                print(t, loss.item())
            model.zero_grad()
            loss.backward()
            # Manual SGD step over every parameter of the model.
            with torch.no_grad():
                for param in model.parameters():
                    param -= learning_rate * param.grad
        # The learned bias and weights of the Linear layer are the fitted coefficients.
        linear_layer = model[0]
        print(
            f'Result: y = {linear_layer.bias.item()} + {linear_layer.weight[:, 0].item()} x + {linear_layer.weight[:, 1].item()} x^2 + {linear_layer.weight[:, 2].item()} x^3')
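    A quick way to see what x.unsqueeze(-1).pow(p) and Flatten(0, 1) are doing is to check the tensor shapes on a tiny input; a minimal standalone sketch:

    import torch

    x = torch.linspace(-1.0, 1.0, 5)        # shape (5,)
    p = torch.tensor([1, 2, 3])
    xx = x.unsqueeze(-1).pow(p)             # shape (5, 3): columns are x, x^2, x^3
    out = torch.nn.Linear(3, 1)(xx)         # shape (5, 1)
    flat = torch.nn.Flatten(0, 1)(out)      # shape (5,), matching a target like sin(x)
    print(x.shape, xx.shape, out.shape, flat.shape)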

    4. Compute the loss and update the parameters automatically: a custom nn.Module (a polynomial whose order varies randomly between 3 and 5) is trained with torch.nn.MSELoss and torch.optim.SGD.

    import math
    import random

    import torch
    
    
    class DynamicNet(torch.nn.Module):
        def __init__(self):
            super().__init__()
            # Five scalar coefficients registered as learnable parameters.
            self.a = torch.nn.Parameter(torch.randn(()))
            self.b = torch.nn.Parameter(torch.randn(()))
            self.c = torch.nn.Parameter(torch.randn(()))
            self.d = torch.nn.Parameter(torch.randn(()))
            self.e = torch.nn.Parameter(torch.randn(()))
    
        def forward(self, x):
            # The cubic terms are always used; on each forward pass the loop may add
            # x^4 and x^5 terms (reusing parameter e), so the order varies between 3 and 5.
            y = self.a + self.b * x + self.c * x ** 2 + self.d * x ** 3
            for exp in range(4, random.randint(4, 6)):
                y = y + self.e * x ** exp
            return y
    
        def string(self):
            return f'y = {self.a.item()} + {self.b.item()} x + {self.c.item()} x^2 + {self.d.item()} x^3 + {self.e.item()} x^4 ? + {self.e.item()} x^5 ?'
    
    
    if __name__ == '__main__':
        device = 'cpu'
        dtype = torch.float
        x = torch.linspace(-math.pi, math.pi, 2000, device=device, dtype=dtype)
        y = torch.sin(x)
        p = torch.tensor([1, 2, 3])
        model = DynamicNet()
        criterion = torch.nn.MSELoss(reduction='sum')
        learning_rate = 1e-8
        optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, momentum=0.9)
        for t in range(30000):
            y_pred = model(x)
            loss = criterion(y_pred, y)
            if t % 2000 == 1999:
                print(t, loss.item())
            # Zero the accumulated gradients, backprop, and let the optimizer apply the update.
            model.zero_grad()
            loss.backward()
            optimizer.step()
        print(f'Result: {model.string()}')
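    To visually check the fit, the trained model can be evaluated without gradient tracking and plotted against the target; a minimal sketch, assuming the training loop above has already run (the random higher-order terms make each forward pass slightly different):

    import matplotlib.pyplot as plt

    # Evaluate the trained polynomial (no gradient tracking needed for inference).
    with torch.no_grad():
        y_fit = model(x)

    plt.plot(x.numpy(), y.numpy(), label='sin(x)')
    plt.plot(x.numpy(), y_fit.numpy(), label='fitted polynomial')
    plt.legend()
    plt.show()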
  • Original post: https://www.cnblogs.com/pkgunboat/p/14342558.html