PyTorch's Variable
The script below fits the line y = 2x + 3 by gradient descent, using Variable to get automatic differentiation:

import torch as t
from torch.autograd import Variable as V
import matplotlib.pyplot as plt
from IPython import display

# Set the random seed for reproducibility
t.manual_seed(1000)

def get_fake_data(batch_size=8):
    # Generate noisy samples from y = 2x + 3
    x = t.rand(batch_size, 1) * 20
    y = x * 2 + 3 + 3 * t.randn(batch_size, 1)
    return x, y

x, y = get_fake_data()
plt.scatter(x.squeeze(), y.squeeze())

# Randomly initialize the parameters as Variables that require gradients
w = V(t.rand(1, 1), requires_grad=True)
b = V(t.rand(1, 1), requires_grad=True)
lr = 0.001

for ii in range(8000):
    x, y = get_fake_data()
    x, y = V(x), V(y)

    # Forward pass: linear model
    y_pred = x.mm(w) + b.expand_as(x)
    loss = 0.5 * (y_pred - y) ** 2
    loss = loss.sum()  # reduce the loss vector to a scalar

    # Backward pass: populates w.grad and b.grad
    loss.backward()

    # Gradient descent update on the underlying tensors
    w.data.sub_(lr * w.grad.data)
    b.data.sub_(lr * b.grad.data)

    # Clear the gradients, otherwise they accumulate across iterations
    w.grad.data.zero_()
    b.grad.data.zero_()

    if ii % 1000 == 0:
        # Plot the current fit against a fresh batch of data
        display.clear_output(wait=True)
        x = t.arange(0, 20).float().view(-1, 1)  # .float() so mm matches w's dtype
        y = x.mm(w.data) + b.data.expand_as(x)
        plt.plot(x.numpy(), y.numpy())

        x2, y2 = get_fake_data(batch_size=20)
        plt.scatter(x2.squeeze(), y2.squeeze())

        plt.xlim(0, 20)
        plt.ylim(0, 40)
        plt.show()

print(w.data.squeeze(), b.data.squeeze())
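For reference, here is a minimal sketch (not from the original post) of the Variable mechanics the training loop relies on: wrapping a tensor so autograd tracks it, calling backward() to fill .grad, reading .data for in-place updates, and zeroing gradients because they accumulate. The variables x, y, y2 below are illustrative only; note that in PyTorch 0.4 and later, Variable was merged into Tensor, so a plain tensor with requires_grad=True behaves the same way.

import torch as t
from torch.autograd import Variable as V

x = V(t.ones(2, 2), requires_grad=True)  # wrap a tensor so autograd tracks it
y = (x * 3).sum()                        # build a small computation graph
y.backward()                             # backprop: fills x.grad with dy/dx
print(x.grad)   # a tensor of 3s, same shape as x
print(x.data)   # the raw tensor, outside the graph (what the loop updates)

# Gradients accumulate across backward calls, which is why the training loop
# zeroes them every iteration:
y2 = (x * 3).sum()
y2.backward()
print(x.grad)        # now 6s: the new gradient was added to the old one
x.grad.data.zero_()  # reset before the next backward pass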
Original article: https://www.cnblogs.com/hellcat/p/8453861.html