Half a year ago I wrote an iris classifier in numpy, about 200 lines, with every single computation coded by hand: python构建bp神经网络_鸢尾花分类 (building a BP neural network in Python for iris classification).
Now let's redo it with PyTorch, which is much simpler. For an explanation of the PyTorch syntax, see the previous post, pytorch搭建简单网络 (building a simple network with PyTorch).
import pandas as pd
import torch
import torch.nn as nn


class MyNet(nn.Module):
    def __init__(self):
        super(MyNet, self).__init__()
        # 4 input features -> two hidden layers of 3 units -> 1 output
        self.fc = nn.Sequential(
            nn.Linear(4, 3),
            nn.Sigmoid(),
            nn.Linear(3, 3),
            nn.Sigmoid(),
            nn.Linear(3, 1),
        )
        self.mls = nn.MSELoss()
        self.opt = torch.optim.Adam(params=self.parameters(), lr=0.001)

    def get_data(self):
        # flower.csv: the first 4 columns are the features, column 5 is the label
        inputs = []
        labels = []
        with open('flower.csv') as file:
            df = pd.read_csv(file, header=None)
        x = df.iloc[:, 0:4].values
        y = df.iloc[:, 4].values
        for i in range(len(x)):
            inputs.append(x[i])
        for j in range(len(y)):
            a = []
            a.append(y[j])
            labels.append(a)
        return inputs, labels

    def forward(self, inputs):
        out = self.fc(inputs)
        return out

    def train(self, x, label):
        # Note: this shadows nn.Module.train(); it works here, but a name like
        # train_step() would avoid the clash.
        out = self.forward(x)
        loss = self.mls(out, label)
        self.opt.zero_grad()
        loss.backward()
        self.opt.step()

    def test(self, x):
        return self.fc(x)


if __name__ == '__main__':
    net = MyNet()
    inputs, labels = net.get_data()
    for i in range(1000):
        for index, input in enumerate(inputs):
            # Without .float() this raises an error: the numpy array is float64,
            # while the Linear layers expect float32
            input = torch.from_numpy(input).float()
            label = torch.Tensor(labels[index])
            net.train(input, label)
    # quick test on a single sample
    c = torch.Tensor([[5.6, 2.7, 4.2, 1.3]])
    print(net.test(c))
The output is close to 0.5, which is correct. Since this was just PyTorch practice, I didn't split the data into training and test sets (a rough sketch of how one could add that is below).
tensor([[0.5392]], grad_fn=<AddmmBackward>)
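Since the post skips the split, here is a rough sketch of how one might hold out a test set and train on the whole training batch at once. The helper split_data and its defaults are my own invention, and it assumes flower.csv keeps the same 4-feature-plus-label layout used above:

import numpy as np
import pandas as pd
import torch

# Hypothetical helper: shuffle the rows and hold out a fraction for testing.
def split_data(path='flower.csv', test_ratio=0.2, seed=0):
    df = pd.read_csv(path, header=None)
    x = df.iloc[:, 0:4].values.astype('float32')
    y = df.iloc[:, 4].values.astype('float32').reshape(-1, 1)
    rng = np.random.default_rng(seed)
    idx = rng.permutation(len(x))
    n_test = int(len(x) * test_ratio)
    test_idx, train_idx = idx[:n_test], idx[n_test:]
    return (torch.from_numpy(x[train_idx]), torch.from_numpy(y[train_idx]),
            torch.from_numpy(x[test_idx]), torch.from_numpy(y[test_idx]))

# Example use with the MyNet class above, training on the full batch each epoch:
# x_train, y_train, x_test, y_test = split_data()
# for epoch in range(1000):
#     net.train(x_train, y_train)
# print(net.mls(net.test(x_test), y_test))   # MSE on the held-out samples

Casting the arrays to float32 once up front also removes the need for the per-sample .float() conversion inside the loop.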
Not having to write backpropagation and gradient descent by hand is such a blessing~
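For anyone curious what that buys you compared with the hand-written numpy version, here is a minimal, self-contained illustration of autograd computing the gradient for a single made-up neuron (all numbers are arbitrary):

import torch

# One linear neuron with a squared-error loss; w is the only parameter.
w = torch.tensor([0.5, -0.3], requires_grad=True)
x = torch.tensor([1.0, 2.0])
y = torch.tensor(1.0)

pred = (w * x).sum()       # forward pass
loss = (pred - y) ** 2     # squared error for a single sample
loss.backward()            # autograd computes dloss/dw for us
print(w.grad)              # tensor([-2.2000, -4.4000]); no hand-derived chain rule needed

Every layer and loss in the MyNet code above is handled the same way: calling loss.backward() fills in the .grad of every parameter, and the optimizer's step() applies the update.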