Published: 2024-08-03 18:01
Contents
1. Quickly building a neural network
(1) Code
(2) Output

1. Quickly building a neural network

(1) Code
import torch
from torch.autograd import Variable  # kept for compatibility; in modern PyTorch, tensors can be used directly
import torch.nn.functional as F
import matplotlib.pyplot as plt

# Build a toy two-class dataset: two Gaussian clusters centered at (2, 2) and (-2, -2)
n_data = torch.ones(100, 2)
x0 = torch.normal(2 * n_data, 1)   # class-0 samples
y0 = torch.zeros(100)              # class-0 labels
x1 = torch.normal(-2 * n_data, 1)  # class-1 samples
y1 = torch.ones(100)               # class-1 labels
x = torch.cat((x0, x1), 0).type(torch.FloatTensor)
y = torch.cat((y0, y1), 0).type(torch.LongTensor)
x, y = Variable(x), Variable(y)

# plt.scatter(x.data.numpy()[:, 0], x.data.numpy()[:, 1], c=y.data.numpy(), s=100, lw=0, cmap='RdYlGn')
# plt.show()

# Method 1: subclass torch.nn.Module and define __init__() and forward()
class Net(torch.nn.Module):  # inherit from Module
    def __init__(self, n_features, n_hidden, n_output):
        super(Net, self).__init__()  # standard step: initialize the parent class
        self.hidden = torch.nn.Linear(n_features, n_hidden)  # hidden layer
        self.predict = torch.nn.Linear(n_hidden, n_output)   # output (prediction) layer

    def forward(self, x):
        # forward pass: this is where the network is actually wired together
        x = F.relu(self.hidden(x))  # here ReLU is applied as a function
        x = self.predict(x)
        return x

net = Net(2, 10, 2)  # input, hidden, and output sizes are 2, 10, 2
# whichever output position holds the larger value is the predicted class

# Method 2: the quick way to build the same network
net2 = torch.nn.Sequential(
    # stack the layers one by one
    torch.nn.Linear(2, 10),
    torch.nn.ReLU(),  # here ReLU is a layer class, not a function
    torch.nn.Linear(10, 2),
)

print(net)
print(net2)
(2) Output

Net(
(hidden): Linear(in_features=2, out_features=10, bias=True)
(predict): Linear(in_features=10, out_features=2, bias=True)
)
Sequential(
(0): Linear(in_features=2, out_features=10, bias=True)
(1): ReLU()
(2): Linear(in_features=10, out_features=2, bias=True)
)
Process finished with exit code 0
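The two printouts confirm that net and net2 describe the same architecture; the only visible difference is that Sequential names its layers by index, while the subclass uses the attribute names hidden and predict. The snippet below is a minimal sketch, not part of the original post, of how either network could be trained on the toy data above. It assumes the script above has already run (so net2, x, and y exist), and the choice of CrossEntropyLoss, SGD, lr=0.02, and 100 iterations is purely illustrative.

# --- continues from the script above; net2, x, y are assumed to be defined ---
loss_func = torch.nn.CrossEntropyLoss()                   # classification loss: raw scores vs. Long labels
optimizer = torch.optim.SGD(net2.parameters(), lr=0.02)   # lr=0.02 is an arbitrary illustrative value

for t in range(100):
    out = loss = None
    out = net2(x)              # raw scores, shape [200, 2]
    loss = loss_func(out, y)   # compare scores against the class labels
    optimizer.zero_grad()      # clear old gradients
    loss.backward()            # backpropagate
    optimizer.step()           # update the weights

prediction = torch.max(net2(x), 1)[1]           # predicted class = index of the larger score
accuracy = (prediction == y).float().mean()
print(accuracy)

Because net has exactly the same layer sizes, the same loop works unchanged with net in place of net2.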