import numpy as np
import torch

# Load the diabetes dataset; each row holds 8 feature columns followed by the label.
xy = np.loadtxt('./数据集/diabetes/diabetes.csv.gz', delimiter=',', dtype=np.float32)
x_data = torch.from_numpy(xy[:, :-1])
y_data = torch.from_numpy(xy[:, [-1]])  # indexing with [-1] keeps a 2-D matrix; plain -1 would give a 1-D vector
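A quick way to see why the bracketed index matters (a small sketch, assuming xy has been loaded as above):

print(xy[:, -1].shape)    # (N,)    -> 1-D vector, does not match the model's (N, 1) output
print(xy[:, [-1]].shape)  # (N, 1)  -> 2-D column matrix, the shape BCELoss expects for the targets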
import torch

class LogisticRegressionModel(torch.nn.Module):
    def __init__(self):
        super(LogisticRegressionModel, self).__init__()
        self.linear = torch.nn.Linear(8, 1)    # 8 input features -> 1 output
        self.sigmoid = torch.nn.Sigmoid()

    def forward(self, X):
        y_pred = self.sigmoid(self.linear(X))  # squash the linear output into (0, 1)
        return y_pred

lrm = LogisticRegressionModel()
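As a quick sanity check (a minimal sketch, assuming x_data from the loading step above), the single-layer model maps the 8 input features of every sample to one predicted probability:

with torch.no_grad():
    out = lrm(x_data)
print(out.shape)   # torch.Size([N, 1]): one probability per row of x_data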
import torch

class LogisticRegressionModel(torch.nn.Module):
    def __init__(self):
        super(LogisticRegressionModel, self).__init__()
        # Three fully connected layers: 8 features -> 6 -> 4 -> 1 output
        self.linear1 = torch.nn.Linear(8, 6)
        self.linear2 = torch.nn.Linear(6, 4)
        self.linear3 = torch.nn.Linear(4, 1)
        self.sigmoid = torch.nn.Sigmoid()

    def forward(self, x):
        x = self.sigmoid(self.linear1(x))
        x = self.sigmoid(self.linear2(x))
        x = self.sigmoid(self.linear3(x))
        return x

lrm = LogisticRegressionModel()
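The same 8 → 6 → 4 → 1 stack can also be written with torch.nn.Sequential; this is just an equivalent sketch, not part of the original code:

lrm_seq = torch.nn.Sequential(
    torch.nn.Linear(8, 6), torch.nn.Sigmoid(),
    torch.nn.Linear(6, 4), torch.nn.Sigmoid(),
    torch.nn.Linear(4, 1), torch.nn.Sigmoid(),
)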
# Binary cross-entropy loss; reduction='sum' adds up the per-sample losses
criterion = torch.nn.BCELoss(reduction='sum')
# lr is the learning rate
optimizer = torch.optim.SGD(lrm.parameters(), lr=0.1)
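Note that with reduction='sum' the loss (and its gradients) scales with the number of samples, so the effective step size depends on the dataset size; averaging instead keeps it independent. A hedged alternative, not from the original:

criterion = torch.nn.BCELoss(reduction='mean')  # average the per-sample losses instead of summing them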
for epoch in range(100):
    y_pred = lrm(x_data)              # forward pass over the whole dataset
    loss = criterion(y_pred, y_data)
    print(epoch, loss.item())

    optimizer.zero_grad()             # clear gradients accumulated from the previous step
    loss.backward()                   # backpropagate
    optimizer.step()                  # update the weights
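Once training finishes, a simple way to gauge the fit is to threshold the predicted probabilities at 0.5 and compare them with the labels. This evaluation sketch is an addition, not part of the original code, and it measures accuracy on the training data only:

with torch.no_grad():
    y_prob = lrm(x_data)
    y_label = (y_prob >= 0.5).float()                  # turn probabilities into 0/1 predictions
    accuracy = (y_label == y_data).float().mean().item()
print('training accuracy:', accuracy)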