First, the LSTM network. The code is commented, so I won't go into much detail here.
import torch
from torch import nn
from torch.nn import functional


class lstmNet(nn.Module):
    def __init__(self):
        super(lstmNet, self).__init__()
        input_size = 30    # length of the input sequence
        hidden_size = 300  # number of hidden units, i.e. the LSTM output dimension
        vectorSize = 50    # embedding vector size of a single token
        # [batchSize, 30] --> [batchSize, 30, vectorSize]
        # Embedding layer: a lookup table that turns each index into a vector.
        # 2000000: our data never contains more than 2,000,000 distinct indices,
        # so this is also the size of the lookup table.
        self.embedding = nn.Embedding(2000000, vectorSize)
        # [batchSize, 30, vectorSize] --> [batchSize, 30, hidden_size]
        self.lstm = nn.LSTM(
            input_size=vectorSize,
            hidden_size=hidden_size,  # number of hidden units, i.e. the output dimension
            num_layers=5,
            batch_first=True,  # LSTM input is usually (batch, sequence, vector); this flag puts batch first
        )
        # [batchSize, 30, hidden_size] --> [batchSize, 30, 1]
        # self.fc1 = nn.Linear(hidden_size, 1)
        # [batchSize, 30 * hidden_size] --> [batchSize, 1]
        self.fc = nn.Linear(input_size * hidden_size, 1)

    def forward(self, x):
        # x: [batchSize, 30] integer indices; nn.Embedding expects an integer tensor
        x = x.long()
        # [batchSize, 30] --> [batchSize, 30, vectorSize]; the embedding output is already float,
        # and its last dimension already equals the LSTM's input_size, so no reshape is needed here
        x = self.embedding(x)
        output, (h_n, c_n) = self.lstm(x)
        # [batchSize, 30, hidden_size] --> [batchSize, 30 * hidden_size]
        output = torch.reshape(output, (-1, output.shape[1] * output.shape[2]))
        output = functional.relu(self.fc(output))
        # output = functional.relu(self.fc2(output))
        return output
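As a quick sanity check of the shapes, here is a minimal sketch of a forward pass, continuing from the imports and class above. The batch size of 4 and the random indices are made-up values for illustration, assuming the input is a [batchSize, 30] tensor of integer indices as described in the comments; they are not from the original data.

if __name__ == "__main__":
    net = lstmNet()
    # dummy input: 4 samples, each a sequence of 30 indices in [0, 2000000)
    dummy = torch.randint(0, 2000000, (4, 30))
    out = net(dummy)
    print(out.shape)  # expected: torch.Size([4, 1])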
Next is the ResNet network.
from torch import nn
from torch.nn import functional


class resBlock(nn.Module):
    def __init__(self, inputChannel, outputChannel):
        super(resBlock, self).__init__()
        self.linear1 = nn.Linear(inputChannel, outputChannel)
        self.bn1 = nn.BatchNorm1d(outputChannel)
        self.linear2 = nn.Linear(outputChannel, outputChannel)
        self.bn2 = nn.BatchNorm1d(outputChannel)
        # shortcut (skip) connection
        self.extra = nn.Sequential()
        if outputChannel != inputChannel:
            # project the input so its dimension matches the block output
            self.extra = nn.Sequential(
                nn.Linear(inputChannel, outputChannel),
                nn.BatchNorm1d(outputChannel)
            )

    def forward(self, x):
        # output of the block's two linear layers
        out = functional.relu(self.bn1(self.linear1(x)))
        out = self.bn2(self.linear2(out))
        # add the shortcut output to the block output
        out = self.extra(x) + out
        out = functional.relu(out)
        return out


class resNet(nn.Module):
    def __init__(self):
        super(resNet, self).__init__()
        self.linear1 = nn.Linear(30, 36)
        self.bn1 = nn.BatchNorm1d(36)
        self.blk1 = resBlock(36, 48)
        self.blk2 = resBlock(48, 60)
        self.blk3 = resBlock(60, 72)
        self.blk4 = resBlock(72, 84)
        self.fc = nn.Linear(84, 1)

    def forward(self, x):
        x = functional.relu(self.bn1(self.linear1(x)))
        x = functional.relu(self.blk1(x))
        x = functional.relu(self.blk2(x))
        x = functional.relu(self.blk3(x))
        x = functional.relu(self.blk4(x))
        x = functional.relu(self.fc(x))
        return x
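And the matching shape check for the residual network, again a minimal sketch with a made-up batch of random features (the batch size of 4 is arbitrary; BatchNorm1d needs more than one sample in training mode, so a batch of 1 would not work here):

import torch

if __name__ == "__main__":
    net = resNet()
    dummy = torch.rand(4, 30)  # [batchSize, 30] float features
    out = net(dummy)
    print(out.shape)  # expected: torch.Size([4, 1])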
I won't go into the specific samples and the training script here; a rough sketch of what such a loop could look like is given below.
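This is only an assumed sketch, not the author's actual script: it supposes a regression target, an MSE loss, the Adam optimizer, and a hypothetical DataLoader named train_loader yielding (features, target) pairs; the learning rate and epoch count are placeholders.

import torch
from torch import nn

def train(model, train_loader, epochs=10, lr=1e-3, device="cpu"):
    # hypothetical training loop for either lstmNet or resNet above
    model = model.to(device)
    criterion = nn.MSELoss()  # assumed loss; the post does not state which one is used
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)
    for epoch in range(epochs):
        model.train()
        total = 0.0
        for x, y in train_loader:
            x = x.to(device)
            y = y.to(device).float().view(-1, 1)  # match the [batchSize, 1] model output
            optimizer.zero_grad()
            loss = criterion(model(x), y)
            loss.backward()
            optimizer.step()
            total += loss.item()
        print(f"epoch {epoch}: mean loss {total / len(train_loader):.4f}")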