Overview
Federated Learning for Time-Series Forecasting
Introduction
We set up three nodes: one central node and two child nodes. Each child node trains an LSTM model on its local data and, at the end of every epoch, exchanges parameters with the central node, which fuses them into a shared model.
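The central node's fusion step itself is not shown in this excerpt. As a minimal sketch, assuming the server has already collected the `state_dict` of each child model (the function name `federated_average` and the equal-weight averaging are assumptions, not the original implementation), the fusion could look like this:

import copy
import torch

def federated_average(state_dicts):
    # FedAvg-style fusion: element-wise mean of every parameter tensor
    fused = copy.deepcopy(state_dicts[0])
    for key in fused:
        fused[key] = torch.stack([sd[key].float() for sd in state_dicts]).mean(dim=0)
    return fused

# fused = federated_average([state_dict_node1, state_dict_node2])
# each child node would then call model.load_state_dict(fused) before its next epoch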
- Partial code for a child node
import torch
import torch.nn as nn

class LSTM(nn.Module):
    def __init__(self, input_size=2, hidden_size=4, output_size=1, num_layer=1):
        super(LSTM, self).__init__()
        self.layer1 = nn.LSTM(input_size, hidden_size, num_layer)  # stacked LSTM; batch_first defaults to False
        self.layer2 = nn.Linear(hidden_size, output_size)          # maps the hidden state to the prediction

    def forward(self, x):
        # x: (seq_len, batch, input_size)
        x, _ = self.layer1(x)
        x = torch.relu(x)
        s, b, h = x.size()
        x = x.view(s * b, h)   # flatten time and batch dims for the linear layer
        x = self.layer2(x)
        x = x.view(s, b, -1)   # restore (seq_len, batch, output_size)
        return x
# 2. Build the model
model = LSTM(look_back, 4, 1, 2)  # input_size=look_back, hidden_size=4, output_size=1, num_layer=2
# print(model)
loss_fun = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.05)
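# (Assumed, illustrative only) with batch_first left at its default of False, the
# model expects input shaped (seq_len, batch, look_back) and returns (seq_len, batch, 1):
# sample = torch.randn(10, 16, look_back)
# print(model(sample).shape)  # torch.Size([10, 16, 1])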
# s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# # Address and port of the central node
# host = '127.0.0.1'
# port = 9999
# # Establish the connection
# s.connect((host, port))
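# The per-epoch parameter exchange with the central node is not included in this
# excerpt. The helper below is an assumed sketch, not the original implementation:
# it serializes the local state_dict with pickle, sends it with a 4-byte length
# prefix over the socket `s`, and loads the fused parameters the server sends back.
import pickle
import struct

def sync_with_server(sock, model):
    payload = pickle.dumps(model.state_dict())
    sock.sendall(struct.pack('>I', len(payload)) + payload)  # length-prefixed send
    size = struct.unpack('>I', sock.recv(4))[0]              # size of the server reply
    buf = b''
    while len(buf) < size:
        buf += sock.recv(size - len(buf))                    # read the full reply
    model.load_state_dict(pickle.loads(buf))                 # adopt the fused parameters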
# 3. Start training
losses = list()
steps = list()
for epoch in range(1, EPOCH + 1):
log("