#model_decoder_lstm.py
#
#Copyright (C) 2020-2021 ISTI-CNR
#Licensed under the BSD 3-Clause Clear License (see license.txt)
#
import torch
import torch.nn as nn
#
#DECODER: a classic LSTM
#
class DecoderLSTM(nn.Module):
    #
    #features_size: dimensionality of each per-timestep input feature vector
    #hidden_layers: number of stacked LSTM layers
    #hidden_nodes:  size of the LSTM hidden state
    #fc_size:       width of the fully connected regression head
    #p_drop_out:    dropout probability in the regression head
    #
    def __init__(self, features_size = 512, hidden_layers = 2, hidden_nodes = 128, fc_size = 128, p_drop_out = 0.2):
        super(DecoderLSTM, self).__init__()
        #LSTM over the input feature sequence
        self.LSTM = nn.LSTM(
            input_size = features_size,
            hidden_size = hidden_nodes,
            num_layers = hidden_layers,
            batch_first = True
        )
        #fully connected head mapping the last hidden state to a scalar
        self.f = nn.Sequential(
            nn.Linear(hidden_nodes, fc_size),
            nn.ReLU(),
            nn.Dropout(p = p_drop_out),
            nn.Linear(fc_size, 1)
        )
    #
    #x: tensor of shape (batch, sequence, features_size)
    #returns one scalar prediction per sequence
    #
    def forward(self, x):
        #run the LSTM
        self.LSTM.flatten_parameters()
        out, (h_n, c_n) = self.LSTM(x, None)
        #run the regressor on the last time step
        z = out[:, -1, :]
        y = self.f(z)
        #at inference time, constrain predictions to [0, 1]
        if not self.training:
            y = y.clamp(0.0, 1.0)
            y = y.squeeze(0)
        return y
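
#
#Minimal usage sketch (not part of the original file): builds the decoder and
#runs a random batch through it in eval mode. The batch size, sequence length,
#and feature size below are illustrative assumptions, not values from the repo.
#
if __name__ == "__main__":
    decoder = DecoderLSTM(features_size = 512)
    decoder.eval()
    x = torch.rand(4, 10, 512)  #(batch, sequence, features)
    with torch.no_grad():
        y = decoder(x)
    print(y.shape)  #torch.Size([4, 1]); values clamped to [0, 1] in eval mode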