Commit d13daed6 authored by Yandi

added a FFN in CNN

parent 3b063700
1 merge request: !1 Master into main
@@ -57,6 +57,7 @@ BidirectionalLSTM:
 RNN:
   HiddenSize: 35
   NumLayers: 4
+  NumFFN: 8
   Initialization: None
 #Name of directory containing logs
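The new NumFFN key sits alongside the existing RNN hyperparameters and is read from the parsed config in the same way as HiddenSize and NumLayers. A minimal loading sketch, assuming the config is YAML and using a hypothetical file name (the actual config path is not shown in this diff):

import yaml

# "config.yaml" is a placeholder; substitute the repository's real config path.
with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

hidden_size = cfg["RNN"]["HiddenSize"]  # 35
num_layers = cfg["RNN"]["NumLayers"]    # 4
num_ffn = cfg["RNN"]["NumFFN"]          # 8, the key added by this commit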
(Four other changed files in this commit have no text preview.)
@@ -42,16 +42,34 @@ class RNN(nn.Module):
         super(RNN, self).__init__()
         self.hidden_size = cfg["RNN"]["HiddenSize"]
         self.num_layers = cfg["RNN"]["NumLayers"]
+        self.num_ffn = cfg["RNN"]["NumFFN"]
         # RNN
         self.rnn = nn.Sequential(
             nn.RNN(input_size, self.hidden_size, self.num_layers, batch_first=True, nonlinearity='relu'),
             nn.Dropout(p=0.2)
         )
-        self.fc = nn.Sequential(
-            nn.Linear(self.hidden_size, 1),
-            nn.Dropout(p=0.2)
-        )
+        self.fc = nn.Sequential()
+        for layer in range(self.num_ffn):
+            self.fc.add_module(
+                f"linear_{layer}", nn.Linear(self.hidden_size, self.hidden_size)
+            )
+            self.fc.add_module(
+                f"relu_{layer}",
+                nn.ReLU()
+            )
+            self.fc.add_module(
+                f"dropout_{layer}",
+                nn.Dropout(p=0.2)
+            )
+        self.fc.add_module(
+            "last_linear",
+            nn.Linear(self.hidden_size, 1)
+        )

     def forward(self, x):
         use_cuda = torch.cuda.is_available()
@@ -61,8 +79,7 @@ class RNN(nn.Module):
             device = torch.device('cpu')
         # Initialize hidden state with zeros
         h0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size).to(device)
-        # One time step
         out, hn = self.rnn(x, h0)
         out = self.fc(out)
         return out
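For reference, the head built in __init__ stacks NumFFN blocks of Linear -> ReLU -> Dropout on top of the RNN hidden size and finishes with a single-output Linear, so it only transforms the last (feature) dimension of the RNN output. A self-contained sketch of that pattern with hypothetical config values, shown as an illustration rather than the repository's exact module:

import torch
import torch.nn as nn

# Hypothetical values mirroring the YAML above.
hidden_size = 35
num_ffn = 8

# Same construction pattern as the diff: num_ffn blocks of
# Linear -> ReLU -> Dropout, then a final Linear down to one output.
fc = nn.Sequential()
for layer in range(num_ffn):
    fc.add_module(f"linear_{layer}", nn.Linear(hidden_size, hidden_size))
    fc.add_module(f"relu_{layer}", nn.ReLU())
    fc.add_module(f"dropout_{layer}", nn.Dropout(p=0.2))
fc.add_module("last_linear", nn.Linear(hidden_size, 1))

# Applied to an RNN output of shape (batch, seq_len, hidden_size),
# the head maps it to (batch, seq_len, 1).
dummy = torch.randn(2, 10, hidden_size)
print(fc(dummy).shape)  # torch.Size([2, 10, 1])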