
Commit f80b480

committed
single layer bidir code implemented
1 parent b8a4352 commit f80b480

File tree

1 file changed: +34 -11 lines changed


hidden_rnn.py

Lines changed: 34 additions & 11 deletions
@@ -63,16 +63,16 @@ def forward(self, input):
 
         return output, hidden
 
-# n_layers = 1
-# bidirectional = False
-# model = Single_Layer_Uni_Directional_RNN(input_dim, embedding_dim, hidden_dim, n_layers, bidirectional)
-# output, hidden = model(sequence_tensor)
+n_layers = 1
+bidirectional = False
+model = Single_Layer_Uni_Directional_RNN(input_dim, embedding_dim, hidden_dim, n_layers, bidirectional)
+output, hidden = model(sequence_tensor)
 
-# print(f"Input shape is : {sequence_tensor.shape}")
-# print(f"Output shape is : {output.shape}")
-# print(f"Hidden shape is : {hidden.shape}")
+print(f"Input shape is : {sequence_tensor.shape}")
+print(f"Output shape is : {output.shape}")
+print(f"Hidden shape is : {hidden.shape}")
 
-# assert (output[-1, :, :] == hidden[0]).all(), "Final output must be same as Hidden state in case of Single layer uni-directional RNN"
+assert (output[-1, :, :] == hidden[0]).all(), "Final output must be same as Hidden state in case of Single layer uni-directional RNN"
 
 
 class Multi_Layer_Uni_Directional_RNN(nn.Module):
@@ -106,11 +106,34 @@ def forward(self, input):
 
 
 class Single_Layer_Bi_Directional_RNN(nn.Module):
-    def __init__(self):
+    def __init__(self, input_dim, embedding_dim, hidden_dim, n_layers, bidirectional):
         super().__init__()
+        self.embedding = nn.Embedding(input_dim, embedding_dim)
+        self.rnn = nn.RNN(embedding_dim, hidden_dim, num_layers=n_layers, bidirectional=bidirectional)
 
-    def forward(self):
-        pass
+    def forward(self, input):
+        # input shape => [max_len, batch_size]
+
+        embed = self.embedding(input)
+        # embed shape => [max_len, batch_size, embedding_dim]
+
+        output, hidden = self.rnn(embed)
+        # output shape => [max_len, batch_size, hidden_size * 2] => since forward and backward outputs are stacked
+        # hidden shape => [2, batch_size, hidden_size]
+
+        return output, hidden
+
+n_layers = 1
+bidirectional = True
+model = Single_Layer_Bi_Directional_RNN(input_dim, embedding_dim, hidden_dim, n_layers, bidirectional)
+output, hidden = model(sequence_tensor)
+
+print(f"Input shape is : {sequence_tensor.shape}")
+print(f"Output shape is : {output.shape}")
+print(f"Hidden shape is : {hidden.shape}")
+
+assert (output[-1, :, :hidden_dim] == hidden[0]).all(), "First hidden_dim of output at last time step must be same as Final Forward Hidden state in case of Single layer bi-directional RNN"
+assert (output[0, :, hidden_dim:] == hidden[-1]).all(), "Last hidden_dim of output at initial time step must be same as Final Backward Hidden state in case of Single layer bi-directional RNN"
 
 
 class Multi_Layer_Bi_Directional_RNN(nn.Module):
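Note: the test code in this diff relies on input_dim, embedding_dim, hidden_dim, and sequence_tensor, which are defined earlier in hidden_rnn.py and not shown in these hunks. A minimal standalone sketch of the bidirectional output/hidden relationship the new asserts check, using assumed placeholder sizes rather than the file's actual values:

import torch
import torch.nn as nn

# Placeholder sizes; the real values live earlier in hidden_rnn.py.
input_dim, embedding_dim, hidden_dim = 100, 32, 64
max_len, batch_size = 10, 4
sequence_tensor = torch.randint(0, input_dim, (max_len, batch_size))

embedding = nn.Embedding(input_dim, embedding_dim)
rnn = nn.RNN(embedding_dim, hidden_dim, num_layers=1, bidirectional=True)

output, hidden = rnn(embedding(sequence_tensor))
# output: [max_len, batch_size, hidden_dim * 2], hidden: [2, batch_size, hidden_dim]

# Forward direction: its final state (hidden[0]) equals the first hidden_dim
# channels of the output at the last time step.
assert (output[-1, :, :hidden_dim] == hidden[0]).all()
# Backward direction: its final state (hidden[1]) equals the last hidden_dim
# channels of the output at the first time step.
assert (output[0, :, hidden_dim:] == hidden[1]).all()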
