From bcd3e158cfad328b4fe924e78d858a459bf01efc Mon Sep 17 00:00:00 2001
From: Abhishek Dubey
Date: Thu, 13 Jul 2023 15:56:52 +0530
Subject: [PATCH] Update gpt.py

Updating the forward function in the Transformer block. The change is
small, but I will still try my best to explain it below:

As per the paper, in the 'Add & Norm' block of the Transformer, LayerNorm
is applied on top of the sum of the input/residual and the output of
self-attention. In the current code, however, LayerNorm is applied first
and the result is then added back to the input/residual.
---
 gpt.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gpt.py b/gpt.py
index e4fc68d..39be475 100644
--- a/gpt.py
+++ b/gpt.py
@@ -131,8 +131,8 @@ def __init__(self, n_embd, n_head):
         self.ln2 = nn.LayerNorm(n_embd)
 
     def forward(self, x):
-        x = x + self.sa(self.ln1(x))
-        x = x + self.ffwd(self.ln2(x))
+        x = self.ln1(self.sa(x) + x)
+        x = self.ln2(self.ffwd(x) + x)
         return x
 
 class GPTLanguageModel(nn.Module):
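
For context, here is a minimal, self-contained sketch contrasting the two orderings (post-LN, as proposed by this patch, versus pre-LN, as in the current gpt.py). It uses torch.nn.MultiheadAttention and a small MLP as stand-ins for the repo's MultiHeadAttention and FeedForward modules; the class names PostLNBlock and PreLNBlock are illustrative and not part of gpt.py:

import torch
import torch.nn as nn


class PostLNBlock(nn.Module):
    # "Add & Norm" as in the original Transformer paper (post-LN), which is
    # the ordering this patch switches Block.forward to.
    def __init__(self, n_embd, n_head):
        super().__init__()
        # Stand-in sublayers so the sketch runs on its own; gpt.py uses its
        # own MultiHeadAttention and FeedForward classes instead.
        self.sa = nn.MultiheadAttention(n_embd, n_head, batch_first=True)
        self.ffwd = nn.Sequential(
            nn.Linear(n_embd, 4 * n_embd), nn.ReLU(), nn.Linear(4 * n_embd, n_embd)
        )
        self.ln1 = nn.LayerNorm(n_embd)
        self.ln2 = nn.LayerNorm(n_embd)

    def forward(self, x):
        attn_out, _ = self.sa(x, x, x, need_weights=False)
        x = self.ln1(attn_out + x)       # norm applied to (sublayer output + residual)
        x = self.ln2(self.ffwd(x) + x)
        return x


class PreLNBlock(nn.Module):
    # Ordering used by the current gpt.py (pre-LN): normalize the input of
    # each sublayer, then add the residual to the sublayer output.
    def __init__(self, n_embd, n_head):
        super().__init__()
        self.sa = nn.MultiheadAttention(n_embd, n_head, batch_first=True)
        self.ffwd = nn.Sequential(
            nn.Linear(n_embd, 4 * n_embd), nn.ReLU(), nn.Linear(4 * n_embd, n_embd)
        )
        self.ln1 = nn.LayerNorm(n_embd)
        self.ln2 = nn.LayerNorm(n_embd)

    def forward(self, x):
        h = self.ln1(x)
        attn_out, _ = self.sa(h, h, h, need_weights=False)
        x = x + attn_out                 # residual added to output of normed sublayer
        x = x + self.ffwd(self.ln2(x))
        return x


if __name__ == "__main__":
    x = torch.randn(2, 8, 32)            # (batch, time, n_embd)
    print(PostLNBlock(32, 4)(x).shape)   # torch.Size([2, 8, 32])
    print(PreLNBlock(32, 4)(x).shape)    # torch.Size([2, 8, 32])

Both variants preserve the tensor shape; the difference is only where LayerNorm sits relative to the residual addition.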