import math

import torch


class SelfAttention(torch.nn.Module):
    def __init__(self, input_dimension, output_dimension):
        super().__init__()
        self.input_dimension = input_dimension
        self.output_dimension = output_dimension
        # Learned linear projections for queries, keys, and values.
        self.Qw = torch.nn.Linear(self.input_dimension, self.output_dimension)
        self.Kw = torch.nn.Linear(self.input_dimension, self.output_dimension)
        self.Vw = torch.nn.Linear(self.input_dimension, self.output_dimension)

    def forward(self, x):
        # Project the inputs into query, key, and value spaces.
        Q = self.Qw(x)
        K = self.Kw(x)
        V = self.Vw(x)
        # Scaled dot-product attention: one score per (query position, key position) pair.
        raw_attention_scores = Q @ K.transpose(-2, -1) / math.sqrt(self.output_dimension)
        # Normalize each row into a distribution over the sequence positions.
        normalized_attention_scores = torch.softmax(raw_attention_scores, dim=-1)
        # Weighted sum of the values, using the attention weights.
        attended_inputs = normalized_attention_scores @ V
        return attended_inputs
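A minimal usage sketch (not part of the original gist); the dimensions below are illustrative and assume an input of shape (batch, sequence, input_dimension):

# Hypothetical example sizes, chosen only for illustration.
attention = SelfAttention(input_dimension=64, output_dimension=32)
x = torch.randn(8, 16, 64)  # (batch, sequence, input_dimension)
out = attention(x)
print(out.shape)  # torch.Size([8, 16, 32])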