
@Qu3tzal
Created January 20, 2025 04:26
import math
import torch


class SelfAttention(torch.nn.Module):
    def __init__(self, input_dimension, output_dimension):
        super().__init__()
        self.input_dimension = input_dimension
        self.output_dimension = output_dimension
        # Learned linear projections for queries, keys, and values.
        self.Qw = torch.nn.Linear(self.input_dimension, self.output_dimension)
        self.Kw = torch.nn.Linear(self.input_dimension, self.output_dimension)
        self.Vw = torch.nn.Linear(self.input_dimension, self.output_dimension)

    def forward(self, x):
        # Project the inputs into query, key, and value spaces.
        Q = self.Qw(x)
        K = self.Kw(x)
        V = self.Vw(x)
        # Standard scaled dot-product attention: pairwise scores between positions.
        raw_attention_scores = Q @ K.transpose(-2, -1) / math.sqrt(self.output_dimension)
        # Softmax over the key dimension turns scores into attention weights.
        normalized_attention_scores = torch.softmax(raw_attention_scores, dim=-1)
        # Each output is the attention-weighted sum of the values.
        attended_inputs = normalized_attention_scores @ V
        return attended_inputs
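
As a quick sanity check, the module can be exercised on a random batch. The shapes below (batch of 2, sequence length 5, input dimension 16, output dimension 8) are arbitrary and chosen only for illustration; this is a minimal usage sketch, not part of the original gist.

if __name__ == "__main__":
    # Arbitrary example shapes, chosen only for illustration.
    attention = SelfAttention(input_dimension=16, output_dimension=8)
    x = torch.randn(2, 5, 16)  # (batch, sequence length, input_dimension)
    out = attention(x)
    print(out.shape)  # torch.Size([2, 5, 8])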