Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
class OurTST(Module): | |
def __init__(self, c_in, c_out, d_model, seq_len, n_layers, drop_out, fc_dropout): | |
self.c_in, self.c_out, self.seq_len = c_in, c_out, seq_len | |
self.W_P = nn.Linear(c_in, d_model) | |
# Positional encoding | |
W_pos = torch.empty((seq_len, d_model), device=default_device()) | |
nn.init.uniform_(W_pos, -0.02, 0.02) | |
self.W_pos = nn.Parameter(W_pos, requires_grad=True) |
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
## Patch to modify the nbdev AUTOGENERATED comment for Kaggle Packages | |
## Produces: # AUTOGENERATED! DO NOT EDIT! File to edit: kaggle.com/code/test.ipynb. | |
import os | |
import nbdev | |
from nbdev.maker import ModuleMaker | |
from nbdev.config import write_cells | |
from fastcore.basics import patch | |
from fastcore.foundation import L | |
from textwrap import TextWrapper |
OlderNewer