Skip to content

Instantly share code, notes, and snippets.

@dienhoa
dienhoa / regression.ipynb
Last active April 27, 2023 20:09
Transformer for time-series regression
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@dienhoa
dienhoa / tst_attention.py
Created May 1, 2023 14:31
Transformer for time series with an attention layer
class OurTST(Module):
    # Time Series Transformer: projects each timestep's c_in channels into a
    # d_model embedding and adds a learned positional encoding.
    # NOTE(review): this gist preview is truncated — n_layers, c_out, drop_out
    # and fc_dropout are accepted but not used in the visible lines; the
    # encoder stack and head presumably follow. Confirm against the full gist.
    def __init__(self, c_in, c_out, d_model, seq_len, n_layers, drop_out, fc_dropout):
        # c_in:   number of input channels (features per timestep)
        # c_out:  output size — presumably used by a head defined later; verify
        # seq_len: number of timesteps per sample
        self.c_in, self.c_out, self.seq_len = c_in, c_out, seq_len
        # Per-timestep linear projection from channel space into model space.
        self.W_P = nn.Linear(c_in, d_model)
        # Positional encoding
        # Learned (not sinusoidal) positional embedding, one d_model vector per
        # timestep, initialized uniformly in [-0.02, 0.02].
        W_pos = torch.empty((seq_len, d_model), device=default_device())
        nn.init.uniform_(W_pos, -0.02, 0.02)
        # Registered as a Parameter so it is trained along with the model.
        self.W_pos = nn.Parameter(W_pos, requires_grad=True)
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@dienhoa
dienhoa / nbdev_kaggle_patch.py
Last active November 4, 2024 20:55
nbdev kaggle patch
## Patch to modify nbdev's AUTOGENERATED comment for Kaggle Packages.
## Produces: # AUTOGENERATED! DO NOT EDIT! File to edit: kaggle.com/code/test.ipynb.
import os
import nbdev
from nbdev.maker import ModuleMaker
from nbdev.config import write_cells
from fastcore.basics import patch
from fastcore.foundation import L
from textwrap import TextWrapper