@gabrieldernbach
gabrieldernbach / flow_matching.py
Created September 9, 2024 17:40
flow matching in 60 lines of code
import torch
import torch.nn as nn
from sklearn.datasets import make_moons
from tqdm import tqdm
import matplotlib.pyplot as plt

class Flow(nn.Module):
    def __init__(self, n_dim=2, n_pos_dim=2, n_hidden=64):
        super().__init__()
        self.n_dim = n_dim
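The preview breaks off inside __init__. As a hedged sketch of the technique the title names (not the gist's actual code; FlowNet and flow_matching_loss are placeholder names, using the imports above), conditional flow matching trains a small velocity network on linear interpolations between noise and data:

# minimal conditional flow matching sketch (assumed structure, not the gist's code)
class FlowNet(nn.Module):
    def __init__(self, n_dim=2, n_hidden=64):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(n_dim + 1, n_hidden), nn.ReLU(),
            nn.Linear(n_hidden, n_hidden), nn.ReLU(),
            nn.Linear(n_hidden, n_dim),
        )

    def forward(self, x_t, t):
        # predict the velocity v(x_t, t)
        return self.net(torch.cat([x_t, t], dim=-1))

def flow_matching_loss(model, x1):
    x0 = torch.randn_like(x1)       # noise sample
    t = torch.rand(x1.shape[0], 1)  # random time in [0, 1]
    x_t = (1 - t) * x0 + t * x1     # linear interpolation path
    v_target = x1 - x0              # constant target velocity along that path
    return ((model(x_t, t) - v_target) ** 2).mean()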
# leverage scores l_i = diag(X @ (X^T X)^-1 @ X^T)
# indication of the self-sensitivity or self-influence of the i-th sample.
import numpy as np

n = 2048  # samples
d = 256   # dimensions
X = np.random.randn(n, d)  # design matrix

# naive computation materializes the full n x n hat matrix (memory quadratic in n)
l_naive = np.diag(X @ np.linalg.inv(X.T @ X) @ X.T)
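A hedged sketch of a memory-light alternative (an assumption about where the gist is heading): compute only the diagonal of the hat matrix with einsum, never forming the n x n product.

# memory footprint linear in n: only the diagonal entries are formed
G_inv = np.linalg.inv(X.T @ X)                  # d x d Gram inverse
l_fast = np.einsum('ij,jk,ik->i', X, G_inv, X)  # x_i^T G_inv x_i for each sample
np.testing.assert_allclose(l_fast, l_naive, rtol=1e-6)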
from dataclasses import dataclass, replace
from joblib import Parallel, delayed
from typing import Optional, Callable, List, Iterable

@dataclass(frozen=True)
class Either:
    value: Optional = None
    exception: Optional = None

def forward_exceptions(fun: Callable):
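    # hedged sketch of a possible body (an assumption, not the gist's code):
    # skip the call if an earlier step already failed, otherwise capture any exception
    def wrapped(either: Either) -> Either:
        if either.exception is not None:
            return either
        try:
            return replace(either, value=fun(either.value))
        except Exception as e:
            return replace(either, exception=e)
    return wrapped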
FROM ubuntu:20.04
ARG DEBIAN_FRONTEND=noninteractive
# editors plus R and the headers needed to compile R packages
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y emacs vim r-base r-base-dev libcurl4-openssl-dev
# have the V8 R package download a prebuilt static libv8 instead of requiring a system install
ARG DOWNLOAD_STATIC_LIBV8=1
RUN R -e 'install.packages("rstan")'
import torch.nn as nn

class Residual(nn.Module):
    def __init__(self, dim):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(dim, dim, 7, 1, 3, groups=dim),  # depthwise 7x7
            nn.BatchNorm2d(dim),
            nn.Conv2d(dim, dim * 4, 1),  # 1x1 expansion
            nn.ReLU(),
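The preview stops inside the Sequential. A hedged completion of this inverted-bottleneck residual block (the 1x1 projection back to dim and the skip connection in forward are assumptions):

class Residual(nn.Module):  # assumed full shape of the truncated block above
    def __init__(self, dim):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(dim, dim, 7, 1, 3, groups=dim),  # depthwise 7x7
            nn.BatchNorm2d(dim),
            nn.Conv2d(dim, dim * 4, 1),                # expand
            nn.ReLU(),
            nn.Conv2d(dim * 4, dim, 1),                # project back to dim
        )

    def forward(self, x):
        return x + self.layer(x)  # residual / skip connection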
@gabrieldernbach
gabrieldernbach / functional.py
Created June 6, 2022 17:51
python method chaining without monads
import functools
from joblib import Parallel, delayed

def compose2(f, g):
    return lambda x: g(f(x))

def compose(*fs):
    return functools.reduce(compose2, fs)

def pipe(x, *fs):
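    # hedged completion (an assumption): thread x through fs left to right
    return compose(*fs)(x)

# usage sketch with made-up example functions
square = lambda x: x * x
increment = lambda x: x + 1
assert compose(square, increment)(3) == 10  # square, then increment
assert pipe(3, square, increment) == 10     # same chain, but data first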
@gabrieldernbach
gabrieldernbach / Knap_sack.py
Last active June 6, 2022 17:36
A fast solver for the knapsack problem. This is a branch-and-bound best-first search. The bound is derived from the continuous relaxation of the problem, which can be identified as a linear program and is solved efficiently in 1D by sorting.
import functools
import queue
from random import randint
from random import seed
from random import uniform
from typing import NamedTuple

class Item(NamedTuple):
    id: int
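The preview stops inside the Item definition. A hedged sketch of the bound the description refers to (the value and weight fields and the function name are assumptions): sort by value density and fill the remaining capacity fractionally.

# fractional (continuous) relaxation bound for a branch-and-bound node
def relaxation_bound(items, capacity):
    # greedy by value density is optimal for the continuous relaxation
    items = sorted(items, key=lambda it: it.value / it.weight, reverse=True)
    bound = 0.0
    for it in items:
        if capacity <= 0:
            break
        take = min(it.weight, capacity)
        bound += it.value * take / it.weight
        capacity -= take
    return bound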
import numpy
import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
from torchvision.datasets import MNIST
from torch.utils.data import DataLoader
import torchvision.transforms as T
from einops import rearrange
@gabrieldernbach
gabrieldernbach / kernel-k-means.py
Last active June 6, 2022 17:39
fast non-linear clustering on millions of data points
import numpy as np
import matplotlib.pyplot as plt
from sklearn.kernel_approximation import Nystroem
from sklearn.cluster import MiniBatchKMeans
# dot in the middle
X = np.random.randn(100, 2)
# circle around
Y = X / np.sqrt((X**2).mean(1, keepdims=True)) * 8
Y = Y + np.random.randn(100, 2)
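The preview stops at the toy data (a blob inside a noisy ring). Given the imports, a hedged sketch of the clustering step (kernel parameters and component counts are assumptions): approximate the RBF kernel feature map with Nystroem, then run MiniBatchKMeans in that feature space.

# linear k-means in an approximate RBF feature space can separate the ring from the blob
data = np.concatenate([X, Y])
features = Nystroem(kernel='rbf', gamma=0.1, n_components=64).fit_transform(data)
labels = MiniBatchKMeans(n_clusters=2).fit_predict(features)
plt.scatter(data[:, 0], data[:, 1], c=labels)
plt.show()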
@gabrieldernbach
gabrieldernbach / ml_code_golf.py
Last active June 6, 2022 17:40
Solving MNIST, fast and short
from torchvision.datasets import MNIST
import numpy as np

def data(train):
    mnist = MNIST(root='.', download=True, train=train)
    X = mnist.data.numpy().reshape(-1, 784) / 255
    y = mnist.targets.numpy()
    return X, y
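The preview ends with the loader. A hedged sketch of a short classifier on top of it (the model choice is an assumption, not necessarily the gist's):

# fit a linear classifier on the flattened pixels and report test accuracy
from sklearn.linear_model import LogisticRegression

X_train, y_train = data(train=True)
X_test, y_test = data(train=False)
clf = LogisticRegression(max_iter=200).fit(X_train, y_train)
print(clf.score(X_test, y_test))  # accuracy on the held-out test split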