Skip to content

Instantly share code, notes, and snippets.

View gabrieldernbach's full-sized avatar

gabrieldernbach gabrieldernbach

View GitHub Profile
from dataclasses import dataclass, replace
from joblib import Parallel, delayed
from typing import Optional, Callable, List, Iterable
@dataclass(frozen=True)
class Either:
    """Immutable result-or-error container.

    Exactly one of the two fields is expected to be populated:
    ``value`` on success, ``exception`` on failure. Frozen so an
    instance cannot be mutated after construction.
    """
    # Bare `Optional` (no type parameter) is not a meaningful hint;
    # parameterize so type checkers can actually use the annotations.
    value: Optional[object] = None
    exception: Optional[BaseException] = None
def forward_exceptions(fun: Callable):
# Leverage scores: l_i = [X @ (X^T X)^-1 @ X^T]_{ii}, i.e. the diagonal
# of the hat matrix H — an indication of self-sensitivity or
# self-influence of the i-th sample on its own fitted value.
import numpy as np

n = 2048  # samples
d = 256   # dimensions
X = np.random.randn(n, d)  # design matrix

# Naive computation: materializes the full (n, n) hat matrix, so the
# memory footprint is quadratic in n.
# Fixes vs. the original line: X is (n, d) and not square, so
# np.linalg.inv(X) does not exist — invert the (d, d) Gram matrix
# X.T @ X instead; and take the diagonal, not the trace, since the
# trace would collapse the per-sample scores into a single number.
l_naive = np.diag(X @ np.linalg.inv(X.T @ X) @ X.T)
@gabrieldernbach
gabrieldernbach / flow_matching.py
Created September 9, 2024 17:40
flow matching in 60 lines of code
import torch
import torch.nn as nn
from sklearn.datasets import make_moons
from tqdm import tqdm
import matplotlib.pyplot as plt
class Flow(nn.Module):
def __init__(self, n_dim=2, n_pos_dim=2, n_hidden=64):
super().__init__()
self.n_dim = n_dim
@gabrieldernbach
gabrieldernbach / mine.py
Created November 27, 2024 21:21
mine.py
class MINE(nn.Module):
"""
Stub to implement Mutual Information Neural Estimation.
See https://arxiv.org/pdf/1801.04062
Quote:
We argue that the estimation of mutual information
between high dimensional continuous random variables can be
achieved by gradient descent over neural networks. We present a