Thomas Dybdahl Ahle (thomasahle): public gists
thomasahle / soft_topk_bce.py
Created December 6, 2023 20:58
Soft TopK with BCE loss
import torch
from torch.autograd import Function
import torch.nn.functional as F


@torch.no_grad()
def _find_ts(xs, ks, binary_iter=16, newton_iter=1):
    # Find shifts ts so that sigmoid(xs + ts) sums to ks along the last axis,
    # located by binary search and refined with Newton steps.
    n = xs.shape[-1]
    assert torch.all((0 < ks) & (ks < n)), "We don't support k=0 or k=n"
    # Lo should be small enough that all sigmoids are in the 0 area.
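The preview cuts off inside _find_ts. As a hedged sketch of how the bisection it sets up could proceed, assuming (per the comment above) that we search for a shift t with sigmoid(xs + t).sum(-1) == ks; the name find_ts_sketch and all variable names are illustrative, not from the gist:

@torch.no_grad()
def find_ts_sketch(xs, ks, binary_iter=16):
    # Bracket the shift: at lo every sigmoid is ~0 (sum < k),
    # at hi every sigmoid is ~1 (sum ~ n > k).
    lo = -xs.max(dim=-1, keepdim=True).values - 10
    hi = -xs.min(dim=-1, keepdim=True).values + 10
    ks = ks.unsqueeze(-1).to(xs.dtype)
    for _ in range(binary_iter):
        mid = (lo + hi) / 2
        too_small = torch.sigmoid(xs + mid).sum(dim=-1, keepdim=True) < ks
        lo = torch.where(too_small, mid, lo)
        hi = torch.where(too_small, hi, mid)
    return (lo + hi) / 2

# Usage: a soft selection of roughly k entries per row.
xs = torch.randn(4, 10)
ks = torch.full((4,), 3)
ps = torch.sigmoid(xs + find_ts_sketch(xs, ks))
print(ps.sum(-1))  # approximately 3 in every row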
################################################################################
# NFA Implementation using greenery
################################################################################
import greenery
from greenery import rxelems as rx
from collections import defaultdict


class State:
    def __init__(self, is_accept=False):
        # The preview cuts off here; the fields below are a guess at the obvious ones.
        self.is_accept = is_accept
        self.transitions = defaultdict(set)  # input symbol -> set of successor States
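The rest of the gist is not shown. A self-contained sketch of the classic subset simulation such a State class supports; eps_closure and run are illustrative names, not from the gist, and nothing below touches greenery's API:

EPS = None  # key used for epsilon transitions

def eps_closure(states):
    # All states reachable from `states` through epsilon moves alone.
    stack, seen = list(states), set(states)
    while stack:
        s = stack.pop()
        for t in s.transitions[EPS]:
            if t not in seen:
                seen.add(t)
                stack.append(t)
    return seen

def run(start, string):
    current = eps_closure({start})
    for ch in string:
        current = eps_closure({t for s in current for t in s.transitions[ch]})
    return any(s.is_accept for s in current)

# NFA for the language a*b:
s0, s1 = State(), State(is_accept=True)
s0.transitions['a'].add(s0)
s0.transitions['b'].add(s1)
assert run(s0, 'aaab') and not run(s0, 'ba')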
import dspy
from pydantic import BaseModel
from typing import List


class State(BaseModel):
    name: str
    abbreviation: str
    capital: str


class States(BaseModel):
    states: List[State]  # preview cut off; the List import suggests a field like this
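The gist likely pairs these models with dspy's typed predictors. A hedged sketch of that pattern, assuming the dspy 2.x TypedPredictor API; the signature class, prompt, and model name are all placeholders:

class ExtractStates(dspy.Signature):
    """Extract the requested US states."""
    request: str = dspy.InputField()
    output: States = dspy.OutputField()

dspy.settings.configure(lm=dspy.OpenAI(model="gpt-3.5-turbo"))
predictor = dspy.TypedPredictor(ExtractStates)
result = predictor(request="List the US states that border Mexico.")
for state in result.output.states:
    print(state.name, state.abbreviation, state.capital)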
import torch
import torch.nn as nn
import torch.optim as optim
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import tqdm

# Generate random dataset
np.random.seed(0)
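The preview ends right after seeding. A guess at the kind of setup these imports point to: fit a small MLP to random data and collect frames for matplotlib's FuncAnimation. Everything below (dataset, architecture, schedule) is illustrative:

torch.manual_seed(0)

# Random 2D two-class dataset with an XOR-like decision boundary.
X = np.random.randn(200, 2).astype(np.float32)
y = (X[:, 0] * X[:, 1] > 0).astype(np.int64)
X_t, y_t = torch.from_numpy(X), torch.from_numpy(y)

model = nn.Sequential(nn.Linear(2, 16), nn.ReLU(), nn.Linear(16, 2))
opt = optim.Adam(model.parameters(), lr=1e-2)
loss_fn = nn.CrossEntropyLoss()

# Grid on which to snapshot the decision function for each animation frame.
xx, yy = np.meshgrid(np.linspace(-3, 3, 100), np.linspace(-3, 3, 100))
grid = torch.from_numpy(np.c_[xx.ravel(), yy.ravel()].astype(np.float32))

frames = []
for step in tqdm.trange(200):
    opt.zero_grad()
    loss = loss_fn(model(X_t), y_t)
    loss.backward()
    opt.step()
    if step % 10 == 0:
        with torch.no_grad():
            frames.append(model(grid).argmax(1).reshape(xx.shape).numpy())
# Each entry of `frames` can now be drawn by animation.FuncAnimation.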
import networkx as nx
import matplotlib.pyplot as plt
import random
import matplotlib.animation as animation
import numpy as np

# Step 1: Create the graph
G = nx.MultiGraph()
edges = [
    (1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (2, 6), (3, 4), (3, 5), (3, 6),
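The edge list is truncated in the preview. For the animation side, a hedged sketch of one way these imports combine: drawing a random walk on a multigraph frame by frame with FuncAnimation. The stand-in edge list and styling are illustrative, not from the gist:

random.seed(0)
G2 = nx.MultiGraph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 4)])  # stand-in edges
pos = nx.spring_layout(G2, seed=0)

# Precompute a short random walk to animate.
walk = [1]
for _ in range(20):
    walk.append(random.choice(list(G2.neighbors(walk[-1]))))

fig, ax = plt.subplots()

def draw_frame(i):
    ax.clear()
    nx.draw(G2, pos, ax=ax, with_labels=True, node_color="lightgray")
    nx.draw_networkx_nodes(G2, pos, nodelist=[walk[i]], node_color="tomato", ax=ax)

anim = animation.FuncAnimation(fig, draw_frame, frames=len(walk), interval=300)
plt.show()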
from collections import Counter
from manim import *
import networkx as nx
import random
import numpy as np
import itertools


class UF:
    def __init__(self, ids):
        # Preview cut off; a standard union-find starts every id as its own root.
        self.parent = {i: i for i in ids}
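The manim and networkx imports suggest an animated graph algorithm; Kruskal's MST is a common target for union-find visualizations, though the gist doesn't show which. A standard completion of UF with a usage sketch; find and union below are textbook code, not copied from the gist:

class UF:
    # Textbook union-find: path-halving find, naive union.
    def __init__(self, ids):
        self.parent = {i: i for i in ids}

    def find(self, i):
        while self.parent[i] != i:
            self.parent[i] = self.parent[self.parent[i]]  # path halving
            i = self.parent[i]
        return i

    def union(self, i, j):
        self.parent[self.find(i)] = self.find(j)

# Kruskal-style usage: keep only edges that join two components.
uf = UF(range(5))
forest = []
for u, v in [(0, 1), (1, 2), (0, 2), (3, 4)]:
    if uf.find(u) != uf.find(v):
        uf.union(u, v)
        forest.append((u, v))
assert forest == [(0, 1), (1, 2), (3, 4)]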
thomasahle / mlps.py
Created July 7, 2024 17:30
Randomly Initialized MLPs with Different Activation Functions
import torch
import math
from torch import nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.colors as mcolors
import numpy as np
import seaborn as sns
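The body of mlps.py isn't shown beyond the imports. A minimal sketch of what the title describes: sample several randomly initialized MLPs per activation and plot their outputs on a 1D input. The depth, width, and activation list are guesses:

torch.manual_seed(0)
activations = {"ReLU": nn.ReLU(), "Tanh": nn.Tanh(), "GELU": nn.GELU()}
x = torch.linspace(-3, 3, 500).unsqueeze(1)

fig, axes = plt.subplots(1, len(activations), figsize=(12, 3), sharey=True)
for ax, (name, act) in zip(axes, activations.items()):
    for _ in range(10):  # ten fresh random initializations per activation
        mlp = nn.Sequential(nn.Linear(1, 64), act, nn.Linear(64, 64), act, nn.Linear(64, 1))
        with torch.no_grad():
            ax.plot(x.squeeze(), mlp(x).squeeze(), alpha=0.6)
    ax.set_title(name)
plt.show()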
import instructor
from pydantic import BaseModel, Field
from typing import overload, Union, Literal, Generator
from tqdm.asyncio import tqdm
import asyncio
import numpy as np
import json
import os, sys
import diskcache, inspect, functools
import random
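Only the imports survive in this preview; they point at async, disk-cached structured-output calls through instructor. A hedged sketch of instructor's core async pattern, assuming instructor >= 1.0's from_openai; the model name and prompt are placeholders, an OpenAI key is required, and the caching/tqdm machinery the imports hint at is omitted:

from openai import AsyncOpenAI

class Person(BaseModel):
    name: str
    age: int

# instructor patches the client so completions parse straight into pydantic models.
client = instructor.from_openai(AsyncOpenAI())

async def extract(text: str) -> Person:
    return await client.chat.completions.create(
        model="gpt-4o-mini",
        response_model=Person,
        messages=[{"role": "user", "content": f"Extract the person: {text}"}],
    )

async def main():
    people = await asyncio.gather(*(extract(t) for t in ["Alice is 30.", "Bob is 25."]))
    print(people)

asyncio.run(main())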