Before training:
    last = model.state_dict()
Inside the training loop, after computing the loss:
    if torch.isnan(loss).sum().item():
        model.load_state_dict(last)
# Notebook plotting setup: inline figures as vector graphics, seaborn theme.
# NOTE(review): IPython.display.set_matplotlib_formats is deprecated in newer
# IPython releases in favor of matplotlib_inline.backend_inline.set_matplotlib_formats
# -- confirm the IPython version in use before changing this import.
from IPython.display import set_matplotlib_formats

# Render inline figures as pdf/svg (vector) rather than rasterized png.
set_matplotlib_formats('pdf', 'svg')

import matplotlib
import seaborn as sns

# Larger fonts and a light darkgrid background applied globally.
sns.set(font_scale=1.3)
sns.set_style("darkgrid", {"axes.facecolor": ".95"})
import matplotlib.pyplot as plt
#!/bin/bash
##################################
#
# THE ARCHIVE TRACKER
#
# REF: https://gist.github.com/markwk/c85a8a72bc8c03d0f510262bb5219a34/
#
# INTRODUCTION:
# Daily script to navigate to a directory of plain text files,
# add the files to a git repo, calculate key stats, and store the stats to a csv.
" Use Vim defaults instead of legacy vi behavior (required by plugins).
set nocompatible " be iMproved, required
" Pin the python3 provider to a specific interpreter.
let g:python3_host_prog = '/usr/local/opt/python@3.8/bin/python3.8'
" Bootstrap vim-plug on first launch: download plug.vim, then install the
" declared plugins and re-source the vimrc once Vim has started.
if empty(glob('~/.vim/autoload/plug.vim'))
  silent !curl -fLo ~/.vim/autoload/plug.vim --create-dirs
    \ https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
  autocmd VimEnter * PlugInstall --sync | source $MYVIMRC
endif
call plug#begin('~/.vim/plugged')
%Make sure to have \usepackage{tikz} in the preamble.
%https://tex.stackexchange.com/a/45815/140440 - for the grid
%https://tex.stackexchange.com/a/381175/140440 - for alignment in equations
% This command draws a matrix.
| \newcommand{\mat}[2]{% cols, rows | |
| \vcenter{\hbox{ %Vertical alignment | |
| \begin{tikzpicture}[scale=0.3, align=center] |
import numpy as np
from mpmath import mp, mpmathify
# NOTE(review): wildcard import kept from the original snippet; prefer
# explicit names (e.g. `from pysr import PySRRegressor`) once the used
# symbols are known.
from pysr import *

# Set mpmath precision to 200 decimal places:
mp.dps = 200
# 300 sample points on [-10, -5] for the high-precision evaluation below.
x = np.linspace(-10, -5, num=300)
# High precision calculation:
| # Copy this into your code. Call with, e.g., einop(x, 'i j -> j', reduction='mean') | |
| import functools | |
| import einops as _einops | |
| from einops.parsing import ParsedExpression | |
| @functools.lru_cache(256) | |
| def _match_einop(pattern: str, reduction=None, **axes_lengths: int): | |
| """Find the corresponding operation matching the pattern""" | |
| left, rght = pattern.split('->') | |
| left = ParsedExpression(left) |
| using SymbolicUtils | |
| mutable struct Node | |
| #Holds operators, variables, constants in a tree | |
| degree::Integer #0 for constant/variable, 1 for cos/sin, 2 for +/* etc. | |
| val::Union{Float32, Integer, Nothing} #Either const value, or enumerates variable | |
| constant::Bool #false if variable | |
| op::Integer #enumerates operator (separately for degree=1,2) | |
| l::Union{Node, Nothing} | |
| r::Union{Node, Nothing} |
Before training:
    last = model.state_dict()
Inside the training loop, after computing the loss:
    if torch.isnan(loss).sum().item():
        model.load_state_dict(last)
def acos2(num, denom, disamb):
    """Branch-disambiguated, saturation-safe arccos of ``num / denom``.

    For ratios strictly inside (-1, 1) this returns ``acos(num/denom)``,
    with the sign flipped wherever ``disamb < 0`` (quadrant disambiguation).
    Out-of-range ratios are saturated instead of producing NaN:
    ``pi`` where the ratio <= -1 (acos(-1)) and ``0`` where it >= 1 (acos(1)).
    All arguments are broadcastable tensors; the result is a tensor.
    """
    cosine = num / denom
    return torch.where((cosine > -1) & (cosine < 1.),
                       # In-range: ordinary acos, sign chosen by `disamb`.
                       torch.acos(cosine) * torch.where(disamb < 0.0, -1, 1),
                       # Saturated: acos(-1) = pi, acos(1) = 0.
                       torch.where(cosine <= -1, np.pi, 0.0)
                       )
| def coord_transform(x): | |
| # Assumes in CoM frame |
| def soft_clip(x, lo, hi, pct=0.1): | |
| range = hi - lo | |
| frac = (x - lo) / range | |
| normalization = F.softplus(torch.ones_like(x)) | |
| for _ in ['lo', 'hi']: | |
| frac = torch.where(frac > pct, | |
| frac, | |
| pct * F.softplus(frac / pct) / normalization |