import numpy as np
from sklearn.utils import shuffle

# Data comes from y = f(x) = [2, 3].x + [5, 7]
# The lines below draw two Gaussian clusters (class 0 centred at (-1, -1),
# class 1 centred at (+1, +1)) and stack them with binary targets t.
X0 = np.random.randn(100, 2) - 1
X1 = np.random.randn(100, 2) + 1
X = np.vstack([X0, X1])
t = np.vstack([np.zeros([100, 1]), np.ones([100, 1])])
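The preview stops after generating the data. A minimal continuation, assuming the goal is a from-scratch logistic regression on the two clusters (the learning rate, iteration count, and variable names are my own, not the gist's):

# Sketch: gradient-descent logistic regression on (X, t); not the gist's loop.
X_s, t_s = shuffle(X, t, random_state=0)   # use the shuffle imported above
w = np.zeros((2, 1))
b = 0.0
lr = 0.1                                   # assumed learning rate
for _ in range(1000):
    z = X_s @ w + b
    y = 1.0 / (1.0 + np.exp(-z))           # sigmoid
    grad_w = X_s.T @ (y - t_s) / len(t_s)
    grad_b = float(np.mean(y - t_s))
    w -= lr * grad_w
    b -= lr * grad_b
print("learned weights:", w.ravel(), "bias:", b)

With clusters this well separated, the learned weight vector should point roughly along (1, 1), the direction that separates the two classes.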
from __future__ import print_function
import struct
import gdb


def log():
    # Get the inferior.
    try:
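The preview cuts off inside log(). A sketch of how such a GDB Python script typically continues and gets wired up, assuming log() wants the currently selected inferior; the log_sketch/LogCommand names and the "log" command string are illustrations, not the gist's own code:

def log_sketch():
    # Illustrative continuation: grab the process being debugged and report
    # its PID. The original log() body is truncated in the preview above.
    try:
        inferior = gdb.selected_inferior()
    except gdb.error as exc:
        print("no inferior selected:", exc)
        return
    print("inferior pid:", inferior.pid)


class LogCommand(gdb.Command):
    # Registers "log" as a user command so it can be run from the (gdb)
    # prompt after sourcing this file.
    def __init__(self):
        super(LogCommand, self).__init__("log", gdb.COMMAND_USER)

    def invoke(self, arg, from_tty):
        log_sketch()


LogCommand()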
import torch


def jacobian(y, x, create_graph=False):
    # Build the Jacobian dy/dx row by row: back-propagate a one-hot vector
    # for each element of y and collect the resulting gradient w.r.t. x.
    jac = []
    flat_y = y.reshape(-1)
    grad_y = torch.zeros_like(flat_y)
    for i in range(len(flat_y)):
        grad_y[i] = 1.
        grad_x, = torch.autograd.grad(flat_y, x, grad_y, retain_graph=True,
                                      create_graph=create_graph)
        jac.append(grad_x.reshape(x.shape))
        grad_y[i] = 0.  # reset the one-hot entry before the next row
    return torch.stack(jac).reshape(y.shape + x.shape)
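A quick usage example (mine, not part of the gist): for y = x**2 the Jacobian is diagonal with entries 2*x, which the helper reproduces.

x = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)
y = x ** 2
J = jacobian(y, x)
print(J)   # 3x3 matrix with diagonal (2, 4, 6)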
import sys
from collections import OrderedDict

PY2 = sys.version_info[0] == 2

# Attribute names that torch.nn.Module manages internally (parameters,
# buffers, hooks, submodules); everything else on a module is ordinary state.
_internal_attrs = {'_backend', '_parameters', '_buffers', '_backward_hooks',
                   '_forward_hooks', '_forward_pre_hooks', '_modules'}


class Scope(object):
    def __init__(self):
        self._modules = OrderedDict()
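The preview breaks off after Scope.__init__. A plausible use of such a container, assuming the gist copies an nn.Module's ordinary attributes into a Scope so the module can be re-executed against substituted parameters; the _make_scope helper is my illustration, not the gist's code:

def _make_scope(module):
    # Copy every attribute that is not part of nn.Module's internal
    # bookkeeping into a fresh Scope (illustrative sketch only).
    scope = Scope()
    for name, value in module.__dict__.items():
        if name not in _internal_attrs:
            setattr(scope, name, value)
    return scope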
### JHW 2018

import numpy as np
import umap

# This code from the excellent module at:
# https://stackoverflow.com/questions/4643647/fast-prime-factorization-module
import random
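The factorisation code itself is truncated; the gist credits a fast implementation from the linked Stack Overflow answer. As a stand-in, a much simpler (and much slower) trial-division factoriser, my own rather than the referenced module, would be:

def factorise(n):
    # Naive trial division; a placeholder for the fast factoriser the gist
    # takes from the Stack Overflow answer linked above.
    factors = []
    d = 2
    while d * d <= n:
        while n % d == 0:
            factors.append(d)
            n //= d
        d += 1
    if n > 1:
        factors.append(n)
    return factors

print(factorise(360))   # [2, 2, 2, 3, 3, 5]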
import numpy as np

# handle pytorch tensors etc, by using tensorboardX's method
try:
    from tensorboardX.x2num import make_np
except ImportError:
    def make_np(x):
        return np.array(x).copy().astype('float16')


class RunningStats(object):
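The class body is cut off in the preview. Running-statistics classes are commonly built on Welford's online algorithm; a self-contained sketch under that assumption (the class and method names below are mine, not necessarily the gist's):

class RunningStatsSketch(object):
    # Welford's online mean/variance; an illustrative stand-in for the
    # truncated RunningStats class above, not the gist's actual code.
    def __init__(self):
        self.n = 0
        self.mean = 0.0
        self.m2 = 0.0

    def push(self, x):
        self.n += 1
        delta = x - self.mean
        self.mean += delta / self.n
        self.m2 += delta * (x - self.mean)

    def variance(self):
        return self.m2 / (self.n - 1) if self.n > 1 else 0.0

make_np from the block above could be used to convert tensors or arrays to plain NumPy values before pushing them.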
import torch
import torch.optim as optim
import matplotlib.pyplot as plt


# 2d Rosenbrock function
def f(x):
    return (1 - x[0])**2 + 100 * (x[1] - x[0]**2)**2
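The preview ends at the function definition. Given the torch.optim and matplotlib imports, a minimal continuation that minimises f with a gradient-based optimizer and plots the loss curve might look like this (the optimizer choice, starting point, learning rate, and step count are assumptions, not the gist's values):

x = torch.tensor([-1.5, 2.0], requires_grad=True)
opt = optim.Adam([x], lr=0.01)
losses = []
for _ in range(5000):
    opt.zero_grad()
    loss = f(x)
    loss.backward()
    opt.step()
    losses.append(loss.item())
print(x.detach())   # should move toward the minimum at (1, 1)

plt.plot(losses)
plt.xlabel("iteration")
plt.ylabel("f(x)")
plt.show()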