$ xvfb-run -s "-screen 0 1400x900x24" jupyter notebook
import matplotlib.pyplot as plt
%matplotlib inline
def show_state(env, step=0):
# Create a saliency map for each data point.
# NOTE(review): fragment — the backward pass below is begun but cut off
# in this chunk; the gradient loop presumably follows elsewhere.
for i, image in enumerate(data):
    # Forward pass on image.
    # Note: the activations are saved on each layer.
    output = image
    for l in range(len(network.layers)):
        output = network.layers[l].forward(output)
    # Backprop to get gradient.
    label_one_hot = labels[i]
# Create a saliency map for each data point.
# NOTE(review): this fragment duplicates the block directly above it,
# and the backward pass is again cut off mid-thought.
for i, image in enumerate(data):
    # Forward pass on image.
    # Note: the activations from this are saved on each layer.
    output = image
    for l in range(len(network.layers)):
        output = network.layers[l].forward(output)
    # Backprop to get gradient.
    label_one_hot = labels[i]
# Backprop to get the gradient for sample i.
# The one-hot label seeds the backward pass.
label_one_hot = labels[i]
dy = np.array(label_one_hot)
# Walk the layers in reverse, feeding each layer's output gradient
# into the layer before it.
for l in range(len(network.layers) - 1, -1, -1):
    dout = network.layers[l].backward(dy)
    dy = dout
# Create a saliency map for each data point.
for i, image in enumerate(data):
    # Run a forward pass with an image, layer by layer.
    output = image
    for l in range(len(network.layers)):
        output = network.layers[l].forward(output)
from model.data import mnist_train_test_sets
from model.network import LeNet5
from saliency.vanilla_gradient import save_vanilla_gradient

# Get the MNIST dataset, preprocessed.
train_images, train_labels, test_images, test_labels = mnist_train_test_sets()
# Load the net with pretrained weights (reported 98% accuracy).
net = LeNet5(weights_path="15epoch_weights.pkl")
# Generate saliency maps for the first 10 images
# NOTE(review): the call that does this (presumably
# save_vanilla_gradient(...)) is cut off in this chunk — confirm upstream.
# From the CLI:
#   hyperdash run -n 'mymodel' python mymodel.py
import hyperdash as hd

learning_rate = hd.param('learning rate', default=0.01)  # Set up hyperparameters
# Model code here
# NOTE(review): `training_loss` is a placeholder produced by the elided
# model code above — this snippet is illustrative, not runnable as-is.
hd.metric('loss', training_loss)  # Record a metric
# Params and metrics are pretty printed at the end of the experiment
I hereby claim:
To claim this, I am signing this object:
#!/bin/sh
# See video https://www.youtube.com/watch?v=7PO27i2lEOs
set -e

# Return success (0) iff the named command is available on PATH.
# The original used `&> /dev/null`, a bashism that under /bin/sh is
# parsed as background-execution plus a redirect; POSIX redirection
# (`> /dev/null 2>&1`) behaves correctly in any sh.
command_exists () {
    type "$1" > /dev/null 2>&1 ;
}
{
  "name": "ReactiveCocoa",
  "version": "4.2.2",
  "summary": "A framework for composing and transforming streams of values.",
  "description": "ReactiveCocoa (RAC) is an Objective-C framework for Functional Reactive Programming.\nIt provides APIs for composing and transforming streams of values.",
  "homepage": "https://github.com/ReactiveCocoa/ReactiveCocoa",
  "license": {
    "type": "MIT",
    "file": "LICENSE.md"
  },