以下をuBlacklistに加える:
https://gist.githubusercontent.com/moskomule/3bddfc5deb5845fccc0d7c2519c600ce/raw/uBlacklist.txt
A simple Python raytracer that supports spheres with configurable "material" properties (a base color and a set of light coefficients). To generate a raytraced image of the pre-defined scene, run `python raytracer.py` and open the resulting `image.ppm` with a PPM-compatible viewer (`eog` works fine on Linux).

I found the following resources extremely helpful:
import torch.utils.data as data | |
import os | |
import re | |
import torch | |
import tarfile | |
from PIL import Image | |
IMG_EXTENSIONS = ['.png', '.jpg', '.jpeg'] |
# Collate experiment results: pull the loss name, dataset name, and the
# epoch-29 test mIoU out of every results/*/*.json, then join them into a
# sorted, pipe-delimited table.
# NOTE(review): the "29," anchor presumably matches the last epoch of a
# 30-epoch run — confirm against the training config.
cat results/*/*.json | grep -o -E 'args": "[^"]+' | grep -o -E '\-\-loss.*' > f1.txt
cat results/*/*.json | grep -o -E 'args": "[^"]+' | grep -o -E '\-\-dataset[^\-]+' > f2.txt
cat results/*/*.json | grep -o -E 'miou_test": \[\[[^"]+' | grep -o -E '29, [^]]+' > f3.txt
paste f1.txt f2.txt f3.txt | sed -e 's/--loss/|/g' | sed -e 's/--batch_size/|/g' | sed -e 's/29,/|/g'| sed -e 's/--dataset/|/g' | sort
# Sample output (loss | batch size | dataset | mIoU):
#
# | ce | 1 | sbd | 0.7214421629905701
# | ce | 1 | voc | 0.6946691870689392
# | ce | 2 | sbd | 0.7422053217887878
# | ce | 2 | voc | 0.7283321619033813
def Rop(y, x, v):
    """Compute the R-operator: the Jacobian-vector product J_x(y) @ v.

    Uses the double-backward trick: differentiate ``y`` w.r.t. ``x`` against a
    dummy all-ones cotangent ``w`` (building a graph through that grad), then
    differentiate the result w.r.t. ``w`` against ``v``.

    Arguments:
        y (Tensor): output of the differentiated function (requires grad
            w.r.t. ``x``)
        x (Tensor): differentiated input
        v (Tensor): vector to be multiplied with the Jacobian from the right
            (same shape as ``y``)

    Returns:
        Tuple[Tensor, ...]: the Jacobian-vector product(s), one per input.
    """
    w = torch.ones_like(y, requires_grad=True)
    # create_graph=True is required: without it the inner gradient is
    # detached and the outer grad w.r.t. ``w`` raises at runtime.
    g = torch.autograd.grad(y, x, grad_outputs=w, create_graph=True)
    return torch.autograd.grad(g, w, grad_outputs=v)
# Download the gdrive binary and make it executable.
# NOTE(review): the original saved the download as "grive" but then invoked
# "./gdrive" — the filenames must match or the upload step fails; unified
# to "gdrive" here. The URL is quoted so the '?' is never glob-expanded.
wget -O gdrive "https://docs.google.com/uc?id=0B3X9GlR6EmbnQ0FtZmJJUXEyRTA"
chmod 755 gdrive # not sure if it's necessary
./gdrive upload [--recursive] NAME
#!/bin/bash | |
# download and unzip dataset | |
#wget http://cs231n.stanford.edu/tiny-imagenet-200.zip | |
unzip tiny-imagenet-200.zip | |
current="$(pwd)/tiny-imagenet-200" | |
# training data | |
cd $current/train |