git clone [email protected]:YOUR-USERNAME/YOUR-FORKED-REPO.git
cd into/cloned/fork-repo
git remote add upstream git://github.com/ORIGINAL-DEV-USERNAME/REPO-YOU-FORKED-FROM.git
git fetch upstream
git clone [email protected]:YOUR-USERNAME/YOUR-FORKED-REPO.git
cd into/cloned/fork-repo
git remote add upstream git://github.com/ORIGINAL-DEV-USERNAME/REPO-YOU-FORKED-FROM.git
git fetch upstream
{
  "mappings": {
    "docs": {
      "dynamic": true,
      "properties": {
        "time": {
          "type": "date",
          "format": "yyyyMMdd",
          "store": true
        },
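# A hedged sketch (not part of the original mapping snippet): create an index
# with a mapping like the one above through the REST API, using the `requests`
# library. The node address "localhost:9200", the index name "my_index", and
# the assumption that the complete mapping is saved as mapping.json are all
# illustrative.
import json
import requests

with open("mapping.json") as f:
    mapping = json.load(f)
resp = requests.put("http://localhost:9200/my_index", json=mapping)
resp.raise_for_status()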
###byte pair encoding
###Neural Machine Translation of Rare Words with Subword Units
###from https://plmsmile.github.io/2017/10/19/subword-units/
import re
def process_raw_words(words, endtag='-'):
    '''Split each word into its smallest symbols and append an end-of-word tag.'''
    vocabs = {}
    for word, count in words.items():
        # insert a space before every letter so each character becomes a separate symbol
        word = re.sub(r'([a-zA-Z])', r' \1', word)
        word += ' ' + endtag
        vocabs[word] = count
    return vocabs
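### The merge loop itself (count adjacent symbol pairs, then merge the most
### frequent pair) follows the reference implementation in Sennrich et al.,
### "Neural Machine Translation of Rare Words with Subword Units"; a minimal
### sketch, operating on the `vocabs` dict returned above:
import collections

def get_stats(vocabs):
    '''Count the frequency of each adjacent symbol pair in the vocabulary.'''
    pairs = collections.defaultdict(int)
    for word, freq in vocabs.items():
        symbols = word.split()
        for i in range(len(symbols) - 1):
            pairs[(symbols[i], symbols[i + 1])] += freq
    return pairs

def merge_vocab(pair, v_in):
    '''Merge every occurrence of `pair` into a single new symbol.'''
    v_out = {}
    bigram = re.escape(' '.join(pair))
    pattern = re.compile(r'(?<!\S)' + bigram + r'(?!\S)')
    for word, freq in v_in.items():
        v_out[pattern.sub(''.join(pair), word)] = freq
    return v_out

# Example: run a handful of merges
# vocabs = process_raw_words({'lower': 2, 'newest': 6, 'widest': 3})
# for _ in range(10):
#     pairs = get_stats(vocabs)
#     best = max(pairs, key=pairs.get)
#     vocabs = merge_vocab(best, vocabs)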
import argparse
import os
import shutil
import time
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
# This is an example for the CIFAR-10 dataset.
# There's a function for creating a train and validation iterator.
# There's also a function for creating a test iterator.
# Inspired by https://discuss.pytorch.org/t/feedback-on-pytorch-for-kaggle-competitions/2252/4
from utils import plot_images
def get_train_valid_loader(data_dir,
                           batch_size,
                           augment,
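### A minimal, self-contained sketch of the usual train/validation split with
### torchvision's CIFAR10 and SubsetRandomSampler. Function name, parameters,
### and defaults below are illustrative, not the original utils-based code.
import numpy as np
import torch
from torchvision import datasets, transforms
from torch.utils.data.sampler import SubsetRandomSampler

def simple_train_valid_loaders(data_dir, batch_size, valid_size=0.1, seed=0):
    transform = transforms.ToTensor()
    dataset = datasets.CIFAR10(root=data_dir, train=True,
                               download=True, transform=transform)
    # shuffle indices once, then carve off the first `valid_size` fraction
    indices = list(range(len(dataset)))
    np.random.RandomState(seed).shuffle(indices)
    split = int(np.floor(valid_size * len(dataset)))
    valid_idx, train_idx = indices[:split], indices[split:]
    train_loader = torch.utils.data.DataLoader(
        dataset, batch_size=batch_size, sampler=SubsetRandomSampler(train_idx))
    valid_loader = torch.utils.data.DataLoader(
        dataset, batch_size=batch_size, sampler=SubsetRandomSampler(valid_idx))
    return train_loader, valid_loader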
#from https://rosettacode.org/wiki/LU_decomposition#Python
from pprint import pprint
def matrixMul(A, B):
    TB = list(zip(*B))   # materialize so it can be iterated once per row of A
    return [[sum(ea*eb for ea,eb in zip(a,b)) for b in TB] for a in A]
def pivotize(m):
    """Creates the pivoting matrix for m."""
    n = len(m)
    ID = [[float(i == j) for i in range(n)] for j in range(n)]
    for j in range(n):
        row = max(range(j, n), key=lambda i: abs(m[i][j]))
        if j != row:
            ID[j], ID[row] = ID[row], ID[j]
    return ID
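### The decomposition on the cited Rosetta Code page combines these helpers
### roughly as follows (Doolittle's method with partial pivoting, PA = LU);
### reproduced here as a sketch:
def lu(A):
    """Decomposes an nxn matrix A by PA = LU and returns L, U and P."""
    n = len(A)
    L = [[0.0] * n for _ in range(n)]
    U = [[0.0] * n for _ in range(n)]
    P = pivotize(A)
    A2 = matrixMul(P, A)
    for j in range(n):
        L[j][j] = 1.0
        for i in range(j + 1):
            s1 = sum(U[k][j] * L[i][k] for k in range(i))
            U[i][j] = A2[i][j] - s1
        for i in range(j, n):
            s2 = sum(U[k][j] * L[i][k] for k in range(j))
            L[i][j] = (A2[i][j] - s2) / U[j][j]
    return (L, U, P)

# Example usage:
# a = [[1, 3, 5], [2, 4, 7], [1, 1, 0]]
# for part in lu(a):
#     pprint(part, width=120)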
### Adapted from TF repo
import tensorflow as tf
from tensorflow import gradients
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
def hessian_vector_product(ys, xs, v):
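    # A minimal sketch of the standard construction: take the gradient of the
    # inner product <grad(ys, xs), v>, treating v as a constant. Assumes v has
    # the same length and per-element shapes as xs.
    if len(v) != len(xs):
        raise ValueError("xs and v must have the same length.")
    # First backprop: gradients of ys with respect to each x in xs
    grads = gradients(ys, xs)
    # Element-wise products <grad, v>, with v excluded from differentiation
    elemwise_products = [
        math_ops.multiply(grad_elem, array_ops.stop_gradient(v_elem))
        for grad_elem, v_elem in zip(grads, v) if grad_elem is not None
    ]
    # Second backprop yields the Hessian-vector product
    grads_with_none = gradients(elemwise_products, xs)
    return [
        grad_elem if grad_elem is not None else tf.zeros_like(x)
        for x, grad_elem in zip(xs, grads_with_none)
    ]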
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.autograd import Variable
import torchvision
import torchvision.transforms as transforms
#plot tiled images
#assumes `plt` is matplotlib.pyplot (imported above) and `imgs` is a sequence
#of at least 63 HxW or HxWx3 image arrays
fig = plt.figure(figsize=(8,8))
#adjust the white space around the figure and each subplot
plt.subplots_adjust(wspace=0.01, hspace=0.01, left=0, right=1, bottom=0, top=1)
for i in range(63):
    ax = plt.subplot(8,8,i+1)
    plt.imshow(imgs[i])
    ax.axis('off') #no frame
    #ax.get_xaxis().set_visible(False)
    #ax.get_yaxis().set_visible(False)
import numpy as np
from scipy.ndimage.interpolation import map_coordinates
from scipy.ndimage.filters import gaussian_filter
def elastic_transform(image, alpha, sigma, random_state=None):
    """Elastic deformation of images as described in [Simard2003]_.
    .. [Simard2003] Simard, Steinkraus and Platt, "Best Practices for
       Convolutional Neural Networks applied to Visual Document Analysis", in
       Proc. of the International Conference on Document Analysis and
       Recognition, 2003.
    """
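    # A minimal sketch of the usual implementation for a 2-D (grayscale)
    # image: draw per-pixel random displacements, smooth them with a Gaussian
    # of width `sigma`, scale by `alpha`, and resample the image at the
    # displaced coordinates.
    if random_state is None:
        random_state = np.random.RandomState(None)
    shape = image.shape
    dx = gaussian_filter((random_state.rand(*shape) * 2 - 1),
                         sigma, mode="constant", cval=0) * alpha
    dy = gaussian_filter((random_state.rand(*shape) * 2 - 1),
                         sigma, mode="constant", cval=0) * alpha
    x, y = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]), indexing='ij')
    indices = np.reshape(x + dx, (-1, 1)), np.reshape(y + dy, (-1, 1))
    return map_coordinates(image, indices, order=1).reshape(shape)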