start new:
tmux
start new with session name:
tmux new -s myname
def mi_linear(arg1, arg2, output_size, global_bias_start=0.0, scope=None):
    """Multiplicative Integration linear map:
    See http://arxiv.org/pdf/1606.06630v1.pdf
    A * (W[0] * arg1) * (W[1] * arg2) + (W[0] * arg1 * bias1) + (W[1] * arg2 * bias2) + global_bias.
    Args:
        arg1: batch x n, Tensor.
        arg2: batch x n, Tensor.
        output_size: int, second dimension of W[i].
        global_bias_start: starting value to initialize the global bias; 0 by default.
        scope: VariableScope for the created subgraph; defaults to "MILinear".
    Returns:
        A 2D Tensor with shape batch x output_size.
    """
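    # Hedged sketch of the body, not the original code: assumes TF1-style
    # `import tensorflow as tf`; the variable names (W0, W1, alpha, bias1,
    # bias2, global_bias) and the constant initializers are assumptions,
    # only the formula in the docstring is taken as given.
    with tf.variable_scope(scope or "MILinear"):
        n = arg1.get_shape().as_list()[1]
        W0 = tf.get_variable("W0", [n, output_size])
        W1 = tf.get_variable("W1", [n, output_size])
        Wx = tf.matmul(arg1, W0)  # W[0] * arg1
        Uh = tf.matmul(arg2, W1)  # W[1] * arg2
        alpha = tf.get_variable("alpha", [output_size],
                                initializer=tf.constant_initializer(1.0))
        bias1 = tf.get_variable("bias1", [output_size],
                                initializer=tf.constant_initializer(0.5))
        bias2 = tf.get_variable("bias2", [output_size],
                                initializer=tf.constant_initializer(0.5))
        global_bias = tf.get_variable("global_bias", [output_size],
                                      initializer=tf.constant_initializer(global_bias_start))
        # A * (W0*arg1) * (W1*arg2) + bias1*(W0*arg1) + bias2*(W1*arg2) + global_bias
        return alpha * Wx * Uh + bias1 * Wx + bias2 * Uh + global_bias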
import hashlib
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument('--md5', type=str, required=True)
args = parser.parse_args()
script_to_run = 'some.py'
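# Hedged guess at the intent of this fragment: verify the script's MD5
# checksum against the --md5 argument before executing it. The comparison
# and the subprocess call below are assumptions, not the original code.
with open(script_to_run, 'rb') as f:
    actual_md5 = hashlib.md5(f.read()).hexdigest()
if actual_md5 == args.md5:
    subprocess.call(['python', script_to_run])
else:
    print('MD5 mismatch, refusing to run', script_to_run)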
import json
import tensorflow as tf
from tensorflow.python.client import timeline
x = tf.random_normal([1000, 1000])
y = tf.random_normal([1000, 1000])
res = tf.matmul(x, y)
# Run the graph with full trace option
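# Hedged completion of this snippet following the standard TF1 tf.timeline
# recipe; the output file name timeline_01.json is an arbitrary choice.
with tf.Session() as sess:
    options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
    run_metadata = tf.RunMetadata()
    sess.run(res, options=options, run_metadata=run_metadata)
    # create the Timeline object and write it to a Chrome-trace JSON file
    fetched_timeline = timeline.Timeline(run_metadata.step_stats)
    chrome_trace = fetched_timeline.generate_chrome_trace_format()
    with open('timeline_01.json', 'w') as f:
        f.write(chrome_trace)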
def composite_function(self, _input, out_features, kernel_size=3):
    """Function from paper H_l that performs:
    - batch normalization
    - ReLU nonlinearity
    - convolution with required kernel
    - dropout, if required
    """
    with tf.variable_scope("composite_function"):
        # BN
        output = self.batch_norm(_input)
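        # Hedged sketch of the remaining steps listed in the docstring; the
        # helper methods self.conv2d and self.dropout are assumptions that
        # mirror self.batch_norm above.
        # ReLU nonlinearity
        output = tf.nn.relu(output)
        # convolution with the required kernel
        output = self.conv2d(output, out_features=out_features,
                             kernel_size=kernel_size)
        # dropout, if required
        output = self.dropout(output)
    return output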
import tensorflow as tf
class MultithreadedTensorProvider():
    """A class designed to provide input tensors in
    separate threads."""
    def __init__(self, capacity, sess, dtypes, shuffle_queue=False,
                 number_of_threads=1):
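        # Hedged sketch of what such a constructor typically sets up: a TF1
        # FIFOQueue (or RandomShuffleQueue when shuffle_queue=True) whose
        # enqueue op is later fed from Python threads. The attribute names
        # here are assumptions, not the original code.
        self._sess = sess
        self._number_of_threads = number_of_threads
        if shuffle_queue:
            self._queue = tf.RandomShuffleQueue(capacity=capacity,
                                                min_after_dequeue=0,
                                                dtypes=dtypes)
        else:
            self._queue = tf.FIFOQueue(capacity=capacity, dtypes=dtypes)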
import tensorflow as tf
from tensorflow.python.client import timeline
a = tf.random_normal([2000, 5000])
b = tf.random_normal([5000, 1000])
res = tf.matmul(a, b)
with tf.Session() as sess:
    # add additional options to trace the session execution
    options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
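    # Hedged completion following the same tf.timeline recipe as above;
    # the output file name timeline_02.json is an arbitrary choice.
    run_metadata = tf.RunMetadata()
    sess.run(res, options=options, run_metadata=run_metadata)
    # create the Timeline object and write it to a Chrome-trace JSON file
    fetched_timeline = timeline.Timeline(run_metadata.step_stats)
    chrome_trace = fetched_timeline.generate_chrome_trace_format()
    with open('timeline_02.json', 'w') as f:
        f.write(chrome_trace)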
import os
import tempfile
import tensorflow as tf
from tensorflow.contrib.layers import fully_connected as fc
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.python.client import timeline
batch_size = 100
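# Hedged sketch of how these imports are typically used together: a small
# fully connected MNIST classifier whose training steps can then be traced
# with tf.RunOptions / timeline as in the snippets above. Layer sizes,
# names, and the data directory are assumptions.
mnist = input_data.read_data_sets(os.path.join(tempfile.gettempdir(), 'MNIST_data'),
                                  one_hot=True)
inputs = tf.placeholder(tf.float32, [batch_size, 784])
targets = tf.placeholder(tf.float32, [batch_size, 10])
hidden = fc(inputs, 256)
logits = fc(hidden, 10, activation_fn=None)
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=targets, logits=logits))
train_op = tf.train.AdamOptimizer().minimize(loss)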
import json
class TimeLiner:
    _timeline_dict = None
    def update_timeline(self, chrome_trace):
        # convert chrome trace to python dict
        chrome_trace_dict = json.loads(chrome_trace)
        # for first run store full trace
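        # Hedged completion based on the commonly used version of this helper:
        # keep the whole trace on the first call, afterwards append only the
        # timing events from later runs.
        if self._timeline_dict is None:
            self._timeline_dict = chrome_trace_dict
        # for other runs, merge only the trace events
        else:
            for event in chrome_trace_dict['traceEvents']:
                # real timing events carry a 'ts' (timestamp) field
                if 'ts' in event:
                    self._timeline_dict['traceEvents'].append(event)

    def save(self, f_name):
        # dump the merged trace so it can be opened in chrome://tracing
        with open(f_name, 'w') as f:
            json.dump(self._timeline_dict, f)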
import torch
import numpy as np
numpy_tensor = np.random.randn(10, 20)
# convert numpy array to pytorch tensor
pytorch_tensor = torch.Tensor(numpy_tensor)
# or another way
pytorch_tensor = torch.from_numpy(numpy_tensor)
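# Note (standard PyTorch behavior): torch.Tensor copies the data and casts it
# to float32, while torch.from_numpy shares memory with the numpy array and
# keeps its dtype (float64 here). Going back the other way:
numpy_again = pytorch_tensor.numpy()  # convert a pytorch tensor back to a numpy array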