Scripts to add to your Colab Notebook to sync with an S3 bucket that contains code to run.
- Write AWS credentials to disk in order to use s3fs
%%writefile ~/.passwd-s3fs
ACCESS_KEY_ID:SECRET_ACCESS_KEY
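The ~/.passwd-s3fs file holds a single ACCESS_KEY_ID:SECRET_ACCESS_KEY line (the values above are placeholders). A possible follow-up cell that mounts the bucket with s3fs; the bucket name and mount point below are placeholders, not from the original notebook:

!apt-get -qq install -y s3fs
!chmod 600 $HOME/.passwd-s3fs
!mkdir -p /content/s3
!s3fs BUCKET-NAME /content/s3 -o passwd_file=$HOME/.passwd-s3fs
# The bucket's contents now appear under /content/s3 and can be added to sys.path for imports.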
@functools.partial(nn.scan,
                   variable_broadcast='params',
                   split_rngs={'params': False})
@nn.remat
@nn.compact
def __call__(self, carry, t: int):
    noise, memory, latent_code = carry['noise'], carry['memory'], carry['latent_code']
    intermediates = {}
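The fragment above is the scanned per-step __call__ of a larger module. A self-contained sketch of the same nn.scan / nn.remat / nn.compact pattern (the cell below and its carry layout are illustrative, not the original model):

import functools
import jax
import jax.numpy as jnp
import flax.linen as nn

class ScannedCell(nn.Module):
    features: int = 16

    @functools.partial(nn.scan,
                       variable_broadcast='params',    # share one set of weights across all steps
                       split_rngs={'params': False})   # do not split the params RNG per step
    @nn.remat                                          # recompute activations in the backward pass
    @nn.compact
    def __call__(self, carry, x):
        # carry is the running hidden state; x is the input slice for this step.
        h = jnp.tanh(nn.Dense(self.features)(jnp.concatenate([carry, x], axis=-1)))
        return h, h   # (new carry, per-step output)

cell = ScannedCell()
carry0 = jnp.zeros((4, 16))              # (batch, features)
xs = jnp.zeros((8, 4, 16))               # (steps, batch, features); scanned over axis 0
variables = cell.init(jax.random.PRNGKey(0), carry0, xs)
final_carry, ys = cell.apply(variables, carry0, xs)   # ys has shape (8, 4, 16)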
import matplotlib.pyplot as plt
import numpy as np

def normalize_freq_coeff(coeff):
    freqs = np.fft.fftfreq(coeff.shape[-1], d=1 / coeff.shape[-1])
    scale = 1 / (np.abs(freqs) + 1)
    return coeff * scale
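A quick usage example (the synthetic signal below is only illustrative): dampen high-frequency FFT coefficients with the 1/(|f|+1) weighting, then invert the transform.

t = np.linspace(0, 1, 256, endpoint=False)
signal = np.sin(2 * np.pi * 3 * t) + 0.5 * np.random.randn(256)
coeff = np.fft.fft(signal)
smoothed = np.fft.ifft(normalize_freq_coeff(coeff)).real   # high frequencies attenuated
plt.plot(t, signal, label='original')
plt.plot(t, smoothed, label='frequency-normalized')
plt.legend()
plt.show()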
import requests

videoId = 'QRE0GxT6Zbw'
response = requests.get('http://video.google.com/timedtext',
                        params={
                            'lang': 'en',
                            'v': videoId,
                            'fmt': 'vtt'
                        })
print(response.text)
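The response body is WebVTT text. A rough sketch of splitting it into (timestamp, caption) pairs with plain string handling, assuming the endpoint still returns captions for the video:

captions = []
for block in response.text.split('\n\n'):
    lines = [line for line in block.strip().splitlines() if line]
    timing = [i for i, line in enumerate(lines) if '-->' in line]
    if timing:
        i = timing[0]
        captions.append((lines[i], ' '.join(lines[i + 1:])))
for timestamp, caption in captions[:5]:
    print(timestamp, caption)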
import tensorflow as tf
import tensorflow_text as text
from transformers import AutoTokenizer

def get_tf_tokenizer(hf_model_name, do_test=False):
    hf_tokenizer = AutoTokenizer.from_pretrained(hf_model_name)
    model_proto = hf_tokenizer.sp_model.serialized_model_proto()
    tf_tokenizer = text.SentencepieceTokenizer(model=model_proto, out_type=tf.int32)
    if do_test:
        test_string = "This is a testtt, hah! reaaly cool :)"
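        # The preview cuts off here; a plausible completion of the test branch
        # (an assumption, not the original code) compares the ids from both tokenizers.
        hf_ids = hf_tokenizer.encode(test_string, add_special_tokens=False)
        tf_ids = tf_tokenizer.tokenize(test_string).numpy().tolist()
        print('HF ids:', hf_ids)
        print('TF ids:', tf_ids)
    return tf_tokenizer

# Note: .sp_model is only present on slow, SentencePiece-backed tokenizer classes
# (e.g. T5/ALBERT loaded with use_fast=False in recent transformers versions).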
"""Running this uploads all Python modules in this workspace to an S3 bucket, to | |
access when training on Colab. | |
""" | |
import os
import boto3

S3_BUCKET_NAME = 'BUCKET-NAME'
# files in the local directory to upload as code. (yaml or json are often used for config)
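A sketch of how the upload loop might continue (the extension list and key layout are assumptions, not the original script):

CODE_EXTENSIONS = ('.py', '.yaml', '.json')

s3 = boto3.client('s3')
for root, _, files in os.walk('.'):
    for name in files:
        if name.endswith(CODE_EXTENSIONS):
            path = os.path.join(root, name)
            key = os.path.relpath(path, '.')
            print(f'uploading {path} -> s3://{S3_BUCKET_NAME}/{key}')
            s3.upload_file(path, S3_BUCKET_NAME, key)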
"""Originally from https://www.kaggle.com/cdeotte/rotation-augmentation-gpu-tpu-0-96#Data-Augmentation, | |
modified to be in pure TensorFlow and to work on a batch of images rather than a single image. | |
(For a tf.data pipeline, you may want to look at the original code at the link above.) | |
""" | |
import math
import tensorflow as tf

def transform_batch(images,
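The preview stops at the signature. As a reduced, hypothetical illustration of the batched idea, a rotation-only, nearest-neighbour version (its name and signature are not the original's):

def rotate_batch(images, degrees):
    # Rotate each image of a (B, H, W, C) batch by its own angle, in pure TF.
    h, w = images.shape[1], images.shape[2]            # assumes static height/width
    rad = degrees * math.pi / 180.0
    cos, sin = tf.cos(rad), tf.sin(rad)
    # Per-image 2x2 matrices mapping output pixel coords back to input coords.
    rot = tf.reshape(tf.stack([cos, sin, -sin, cos], axis=-1), [-1, 2, 2])
    # Output pixel grid centred on the image centre, shape (2, H*W).
    ys, xs = tf.meshgrid(tf.range(h, dtype=tf.float32),
                         tf.range(w, dtype=tf.float32), indexing='ij')
    grid = tf.stack([tf.reshape(ys, [-1]) - (h - 1) / 2,
                     tf.reshape(xs, [-1]) - (w - 1) / 2], axis=0)
    src = tf.einsum('bij,jk->bik', rot, grid)          # (B, 2, H*W) source coordinates
    src_y = tf.clip_by_value(tf.round(src[:, 0] + (h - 1) / 2), 0, h - 1)
    src_x = tf.clip_by_value(tf.round(src[:, 1] + (w - 1) / 2), 0, w - 1)
    idx = tf.cast(tf.stack([src_y, src_x], axis=-1), tf.int32)   # (B, H*W, 2)
    out = tf.gather_nd(images, idx, batch_dims=1)                # (B, H*W, C)
    return tf.reshape(out, [tf.shape(images)[0], h, w, images.shape[3]])

The full transform_batch presumably composes shear, zoom, and shift matrices with the rotation before sampling, as in the referenced Kaggle kernel.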
"""Client class to interface with FoodData Central. | |
FoodData Central requires a Data.gov key: https://api.data.gov/signup/ | |
""" | |
from enum import Enum
import json
import requests

BASE_URL = 'https://api.nal.usda.gov/fdc/v1'
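A minimal sketch of a search helper against the API (the function name and the parameter subset are assumptions; DEMO_KEY is the public, rate-limited Data.gov key):

API_KEY = 'DEMO_KEY'  # replace with your own Data.gov key

def search_foods(query, page_size=5):
    # Free-text search against the /foods/search endpoint.
    response = requests.get(f'{BASE_URL}/foods/search',
                            params={'api_key': API_KEY,
                                    'query': query,
                                    'pageSize': page_size})
    response.raise_for_status()
    return response.json().get('foods', [])

for food in search_foods('cheddar cheese'):
    print(food['fdcId'], food['description'])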
I hereby claim:
To claim this, I am signing this object:

"""Small convnet with residual connections.
Inspired by https://gist.github.com/mjdietzx/0cb95922aac14d446a6530f87b3a04ce,
which builds a full ResNet-50 or ResNeXt-50 model.
"""
NUM_CLASSES = 2

from keras.layers import BatchNormalization, Conv2D, LeakyReLU, Input, MaxPool2D, Dense, Flatten, Dropout
from keras.models import Model

def add_common_layers(y):
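    # The preview ends at the definition; a plausible body, in the spirit of the
    # referenced gist (an assumption, not the original), is BatchNorm + LeakyReLU.
    y = BatchNormalization()(y)
    y = LeakyReLU()(y)
    return y

from keras.layers import Add  # residual merge layer, not in the import list above

def residual_block(y, filters):
    # Basic two-convolution residual block; assumes y already has `filters` channels.
    shortcut = y
    y = Conv2D(filters, (3, 3), padding='same')(y)
    y = add_common_layers(y)
    y = Conv2D(filters, (3, 3), padding='same')(y)
    y = BatchNormalization()(y)
    y = LeakyReLU()(Add()([shortcut, y]))  # merge the skip connection, then activate
    return y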