A personal diary of DataFrame munging over the years.
Convert a Series dtype to numeric (this raises an error if the column contains non-numeric values)
(h/t @makmanalp)
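A minimal sketch of that conversion (my own illustration; the toy frame and the column name 'col' are hypothetical):

import pandas as pd

df = pd.DataFrame({'col': ['1', '2', '3']})               # toy frame for illustration
df['col'] = df['col'].astype(float)                        # raises if a value is non-numeric
df['col'] = pd.to_numeric(df['col'], errors='coerce')      # alternative: bad values become NaN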
import torch
import torch.nn as nn
from torch.nn import Parameter

class SelfAttention(nn.Module):
    def __init__(self, attention_size, batch_first=False, non_linearity="tanh"):
        super(SelfAttention, self).__init__()
        self.batch_first = batch_first
        self.attention_weights = Parameter(torch.FloatTensor(attention_size))
        self.softmax = nn.Softmax(dim=-1)
        if non_linearity == "relu":
            self.non_linearity = nn.ReLU()
        else:
            self.non_linearity = nn.Tanh()
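The excerpt stops at the constructor. As a rough sketch (my assumption, not the gist's actual forward pass), such a layer typically scores each timestep against the learned weight vector, normalises the scores with a softmax, and returns the attention-weighted sum:

    def forward(self, inputs):
        # hypothetical sketch; inputs: (batch, seq_len, attention_size)
        scores = self.non_linearity(inputs.matmul(self.attention_weights))
        attentions = self.softmax(scores)                         # (batch, seq_len)
        representations = (inputs * attentions.unsqueeze(-1)).sum(dim=1)
        return representations, attentions                        # (batch, attention_size), weights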
from gensim.models import KeyedVectors

# Load gensim word2vec
w2v_path = '<Gensim File Path>'
w2v = KeyedVectors.load_word2vec_format(w2v_path)

import io
# Vector file: `\t` separates the values within a vector and `\n` separates the words
"""
from keras import backend as K
from keras.engine import InputSpec
from keras.engine.topology import Layer
import numpy as np

class TemporalMaxPooling(Layer):
    """
    This pooling layer accepts the temporal sequence output by a recurrent layer
    and performs temporal pooling, looking at only the non-masked portion of the sequence.
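The class body is cut off here. A minimal sketch of the masked max-pooling step the docstring describes (my assumption, not the excerpted layer's exact code) pushes masked timesteps to a large negative value so they can never win the max:

    def call(self, x, mask=None):
        # hypothetical sketch; x: (samples, steps, features)
        if mask is None:
            mask = K.ones_like(K.sum(x, axis=-1))              # (samples, steps): keep everything
        mask = K.expand_dims(K.cast(mask, K.floatx()), axis=-1)
        x = x * mask + (1.0 - mask) * -1e9                     # drown out masked steps
        return K.max(x, axis=1)                                # (samples, features)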
#!/usr/bin/env bash
set -x -e

JUPYTER_PASSWORD=${1:-"myJupyterPassword"}
NOTEBOOK_DIR=${2:-"s3://myS3Bucket/notebooks/"}

# home backup
if [ ! -d /mnt/home_backup ]; then
  sudo mkdir /mnt/home_backup
  sudo cp -a /home/* /mnt/home_backup
from keras import backend as K, initializers, regularizers, constraints
from keras.engine.topology import Layer

def dot_product(x, kernel):
    """
    Wrapper for dot product operation, in order to be compatible with both
    Theano and Tensorflow
    Args:
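The excerpt cuts off inside the docstring. A backend-agnostic body along the lines it describes (a common pattern, not necessarily the original author's exact code): Theano can dot a 3D tensor with a 1D kernel directly, while TensorFlow needs the kernel expanded to a column and the result squeezed back:

def dot_product(x, kernel):
    # hypothetical completion of the wrapper above
    if K.backend() == 'tensorflow':
        return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)
    return K.dot(x, kernel)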
from keras.engine.topology import Layer
from keras import initializations
from keras import backend as K

class Attention(Layer):
    '''Attention operation for temporal data.
    # Input shape
        3D tensor with shape: `(samples, steps, features)`.
    # Output shape
        2D tensor with shape: `(samples, features)`.
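A hypothetical usage sketch (the constructor arguments, shapes, and sizes are my assumptions): the layer collapses the step axis, so it sits after a recurrent layer that returns full sequences:

from keras.layers import Input, LSTM, Dense
from keras.models import Model

inputs = Input(shape=(50, 300))                    # (steps, features), assumed sizes
seq = LSTM(128, return_sequences=True)(inputs)     # (samples, steps, 128)
summary = Attention()(seq)                         # (samples, 128)
outputs = Dense(1, activation='sigmoid')(summary)
model = Model(inputs, outputs)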
from keras.layers import LSTM

class AttentionLSTM(LSTM):
    """LSTM with attention mechanism

    This is an LSTM incorporating an attention mechanism into its hidden states.
    Currently, the context vector calculated from the attended vector is fed
    into the model's internal states, closely following the model by Xu et al.
    (2016, Sec. 3.1.2), using a soft attention model following
    Bahdanau et al. (2014).

    The layer expects two inputs instead of the usual one:
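As a tiny stand-alone illustration of the soft-attention step the docstring refers to (simplified dot-product scoring rather than Bahdanau et al.'s additive score, and not this layer's code):

import numpy as np

def soft_attention_context(state, attended):
    # state: (units,) current hidden state; attended: (steps, units) vectors to attend over
    scores = attended.dot(state)                   # one score per attended vector
    weights = np.exp(scores - scores.max())
    weights /= weights.sum()                       # softmax over the steps
    return weights.dot(attended)                   # context vector, shape (units,)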
from __future__ import print_function

import imageio
from PIL import Image
import numpy as np

import keras
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, AveragePooling2D, ZeroPadding2D, Dropout, Flatten, Concatenate, Reshape, Activation
from keras.models import Model
from keras.regularizers import l2
from keras.optimizers import SGD
import boto
import boto.s3
import os.path
import sys

# Fill these in - you get them when you sign up for S3
AWS_ACCESS_KEY_ID = ''
AWS_ACCESS_KEY_SECRET = ''

# Fill in info on data to upload
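The snippet ends before the upload itself. A hypothetical continuation in the same boto 2 style (the bucket name, key, and filename are placeholders I made up):

from boto.s3.key import Key

conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_ACCESS_KEY_SECRET)
bucket = conn.get_bucket('my-bucket-name')          # placeholder bucket
key = Key(bucket)
key.key = 'path/in/bucket/data.csv'                 # placeholder destination key
key.set_contents_from_filename('data.csv')          # placeholder local file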