# https://github.com/tensorflow/tensorflow/blob/v1.3.0/tensorflow/contrib/keras/python/keras/layers/recurrent.py#L1174
class LSTM(Recurrent):
  #...
  def get_constants(self, inputs, training=None):
    #...
    # Build a (batch, units) tensor of ones, then drop entries once;
    # the resulting masks are reused at every timestep.
    ones = K.ones_like(K.reshape(inputs[:, 0, 0], (-1, 1)))
    ones = K.tile(ones, (1, self.units))

    def dropped_inputs():  # pylint: disable=function-redefined
      return K.dropout(ones, self.recurrent_dropout)

    # One mask per gate (i, f, c, o); at inference time each mask is all ones.
    rec_dp_mask = [
        K.in_train_phase(dropped_inputs, ones, training=training)
        for _ in range(4)
    ]
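The point of doing this in get_constants is that the recurrent dropout mask is sampled once per batch and then reused at every timestep, rather than resampled per step. From user code the masks are enabled simply by the two layer arguments; a minimal usage sketch (sizes illustrative, standalone Keras 2 import assumed):

from keras.layers import LSTM

# dropout applies to the inputs, recurrent_dropout to the previous hidden
# state; both masks stay fixed across the timesteps of a given batch.
layer = LSTM(128, dropout=0.25, recurrent_dropout=0.25)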
# https://github.com/tensorflow/tensorflow/blob/v1.3.0/tensorflow/contrib/keras/python/keras/layers/recurrent.py#L1197
class LSTM(Recurrent):
  #...
  def step(self, inputs, states):
    #...
    if self.implementation == 2:
      #...
    else:
      if self.implementation == 0:
        # inputs were already projected to 4 * units features; step only
        # slices out the per-gate blocks (f, c, o follow the same pattern).
        x_i = inputs[:, :self.units]
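With implementation == 0, the four gate projections are precomputed over the whole sequence and concatenated along the feature axis, so the step function only has to slice. A small self-contained illustration of that slicing (NumPy, shapes chosen arbitrarily):

import numpy as np

units, batch = 3, 2
inputs = np.arange(batch * 4 * units).reshape(batch, 4 * units)  # (batch, 4*units)

x_i = inputs[:, :units]               # input gate block
x_f = inputs[:, units:2 * units]      # forget gate block
x_c = inputs[:, 2 * units:3 * units]  # candidate block
x_o = inputs[:, 3 * units:]           # output gate block
assert x_o.shape == (batch, units)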
# https://github.com/tensorflow/tensorflow/blob/v1.3.0/tensorflow/contrib/keras/python/keras/layers/recurrent.py#L1197
class LSTM(Recurrent):
  #...
  def step(self, inputs, states):
    if self.implementation == 2:
      # dp_mask drops input features, rec_dp_mask drops the recurrent state;
      # both were sampled once in get_constants and are reused at every step.
      z = K.dot(inputs * dp_mask[0], self.kernel)
      z += K.dot(h_tm1 * rec_dp_mask[0], self.recurrent_kernel)
      if self.use_bias:
        z = K.bias_add(z, self.bias)
      #...
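The same idea transfers directly to PyTorch: sample the input and recurrent masks once before the time loop, then multiply them in at every step. A minimal sketch under those assumptions (single layer, function and variable names are mine, not a library API):

import torch

def lstm_with_fixed_masks(cell, x, h0, c0, p=0.25, training=True):
    # cell: torch.nn.LSTMCell; x: (seq_len, batch, input_size)
    h, c = h0, c0
    keep = 1 - p
    if training:
        # inverted dropout: rescale by 1/keep so the expected value is unchanged
        in_mask = x.new_empty(x.size(1), x.size(2)).bernoulli_(keep) / keep
        rec_mask = h.new_empty(h.size()).bernoulli_(keep) / keep
    else:
        in_mask, rec_mask = 1.0, 1.0
    outputs = []
    for t in range(x.size(0)):
        # the SAME masks are applied at every timestep
        h, c = cell(x[t] * in_mask, (h * rec_mask, c))
        outputs.append(h)
    return torch.stack(outputs), (h, c)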
# https://github.com/salesforce/awd-lstm-lm/blob/dfd3cb0235d2caf2847a4d53e1cbd495b781b5d2/weight_drop.py#L5
class WeightDrop(torch.nn.Module):
  def __init__(self, module, weights, dropout=0, variational=False):
    # ...
    self._setup()
    # ...

  def _setup(self):
    # Terrible temporary solution to an issue regarding compacting weights re: CUDNN RNN
    if issubclass(type(self.module), torch.nn.RNNBase):
      self.module.flatten_parameters = self.widget_demagnetizer_y2k_edition
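The elided parts of _setup register each listed weight under a '_raw' name, and a companion method re-applies dropout to the raw tensor on every forward pass; that is what makes this DropConnect on the hidden-to-hidden matrix, and it is also why flatten_parameters has to be stubbed out above (cuDNN would otherwise complain about the non-contiguous replacement weights). A condensed sketch of the mechanism, simplified from the same file and tied to the PyTorch of that era, not a drop-in replacement:

import torch
from torch.nn import functional as F

class WeightDropSketch(torch.nn.Module):
    def __init__(self, module, weights, dropout=0.5):
        super().__init__()
        self.module, self.weights, self.dropout = module, weights, dropout
        for name_w in weights:
            w = getattr(self.module, name_w)
            # stash the real parameter under '_raw' and manage name_w ourselves
            del self.module._parameters[name_w]
            self.module.register_parameter(name_w + '_raw', torch.nn.Parameter(w.data))

    def forward(self, *args):
        for name_w in self.weights:
            raw_w = getattr(self.module, name_w + '_raw')
            # resample a dropout mask over the weight matrix itself each forward
            setattr(self.module, name_w,
                    F.dropout(raw_w, p=self.dropout, training=self.training))
        return self.module.forward(*args)

# usage, e.g. dropping the hidden-to-hidden weights of an LSTM:
# wd_lstm = WeightDropSketch(torch.nn.LSTM(10, 10), ['weight_hh_l0'], dropout=0.5)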
# https://github.com/salesforce/awd-lstm-lm/blob/dfd3cb0235d2caf2847a4d53e1cbd495b781b5d2/embed_regularize.py#L6
from torch.autograd import Variable

def embedded_dropout(embed, words, dropout=0.1, scale=None):
  if dropout:
    # Sample one Bernoulli draw per vocabulary row, then rescale so the
    # expected value of the weights is unchanged.
    mask = embed.weight.data.new().resize_((embed.weight.size(0), 1)).bernoulli_(1 - dropout).expand_as(embed.weight) / (1 - dropout)
    mask = Variable(mask)
    masked_embed_weight = mask * embed.weight
  else:
    masked_embed_weight = embed.weight
  if scale:
    masked_embed_weight = scale.expand_as(masked_embed_weight) * masked_embed_weight
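Note that the mask is drawn per vocabulary row, not per token: every occurrence of a dropped word is zeroed across the whole batch, which is what makes this embedding dropout rather than ordinary dropout on the embedded sequence. An illustrative call site (names are mine; the training guard disables it at eval time):

# inside a model's forward(), with `self.embedding` an nn.Embedding
# and `words` a LongTensor of token ids:
emb = embedded_dropout(self.embedding, words,
                       dropout=0.1 if self.training else 0)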
class Model(nn.Module):
  def __init__(self, nb_words, hidden_size=128, embedding_size=128, n_layers=1,
               wdrop=0.25, edrop=0.1, idrop=0.25, batch_first=True):
    super(Model, self).__init__()
    # Modified LockedDropout that supports batch-first arrangement
    self.lockdrop = LockedDropout(batch_first=batch_first)
    self.idrop = idrop
    self.edrop = edrop
    self.n_layers = n_layers
    self.embedding = nn.Embedding(nb_words, embedding_size)
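LockedDropout itself is not shown above; the idea is a single mask shared by every timestep of a sequence, with the time axis position depending on batch_first. A minimal sketch of what such a module might look like under that assumption (mine, not the repo's exact code):

import torch
import torch.nn as nn

class LockedDropout(nn.Module):
    """Variational dropout: one mask per sequence, broadcast over time."""
    def __init__(self, batch_first=True):
        super(LockedDropout, self).__init__()
        self.batch_first = batch_first

    def forward(self, x, dropout=0.5):
        if not self.training or dropout == 0:
            return x
        if self.batch_first:   # x: (batch, seq_len, features)
            mask = x.new_empty(x.size(0), 1, x.size(2)).bernoulli_(1 - dropout)
        else:                  # x: (seq_len, batch, features)
            mask = x.new_empty(1, x.size(1), x.size(2)).bernoulli_(1 - dropout)
        return x * mask / (1 - dropout)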
checkpoint::checkpoint("2017-10-26")
pacman::p_load(data.table)
pacman::p_load(caret)
pacman::p_load(ggplot2)
set.seed(998)
mushrooms <- fread("mushrooms.csv", stringsAsFactors=T)
mushrooms[, .N, by=class]           # class distribution (edible vs. poisonous)
mushrooms[, ("veil-type") := NULL]  # drop veil-type, which is constant in this dataset
import pathlib

output_folder = "tmp/folder"
# create the directory and any missing parents; no error if it already exists
pathlib.Path(output_folder).mkdir(parents=True, exist_ok=True)
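The pre-pathlib equivalent, for reference, is os.makedirs with the exist_ok flag (available since Python 3.2):

import os
os.makedirs("tmp/folder", exist_ok=True)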
# http://pytorch.org/docs/master/_modules/torch/optim/sgd.html#SGD
class SGD(Optimizer):
  def __init__(self, params, lr=required, momentum=0, dampening=0,
               weight_decay=0, nesterov=False):
    # ...

  def __setstate__(self, state):
    # ...
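Inside step() (elided above), weight_decay is applied by adding weight_decay * p to each parameter's gradient before the momentum update, i.e. it is an L2 penalty folded into the optimizer rather than into the loss. Typical construction, with hyperparameter values purely illustrative and model assumed to be an existing nn.Module:

import torch

optimizer = torch.optim.SGD(model.parameters(), lr=0.1,
                            momentum=0.9, weight_decay=1e-4)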