From June 26, 2016 (the Python 3.5.2 release) to Aug. 31, 2016.
Python versions 2.6 to 3.5, excluding 2.7.
import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from theano.tensor.signal.downsample import max_pool_2d
from theano.tensor.extra_ops import repeat
from theano.sandbox.cuda.dnn import dnn_conv
from time import time
import numpy as np
from matplotlib import pyplot as plt

# Alec Radford, Indico, Kyle Kastner
# License: MIT
"""
Convolutional VAE in a single file.
Bringing in code from IndicoDataSolutions and Alec Radford (NewMu)
Additionally converted to use default conv2d interface instead of explicit cuDNN.
"""
import theano
import theano.tensor as T
from theano.compat.python2x import OrderedDict
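The "default conv2d interface instead of explicit cuDNN" remark above roughly amounts to swapping dnn_conv calls for theano.tensor.nnet.conv2d and letting Theano pick the cuDNN path itself. A minimal sketch of that substitution, with a made-up filter shape and variable names purely for illustration:

import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet import conv2d   # default interface; Theano routes to cuDNN when available

X = T.tensor4('X')                       # (batch, channels, rows, cols)
W_shape = (32, 3, 5, 5)                  # hypothetical bank of 32 filters, 3 input channels, 5x5 each
W = theano.shared(np.random.randn(*W_shape).astype('float32'), name='W')

# previously: out = dnn_conv(X, W, border_mode='valid')
out = conv2d(X, W, filter_shape=W_shape, border_mode='valid')
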
| """ | |
| Minimal character-level Vanilla RNN model. Written by Andrej Karpathy (@karpathy) | |
| BSD License | |
| """ | |
| import numpy as np | |
| # data I/O | |
| data = open('input.txt', 'r').read() # should be simple plain text file | |
| chars = list(set(data)) | |
| data_size, vocab_size = len(data), len(chars) |
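The excerpt stops after measuring the vocabulary; before characters can be fed to the RNN, a model like this also needs look-up tables between characters and integer indices, roughly as follows (a sketch of the usual next step, not necessarily the original file's exact lines):

char_to_ix = {ch: i for i, ch in enumerate(chars)}  # character -> integer index
ix_to_char = {i: ch for i, ch in enumerate(chars)}  # integer index -> character
print('data has %d characters, %d unique.' % (data_size, vocab_size))
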
| """ Deep Auto-Encoder implementation | |
| An auto-encoder works as follows: | |
| Data of dimension k is reduced to a lower dimension j using a matrix multiplication: | |
| softmax(W*x + b) = x' | |
| where W is matrix from R^k --> R^j | |
| A reconstruction matrix W' maps back from R^j --> R^k |
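To make the k -> j -> k round trip concrete, here is a small NumPy sketch of the encode/decode step described above; the dimensions, random weights, and softmax choice are purely illustrative, not the script's actual parameters:

import numpy as np

def softmax(z):
    e = np.exp(z - z.max())
    return e / e.sum()

k, j = 8, 3                                       # original and reduced dimensions (made up)
rng = np.random.RandomState(0)
W, b = rng.randn(j, k), rng.randn(j)              # encoder: R^k -> R^j
W_prime, b_prime = rng.randn(k, j), rng.randn(k)  # decoder: R^j -> R^k

x = rng.randn(k)                                  # a data point in R^k
x_reduced = softmax(W.dot(x) + b)                 # encode
x_reconstructed = W_prime.dot(x_reduced) + b_prime  # decode (reconstruction of x)
print(x_reduced.shape, x_reconstructed.shape)     # (3,) (8,)
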
'''This script goes along the blog post
"Building powerful image classification models using very little data"
from blog.keras.io.
It uses data that can be downloaded at:
https://www.kaggle.com/c/dogs-vs-cats/data
In our setup, we:
- created a data/ folder
- created train/ and validation/ subfolders inside data/
- created cats/ and dogs/ subfolders inside train/ and validation/
- put the cat pictures index 0-999 in data/train/cats
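This directory layout is what lets Keras discover the two classes on its own. A minimal sketch of feeding it to a model with ImageDataGenerator.flow_from_directory; the image size, batch size, and lack of augmentation here are assumptions, not the script's exact settings:

from keras.preprocessing.image import ImageDataGenerator

datagen = ImageDataGenerator(rescale=1. / 255)   # scale pixel values to [0, 1]

train_generator = datagen.flow_from_directory(
    'data/train',               # contains cats/ and dogs/ subfolders
    target_size=(150, 150),     # assumed image size
    batch_size=16,
    class_mode='binary')        # two classes: cats vs. dogs

validation_generator = datagen.flow_from_directory(
    'data/validation',
    target_size=(150, 150),
    batch_size=16,
    class_mode='binary')
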
| """ | |
| A weighted version of categorical_crossentropy for keras (2.0.6). This lets you apply a weight to unbalanced classes. | |
| @url: https://gist.github.com/wassname/ce364fddfc8a025bfab4348cf5de852d | |
| @author: wassname | |
| """ | |
| from keras import backend as K | |
| def weighted_categorical_crossentropy(weights): | |
| """ | |
| A weighted version of keras.objectives.categorical_crossentropy | |
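    # The body is truncated in the excerpt above; the following is one plausible completion
    # consistent with the docstring (a sketch, not necessarily the gist's verbatim code).
    weights = K.variable(weights)

    def loss(y_true, y_pred):
        # normalise predictions so class probabilities sum to 1, then clip to avoid log(0)
        y_pred /= K.sum(y_pred, axis=-1, keepdims=True)
        y_pred = K.clip(y_pred, K.epsilon(), 1 - K.epsilon())
        # weight each class's cross-entropy term and sum over the class axis
        return -K.sum(y_true * K.log(y_pred) * weights, axis=-1)

    return loss
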
#!/usr/bin/env bash
# Path to the project directory (which should include requirements.txt),
# and the files and directories within it that need to be deployed.
project=../backend
contents=(module lamdba_handler.py)

# Unnecessary parts. Note that there are some inter-dependencies in SciPy,
# for example to use scipy.stats you also need scipy.linalg, scipy.integrate,
# scipy.misc, scipy.sparse, and scipy.special.
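The inter-dependency note above can be checked from Python itself; this illustrative snippet lists which scipy subpackages get loaded as a side effect of importing scipy.stats alone (the exact set varies by SciPy version):

import sys
import scipy.stats  # imported only to see what it drags in

loaded = sorted({m.split('.')[1] for m in sys.modules if m.startswith('scipy.')})
print(loaded)  # typically includes linalg, integrate, special, sparse, ...
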
from scipy.stats import norm, shapiro, kstest, anderson
import bokeh.plotting as bplt
from bokeh import layouts
from bokeh.charts import Histogram, Scatter
from bokeh.models import Span
import pandas as pd
import numpy as np

def vertical_histogram(y):
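    # Hypothetical continuation: the original body is not shown in the excerpt; this is only
    # a sketch of what a vertical histogram helper might do with the imports above.
    counts, edges = np.histogram(y, bins=30)          # bin the data
    p = bplt.figure(title='vertical histogram')
    p.quad(top=counts, bottom=0, left=edges[:-1], right=edges[1:])  # one bar per bin
    return p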