I'm getting this error:
TypeError: 'list' object is not an iterator
The issue occurs with both Python 2.7 and Python 3. Here is the code:
%matplotlib inline
import pymc3 as pm
import theano.tensor as T
import theano
import sklearn
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import datasets
from sklearn.preprocessing import scale
from sklearn.cross_validation import train_test_split
from sklearn.datasets import make_moons, make_circles, make_classification
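# Note on the imports above: sklearn.cross_validation was deprecated and later
# removed from scikit-learn; on newer versions the same function is imported
# from sklearn.model_selection instead:
# from sklearn.model_selection import train_test_split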
X, Y = make_moons(noise=0.2, random_state=0, n_samples=1000)
X = scale(X)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=.5)
# Turn inputs and outputs into shared variables so that we can change them later
ann_input = theano.shared(X_train)
ann_output = theano.shared(Y_train)
n_hidden = 5
# Initialize random weights.
init_1 = np.random.randn(X.shape[1], n_hidden)
init_2 = np.random.randn(n_hidden, n_hidden)
init_out = np.random.randn(n_hidden)
with pm.Model() as neural_network:
    # Weights from input to hidden layer
    weights_in_1 = pm.Normal('w_in_1', 0, sd=1,
                             shape=(X.shape[1], n_hidden),
                             testval=init_1)
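    # The model as posted stops after the first weight layer, but `out` is
    # referenced below (minibatch_RVs) and ppc['data'] is read later, so the
    # remaining layers must exist somewhere. A sketch of what they would look
    # like, following the standard PyMC3 Bayesian neural network tutorial this
    # code resembles (layer names and activations here are assumptions, not
    # taken from the original post):
    weights_1_2 = pm.Normal('w_1_2', 0, sd=1,
                            shape=(n_hidden, n_hidden),
                            testval=init_2)
    weights_2_out = pm.Normal('w_2_out', 0, sd=1,
                              shape=(n_hidden,),
                              testval=init_out)
    act_1 = T.tanh(T.dot(ann_input, weights_in_1))
    act_2 = T.tanh(T.dot(act_1, weights_1_2))
    act_out = T.nnet.sigmoid(T.dot(act_2, weights_2_out))
    out = pm.Bernoulli('data', act_out, observed=ann_output)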
with neural_network:
    step = pm.Metropolis()
    trace = pm.sample(10000, step=step)[5000:]
# Replace shared variables with testing set
# (note that using this trick we could be streaming ADVI for big data)
ann_input.set_value(X_test)
ann_output.set_value(Y_test)
# Create posterior predictive samples
ppc = pm.sample_ppc(trace, model=neural_network, samples=500)
pred = ppc['data'].mean(axis=0) > 0.5
plt.scatter(X_test[pred==0, 0], X_test[pred==0, 1])
plt.scatter(X_test[pred==1, 0], X_test[pred==1, 1], color='r')
plt.title('Predicted labels in testing set')
print('Accuracy = {}%'.format((Y_test == pred).mean() * 100))
import theano.tensor as tt
# Set back to original data to retrain
ann_input.set_value(X_train)
ann_output.set_value(Y_train)
# Tensors and RVs that will use mini-batches
minibatch_tensors = np.array([ann_input, ann_output])
minibatch_RVs = np.array([out])
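# In the tutorial this code resembles, minibatch_tensors and minibatch_RVs are
# plain Python lists rather than numpy arrays; wrapping Theano shared
# variables and PyMC3 random variables in np.array may itself confuse
# advi_minibatch (an assumption, not verified against this PyMC3 version):
# minibatch_tensors = [ann_input, ann_output]
# minibatch_RVs = [out]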
# Generator that returns mini-batches in each iteration
def create_minibatch(data):
    rng = np.random.RandomState(0)
    while True:
        # Yield a random mini-batch of rows on every call
        # (generator body restored from the tutorial this code follows;
        # the batch size of 50 is an assumption)
        ixs = rng.randint(len(data), size=50)
        yield data[ixs]

minibatches = [
    create_minibatch(X_train),
    create_minibatch(Y_train),
]
print(type(minibatches))
print(minibatches[0])
print(minibatches[1])
total_size = len(Y_train)
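# The TypeError ("list object is not an iterator") most likely comes from
# `minibatches` being a plain list: as far as I can tell, advi_minibatch draws
# batches by calling next() on it, so it expects an iterator that yields
# (X_batch, Y_batch) tuples on every call rather than a list of generators
# (my reading of the error, not verified against the PyMC3 source).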
%%time
print('running mini batch....')
with neural_network:
    # Run advi_minibatch
    v_params = pm.variational.advi_minibatch(
        n=500, minibatch_tensors=minibatch_tensors,
        minibatch_RVs=minibatch_RVs, minibatches=minibatches,
        total_size=total_size, learning_rate=1e-2, epsilon=1.0
    )
print('final...stage')
with neural_network:
    trace = pm.variational.sample_vp(v_params, draws=5000)
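My guess is that the fix is to hand advi_minibatch an iterator instead of the list, roughly like this (untested sketch; the izip/zip choice is my assumption about what works on both Python versions):

try:
    from itertools import izip  # Python 2: built-in zip() would try to exhaust the infinite generators
except ImportError:
    izip = zip  # Python 3: zip() already returns a lazy iterator

minibatches = izip(
    create_minibatch(X_train),
    create_minibatch(Y_train),
)

izip/zip keeps the pairing lazy, which matters because create_minibatch never terminates, and the resulting object supports next(), which is what the error message is complaining about.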