One Paragraph of project description goes here
These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. See deployment for notes on how to deploy the project on a live system.
# Tiny example of a 3-layer neural network with dropout in the 2nd hidden layer
# Output layer is linear with L2 cost (regression model)
# Hidden layer activation is tanh
import numpy as np

n_epochs = 100
n_samples = 100
n_in = 10
n_hidden = 5
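The lines above only define hyper-parameters. A minimal sketch of the forward pass they describe (tanh hidden layers, inverted dropout on the 2nd hidden layer, a linear output with an L2 cost) could look like the following; the weight names, keep_prob = 0.5, and n_out = 1 are assumptions for illustration, not part of the original.

# Sketch only, not the original implementation.
rng = np.random.RandomState(0)
keep_prob = 0.5          # assumed dropout keep-probability
n_out = 1                # assumed output dimension

X = rng.randn(n_samples, n_in)
y = rng.randn(n_samples, n_out)

W1 = rng.randn(n_in, n_hidden) * 0.1
W2 = rng.randn(n_hidden, n_hidden) * 0.1
W3 = rng.randn(n_hidden, n_out) * 0.1

h1 = np.tanh(X @ W1)                                   # 1st hidden layer, tanh
h2 = np.tanh(h1 @ W2)                                  # 2nd hidden layer, tanh
mask = (rng.rand(*h2.shape) < keep_prob) / keep_prob   # inverted dropout mask
h2 = h2 * mask                                         # dropout applied at training time
y_hat = h2 @ W3                                        # linear output layer
cost = 0.5 * np.mean(np.sum((y_hat - y) ** 2, axis=1)) # L2 (squared-error) cost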
#
# fashion_mnist_theano.py
# date. 10/2/2017
#
# REM: I read the article announcing that development of "THEANO" is being stopped.
#      The deep learning framework inspired me and made me want to write this code.
#      I'd like to say thank you to the Theano support team.
#
import os
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score

X, y = datasets.make_classification(n_samples=10000, n_features=20,
                                    n_informative=2, n_redundant=10,
                                    random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3,
                                                    random_state=42)
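cross_val_score is imported above but never called. One plausible way to use it on the training split is sketched below; the LogisticRegression estimator and cv=5 are assumptions, not taken from the original.

# Sketch only: evaluate an assumed estimator with 5-fold cross-validation.
from sklearn.linear_model import LogisticRegression

clf = LogisticRegression(max_iter=1000)
scores = cross_val_score(clf, X_train, y_train, cv=5)
print("5-fold CV accuracy: %.3f +/- %.3f" % (scores.mean(), scores.std()))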
# code: https://github.com/dmlc/xgboost/blob/master/demo/gpu_acceleration/cover_type.py
import xgboost as xgb
import numpy as np
from sklearn.datasets import fetch_covtype
from sklearn.model_selection import train_test_split
import time

# Fetch dataset using sklearn
cov = fetch_covtype()
X = cov.data
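The snippet stops after loading the covertype features. A sketch of how GPU-accelerated training might continue is shown below; the label shift, split ratio, boosting rounds, and tree_method='gpu_hist' are assumptions based on common usage, not a verbatim copy of the linked demo.

# Sketch only: split the data, then train on GPU and time it.
y = cov.target - 1                      # shift labels 1..7 -> 0..6 for multi:softmax

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25,
                                                    random_state=42)
dtrain = xgb.DMatrix(X_train, label=y_train)
dtest = xgb.DMatrix(X_test, label=y_test)

params = {'objective': 'multi:softmax',
          'num_class': 7,
          'tree_method': 'gpu_hist'}    # older xgboost API; newer releases use
                                        # tree_method='hist' with device='cuda'

start = time.time()
bst = xgb.train(params, dtrain, num_boost_round=50,
                evals=[(dtest, 'test')])
print("GPU training time: %.1f s" % (time.time() - start))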
import numpy as np
np.set_printoptions(2)

# Multi-armed bandit setup: 10 levers, each with a hidden true reward probability.
ground_truth_prob = np.random.rand(10)   # true reward probability of each lever
number_of_levers_count = np.zeros(10)    # how many times each lever has been pulled
agents_prob = np.zeros(10)               # agent's running estimate of each lever's value
num_episode = 4000                       # number of pulls
e = 0.33                                 # epsilon: probability of exploring a random lever
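The variables above set up a 10-armed bandit with exploration rate e. A minimal epsilon-greedy loop consistent with that setup is sketched below; the Bernoulli reward model and incremental-mean update are assumptions for illustration, not the original code.

# Sketch only: epsilon-greedy action selection with a running-average value estimate.
for episode in range(num_episode):
    if np.random.rand() < e:
        action = np.random.randint(10)              # explore: random lever
    else:
        action = np.argmax(agents_prob)             # exploit: best estimate so far
    reward = float(np.random.rand() < ground_truth_prob[action])  # Bernoulli reward
    number_of_levers_count[action] += 1
    # incremental mean update of the estimated value for this lever
    agents_prob[action] += (reward - agents_prob[action]) / number_of_levers_count[action]

print("true probs     :", ground_truth_prob)
print("estimated probs:", agents_prob)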
import tensorflow as tf  # uses the TensorFlow 1.x API (tf.random_normal)

class SOM_Layer():

    def __init__(self, m, n, dim, num_epoch, learning_rate_som, radius_factor, gaussian_std):
        # m x n grid of SOM units, each holding a weight vector of length dim
        self.m = m
        self.n = n
        self.dim = dim
        self.gaussian_std = gaussian_std
        self.num_epoch = num_epoch
        self.learning_rate_som = learning_rate_som
        self.radius_factor = radius_factor
        # map weights, initialised with small random values
        self.map = tf.Variable(tf.random_normal(shape=[m*n, dim], stddev=0.05))
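The constructor only creates the weight variable. A hypothetical usage in the TensorFlow 1.x style of the snippet is sketched below; the grid size, input dimension, and hyper-parameter values are assumptions for illustration.

# Sketch only: build the layer and inspect its weight map inside a TF1 session.
som = SOM_Layer(m=20, n=20, dim=784, num_epoch=100,
                learning_rate_som=0.04, radius_factor=1.1, gaussian_std=0.5)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    weights = sess.run(som.map)          # (m*n, dim) array of SOM weight vectors
    print(weights.shape)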