Skip to content

Instantly share code, notes, and snippets.

@darden1
darden1 / train_with_sklearn_mlp.py
Last active April 16, 2018 10:45
train_with_sklearn_mlp.py
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import accuracy_score
# Convert the one-hot teacher labels back to plain class-index labels
# (scikit-learn's multiclass estimators take integer labels, so
# one-hot encoding is unnecessary here).
# NOTE(review): assumes np, Y_train, Y_test, X_train are defined earlier
# in the full script — this gist preview is truncated; confirm.
y_train = np.array([np.argmax(yi) for yi in Y_train])
y_test = np.array([np.argmax(yi) for yi in Y_test])
batch_size = int(len(X_train)*0.2) # mini-batch size: 20% of the training set
epochs = 100 # number of training epochs
mu = 0.05 # learning rate
@darden1
darden1 / train_with_my_mlp.py
Created April 11, 2018 13:35
train_with_my_mlp.py
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.preprocessing import OneHotEncoder
from sklearn.preprocessing import StandardScaler
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split
# Iris dataset
iris = datasets.load_iris()
X = iris.data  # feature matrix
@darden1
darden1 / my_multi_layer_perceptron.py
Last active December 22, 2018 14:10
my_multi_layer_perceptron.py
class MultiLayerPerceptron():
def __init__(self, hidden_layer_sizes=(10,), activation="relu", random_state=0):
self.init_state = True # flag: weights still need to be initialised
self.dense = None # list of fully-connected-layer instances
self.act_func = None # list of activation-function instances
self.n_units = None # list of unit counts for each layer
self.loss = None # loss on the training data
self.val_loss = None # loss on the test data
self.acc = None # accuracy on the training data
@darden1
darden1 / dense.py
Last active November 8, 2018 15:30
dense.py
# Fully-connected (dense) layer.  Holds the layer configuration plus the
# weight matrix W, bias vector b and — judging by the names dW/db —
# their gradients for backprop (rest of the class not shown in this excerpt).
class Dense:
def __init__(self, units, input_dim, kernel_initializer='he_normal', bias_initializer='zeros'):
self.units = units  # number of output units
self.input_dim = input_dim  # number of input features
self.kernel_initializer = kernel_initializer  # weight-init scheme name
self.bias_initializer = bias_initializer  # bias-init scheme name
self.W = None  # weight matrix (set on first use, presumably)
self.b = None  # bias vector
self.dW = None  # gradient w.r.t. W — presumably; definition truncated
self.db = None  # gradient w.r.t. b — presumably; definition truncated
@darden1
darden1 / softmax.py
Last active April 11, 2018 13:47
softmax.py
# Softmax output layer.  forward_prop applies softmax to the pre-activations Z;
# back_prop returns the softmax-with-loss gradient.  Both self.softmax and
# self.grad_softmax_with_loss are defined outside this truncated excerpt.
class SoftMax():
def __init__(self, name="softmax"):
self.name = name  # layer name
def forward_prop(self, Z):
self.Z = Z  # cache the input for the backward pass
return self.softmax(Z)
def back_prop(self, Z, Y):
return self.grad_softmax_with_loss(Z, Y)
@darden1
darden1 / activation.py
Created April 11, 2018 13:31
activation.py
# -*- coding: utf-8 -*-
import numpy as np
# Activation-function wrapper selected by name (e.g. "sigmoid").
# Only the sigmoid branch is visible; the rest of forward_prop and the
# self.sigmoid implementation lie outside this truncated excerpt.
class Activation():
def __init__(self, name="sigmoid"):
self.name = name  # which activation forward_prop applies
def forward_prop(self, Z):
if self.name=="sigmoid":
return self.sigmoid(Z)
@darden1
darden1 / onehot_matrix_check.py
Created April 11, 2018 10:37
onehot_matrix_check.py
# Sanity check: build a random n-by-m matrix B and a matching one-hot
# matrix Y with exactly one 1 placed at a random column of each row.
import numpy as np

n = 3  # number of rows
m = 3  # number of columns
B = np.random.rand(n, m)
Y = np.zeros_like(B)
for row in range(Y.shape[0]):
    # pick one random column index for this row
    hot = np.random.choice(np.arange(Y.shape[1]), 1)
    Y[row, hot] = 1
@darden1
darden1 / sklearn_multi_class_ logistic_regression.py
Created January 26, 2018 13:50
sklearn_multi_class_ logistic_regression.py
# -*- coding: utf-8 -*-
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
# Convert the one-hot teacher labels back to plain class-index labels
# (scikit-learn's multiclass logistic regression takes integer labels,
# so one-hot encoding is unnecessary).
# NOTE(review): assumes np, Y_train, Y_test are defined earlier in the
# full script — this gist preview is truncated; confirm.
y_train = np.array([np.argmax(yi) for yi in Y_train])
y_test = np.array([np.argmax(yi) for yi in Y_test])
# Regularisation term is 1/C/2 * W^2, so C -> infinity means no
# regularisation; the huge C below effectively disables it.
clf_sk = LogisticRegression(C=1e10, max_iter=500, solver="sag", tol=1e-10)
@darden1
darden1 / my_multi_class_ logistic_regression2.py
Created January 26, 2018 13:49
my_multi_class_ logistic_regression2.py
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.preprocessing import OneHotEncoder
from sklearn.preprocessing import StandardScaler
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split
# Iris dataset
@darden1
darden1 / my_multi_class_ logistic_regression1.py
Last active January 26, 2018 13:50
my_multi_class_ logistic_regression1.py
# -*- coding: utf-8 -*-
import numpy as np
# Multiclass logistic regression implemented from scratch
# (rest of the class lies outside this truncated excerpt).
class MultiClassLogisticRegression():
def __init__(self):
self.init_state = True # flag: weights still need to be initialised
self.W = None # weight matrix
self.b = None # bias / threshold
self.loss = np.array([]) # loss on the training data (appended to during fit, presumably)