Optimizing loss functions in TensorFlow
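This gist fits a linear regression with TensorFlow: it loads a training set (`train.csv`, whose `medv`, `rm`, `zn`, and `indus` columns match the Boston housing dataset), builds the prediction `y_pred = Xw` with a constant bias column, and minimizes the mean squared error with the Adam optimizer for 200 steps. It also recomputes the MSE by hand from lower-level ops to show what the loss function does internally.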
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import confusion_matrix
from tensorflow.python.ops import math_ops

train_df = pd.read_csv('./train.csv', index_col='ID')
y = train_df['medv'].values
y = y.reshape(-1, 1)
train_df['constant'] = 1  # bias column, so the model is w0 + w1*rm + w2*zn + w3*indus
columns = ['constant', 'rm', 'zn', 'indus']
x = train_df[columns].values

# create variables and make w trainable
w = tf.Variable([[0], [0], [0], [0]], trainable=True, dtype=tf.float64)  # one weight per column
x = tf.convert_to_tensor(x)
y = tf.convert_to_tensor(y)

# make a prediction
y_pred = tf.matmul(x, w)

# define our loss function
mse = tf.losses.mean_squared_error(y, y_pred)

# instantiate our optimizer
adam = tf.train.AdamOptimizer(learning_rate=0.3)

# create a step of gradient descent
a = adam.minimize(mse, var_list=[w])  # pass the trainable variables as a list

# compute our mean squared error by hand, mirroring what the loss op does internally
l, p = confusion_matrix.remove_squeezable_dimensions(y, y_pred)  # align label/prediction shapes
s = math_ops.square(p - l)
mean_t = math_ops.reduce_mean(s)
print(mean_t)  # prints the symbolic tensor, not a value; the value is computed in the session below

# our session
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for i in range(200):
        sess.run(a)  # each call runs one optimization step
    w_val = sess.run(w)
    print(w_val)  # the weights after training
    e_val = sess.run(mean_t)  # compute our MSE
    print(e_val)  # print our MSE
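Note that this gist targets the TensorFlow 1.x API (`tf.Session`, `tf.train.AdamOptimizer`), so it will not run unmodified under TensorFlow 2. As a quick sanity check on the trained weights, you can compare them against the closed-form least-squares solution; the snippet below is a minimal sketch, assuming the same `train.csv` with an `ID` index and the `medv`, `rm`, `zn`, and `indus` columns.

# hypothetical sanity check, not part of the original gist:
# the closed-form ordinary-least-squares solution should be close to the
# weights Adam converges to, given enough steps and a suitable learning rate
df = pd.read_csv('./train.csv', index_col='ID')
df['constant'] = 1
X_np = df[['constant', 'rm', 'zn', 'indus']].values
y_np = df['medv'].values.reshape(-1, 1)
w_ols, residuals, rank, sv = np.linalg.lstsq(X_np, y_np, rcond=None)
print(w_ols)  # compare with the weights printed by the training loop above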