TensorFlow lecture
aircon.csv

temp | humid | on/off (1=on, 0=off)
---|---|---
29 | 80 | 1
24 | 90 | 1
29 | 40 | 0
32 | 10 | 1
36 | 70 | 1
33 | 20 | 1
15 | 40 | 0
24 | 70 | 0
23 | 40 | 0
27 | 80 | 1
31 | 90 | 1
32 | 50 | 1
28 | 60 | 1
23 | 60 | 0
17 | 70 | 0
14 | 80 | 0
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Format of dataset: efficiency, capacity
# Goal: create a linear regression model
tf.set_random_seed(1)

efficiency, capacity = np.loadtxt('cars.csv', unpack=True, delimiter=',')

x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)
w = tf.Variable(tf.random_normal([1]), name='Weight')
b = tf.Variable(tf.random_normal([1]), name='Bias')

# Hypothesis: H(x) = w*x + b, mean-squared-error cost
hy = w * x + b
cost = tf.reduce_mean(tf.square(hy - y))
optimizer = tf.train.GradientDescentOptimizer(0.001)
train = optimizer.minimize(cost)

s = tf.Session()
s.run(tf.global_variables_initializer())

ev_cost = 0
ev_w = 0
ev_b = 0
for i in range(10000):
    ev_cost, ev_w, ev_b, _ = s.run([cost, w, b, train],
                                   feed_dict={x: efficiency, y: capacity})
print('Result : cost={} w={} b={}'.format(ev_cost, ev_w, ev_b))

# Predict capacity for a few test efficiencies and plot the fitted line
test_effi = [10, 20, 30]
test_capa = []
for e in test_effi:
    c = ev_w * e + ev_b
    test_capa.append(c)
    print('Eff={} => Cap={}'.format(e, c))

plt.scatter(efficiency, capacity)
plt.plot(test_effi, test_capa, 'r')
plt.show()

# Result : cost=5291.11328125 w=[-12.06719303] b=[ 463.78918457]
# Eff=10 => Cap=[ 343.11724854]
# Eff=20 => Cap=[ 222.44532776]
# Eff=30 => Cap=[ 101.77340698]
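As a quick cross-check on the gradient-descent fit, the same line can be computed in closed form with NumPy's least-squares polyfit. A minimal sketch, assuming the same cars.csv as above; as training converges, the w and b found by gradient descent should approach these values:

import numpy as np

# Closed-form least-squares fit of capacity = w * efficiency + b.
# np.polyfit with degree 1 returns [slope, intercept].
efficiency, capacity = np.loadtxt('cars.csv', unpack=True, delimiter=',')
slope, intercept = np.polyfit(efficiency, capacity, 1)
print('OLS fit: w={:.4f} b={:.4f}'.format(slope, intercept))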
cars.csv

efficiency | capacity
---|---
21 | 160
21 | 160
22.8 | 108
21.4 | 258
18.7 | 360
18.1 | 225
14.3 | 360
24.4 | 146.7
22.8 | 140.8
19.2 | 167.6
17.8 | 167.6
16.4 | 275.8
17.3 | 275.8
15.2 | 275.8
10.4 | 472
10.4 | 460
14.7 | 440
32.4 | 78.7
30.4 | 75.7
33.9 | 71.1
21.5 | 120.1
15.5 | 318
15.2 | 304
13.3 | 350
19.2 | 400
27.3 | 79
26 | 120.3
30.4 | 95.1
15.8 | 351
19.7 | 145
15 | 301
21.4 | 121
import tensorflow as tf

tf.set_random_seed(777)

x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)

# Hypothesis: H(x) = w*x + b; find the w, b that minimize the cost
w = tf.Variable(tf.random_normal([1]), name='Weight', dtype=tf.float32)
b = tf.Variable(tf.random_normal([1]), name='Bias', dtype=tf.float32)
hypo = w * x + b

# Cost = mean((hypo - y)^2)
cost = tf.reduce_mean(tf.square(hypo - y))

# Minimize the cost with gradient descent
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)

s = tf.Session()
s.run(tf.global_variables_initializer())
for step in range(2000):
    cost_val, w_val, b_val, _ = s.run([cost, w, b, train],
                                      feed_dict={x: [1, 2, 3], y: [1, 2, 3]})
    if step % 100 == 0:
        print('step {}, cost {}, w {}, b {}'.format(step, cost_val, w_val, b_val))
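To make explicit what GradientDescentOptimizer(0.01).minimize(cost) does each step, here is the same fit with the gradient updates written out by hand in plain NumPy. This is a sketch for illustration, not part of the lecture code; for this data the fit should approach w=1, b=0:

import numpy as np

# Hand-rolled gradient descent for H(x) = w*x + b with squared-error cost.
x = np.array([1., 2., 3.])
y = np.array([1., 2., 3.])
w, b = np.random.randn(), np.random.randn()
lr = 0.01

for step in range(2000):
    hypo = w * x + b
    # d(cost)/dw = 2 * mean((hypo - y) * x), d(cost)/db = 2 * mean(hypo - y)
    grad_w = 2 * np.mean((hypo - y) * x)
    grad_b = 2 * np.mean(hypo - y)
    w -= lr * grad_w
    b -= lr * grad_b

print('w={:.4f} b={:.4f}'.format(w, b))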
import tensorflow as tf
import random
import matplotlib.pyplot as plt
from tensorflow.examples.tutorials.mnist import input_data

mnist = input_data.read_data_sets('MNIST_data/', one_hot=True)
nb_classes = 10

# 28x28 images flattened to 784 features; one-hot labels for the 10 digits
x = tf.placeholder(tf.float32, [None, 784])
y = tf.placeholder(tf.float32, [None, nb_classes])
w = tf.Variable(tf.random_normal([784, nb_classes]))
b = tf.Variable(tf.random_normal([nb_classes]))

# Softmax hypothesis and cross-entropy cost
hf = tf.nn.softmax(tf.matmul(x, w) + b)
cost = tf.reduce_mean(-tf.reduce_sum(y * tf.log(hf), axis=1))
train = tf.train.GradientDescentOptimizer(0.1).minimize(cost)

is_correct = tf.equal(tf.argmax(hf, 1), tf.argmax(y, 1))
acc = tf.reduce_mean(tf.cast(is_correct, tf.float32))

training_epoch = 15
batch_size = 100

s = tf.Session()
s.run(tf.global_variables_initializer())
for epoch in range(training_epoch):
    avg_cost = 0
    total_batch = int(mnist.train.num_examples / batch_size)
    for i in range(total_batch):
        batchxs, batchys = mnist.train.next_batch(batch_size)
        c, _ = s.run([cost, train], {x: batchxs, y: batchys})
        avg_cost += c / total_batch
    print('epoch: %04d\ncost: %.9f' % (epoch, avg_cost))

print('Accu :', acc.eval(session=s, feed_dict={
    x: mnist.test.images, y: mnist.test.labels}))

# Show a few random test samples with their labels and predictions
for i in range(5):
    r = random.randint(0, mnist.test.num_examples - 1)
    print('Label: ', s.run(tf.argmax(mnist.test.labels[r:r + 1], 1)))
    print('Predict: ', s.run(tf.argmax(hf, 1),
                             feed_dict={x: mnist.test.images[r:r + 1]}))
    plt.imshow(mnist.test.images[r:r + 1].reshape(28, 28))
    plt.show()
import tensorflow as tf
import numpy as np

tf.set_random_seed(1)

# Columns: temp, humid, on/off (see aircon.csv above)
xy = np.loadtxt('aircon.csv', delimiter=',', dtype=np.float32)
x_data = xy[:, 0:-1]
y_data = xy[:, [-1]]

x = tf.placeholder(tf.float32, shape=[None, 2])
y = tf.placeholder(tf.float32, shape=[None, 1])
w = tf.Variable(tf.random_normal([2, 1]), name='Weight')
b = tf.Variable(tf.random_normal([1]), name='Bias')

# Linear hypothesis with squared-error cost; outputs above 0.5 count as "on"
hf = tf.matmul(x, w) + b
cost = tf.reduce_mean(tf.square(hf - y))
train = tf.train.GradientDescentOptimizer(0.0001).minimize(cost)

predicted = tf.cast(hf > 0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, y), dtype=tf.float32))

with tf.Session() as s:
    s.run(tf.global_variables_initializer())
    for i in range(50000):
        ev_cost, ev_hf, ev_w, ev_b, _ = s.run([cost, hf, w, b, train],
                                              {x: x_data, y: y_data})
    print('Result:\ncost={}\nw={}\nb={}\n'.format(ev_cost, ev_w, ev_b))

    # Predict on/off for a few unseen (temp, humid) pairs
    pred_hf, pred_onoff = s.run([hf, predicted], {
        x: [[39, 80],
            [32, 40],
            [22, 90],
            [17, 20],
            [27, 95]]
    })
    print('Accuracy: {}\n'.format(s.run(accuracy, {x: x_data, y: y_data})))

    pred_result = zip(pred_hf, pred_onoff)
    print('Predicted result:')
    for h, o in pred_result:
        print('HF={}, Onoff={}'.format(h, o))

# Result:
# cost=0.13552111387252808
# w=[[ 0.03415401]
#    [-0.00027531]]
# b=[-0.27437535]
#
# Accuracy: 0.8125
#
# Predicted result:
# HF=[ 1.03560627], Onoff=[ 1.]
# HF=[ 0.80754054], Onoff=[ 1.]
# HF=[ 0.45223501], Onoff=[ 0.]
# HF=[ 0.30073658], Onoff=[ 0.]
# HF=[ 0.62162852], Onoff=[ 1.]
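Since the label here is binary (on/off), logistic regression is the more conventional choice than a thresholded linear fit. Below is a minimal sketch of that variant, reusing the x, y, w, b tensors defined in the script above; tf.nn.sigmoid_cross_entropy_with_logits works on the raw logits, so no log() of a saturated probability is ever computed:

# Sketch: logistic-regression variant of the model above (same data layout,
# reuses x, y, w, b from the script). Not part of the original lecture code.
logits = tf.matmul(x, w) + b
hf_sigmoid = tf.sigmoid(logits)
cost_logistic = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits))
train_logistic = tf.train.GradientDescentOptimizer(0.01).minimize(cost_logistic)
predicted_logistic = tf.cast(hf_sigmoid > 0.5, dtype=tf.float32)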
Is there any way to prevent underflow? Casting? It seems weird...
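If this refers to tf.log(hf) in the MNIST example hitting log(0), the usual fix is not casting but computing the cost from the raw logits with the fused op, which is numerically stable. A sketch, reusing that script's x, y, w, b:

# Numerically stable alternative to -sum(y * log(softmax(...))):
# feed raw logits to the fused op so log(0) can't produce NaN/inf.
logits = tf.matmul(x, w) + b
cost = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=logits))
# Prediction can still use tf.argmax(logits, 1); softmax is monotonic,
# so the argmax is unchanged.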