@suriyadeepan
Created May 13, 2018 08:40
Multi-label classification with logistic regression in TensorFlow (one sigmoid cross-entropy output per label)
import tensorflow as tf
import numpy as np


class LogisticRegressor(object):

    def __init__(self, num_attrs, num_labels, threshold=0.8, lr=0.01):
        # placeholders for input attributes and multi-hot (0/1) label vectors
        self.attrs = tf.placeholder(tf.float32, [None, num_attrs], name='attrs')
        self.labels = tf.placeholder(tf.int32, [None, num_labels], name='labels')
        # weights and bias, initialized with small uniform noise
        W = tf.get_variable(shape=[num_attrs, num_labels], dtype=tf.float32,
                            initializer=tf.random_uniform_initializer(-0.01, 0.01),
                            name='W')
        b = tf.get_variable(shape=[num_labels, ], dtype=tf.float32,
                            initializer=tf.random_uniform_initializer(-0.01, 0.01),
                            name='b')
        logits = tf.matmul(self.attrs, W) + b
        # sigmoid cross-entropy treats each label as an independent binary decision
        ce = tf.nn.sigmoid_cross_entropy_with_logits(
            labels=tf.cast(self.labels, tf.float32),
            logits=logits
        )
        loss = tf.reduce_mean(ce)
        # element-wise accuracy: thresholded sigmoid output compared against each label bit
        accuracy = tf.cast(tf.equal(self.labels,
                                    tf.cast(tf.nn.sigmoid(logits) > threshold, tf.int32)),
                           tf.float32)
        self.out = {
            'logits': tf.nn.sigmoid(logits),
            'accuracy': tf.reduce_mean(accuracy),
            'loss': loss
        }
        # Adam update on the mean sigmoid cross-entropy loss
        self.train_op = tf.train.AdamOptimizer(lr).minimize(loss)


def random_execution(model):
    # run the forward pass once on random inputs to sanity-check the graph
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        return sess.run(model.out,
                        feed_dict={
                            model.attrs: np.random.uniform(-0.01, 0.01, [2, 103]),
                            # randint's high bound is exclusive; use 2 to get both 0 and 1 labels
                            model.labels: np.random.randint(0, 2, [2, 14])
                        })


if __name__ == '__main__':
    model = LogisticRegressor(103, 14)
    out = random_execution(model)
    print(out)
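
For completeness, here is a minimal training sketch (not part of the original gist): it assumes a hypothetical synthetic dataset of random attributes and random 0/1 label vectors, and simply runs the Adam train op for a few epochs while printing loss and accuracy. Call it as train_on_synthetic_data(model) after building the graph.

def train_on_synthetic_data(model, num_examples=256, epochs=50):
    # hypothetical synthetic data: 103 attributes, 14 independent binary labels
    X = np.random.uniform(-1.0, 1.0, [num_examples, 103])
    Y = np.random.randint(0, 2, [num_examples, 14])
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for epoch in range(epochs):
            # one full-batch gradient step per epoch, plus the monitoring ops
            _, out = sess.run([model.train_op, model.out],
                              feed_dict={model.attrs: X, model.labels: Y})
            print('epoch {:3d}  loss {:.4f}  accuracy {:.4f}'.format(
                epoch, out['loss'], out['accuracy']))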