Skip to content

Instantly share code, notes, and snippets.

@kris-singh
Created April 1, 2018 18:22
Show Gist options
  • Save kris-singh/364867d41d4c65fe356ac3d454fba596 to your computer and use it in GitHub Desktop.
Save kris-singh/364867d41d4c65fe356ac3d454fba596 to your computer and use it in GitHub Desktop.
class SelectLoss:
    """
    Selection of training samples weighted by their loss values.

    A random "forward batch" of candidates is scored with the model; a
    final batch is then drawn with probability proportional to each
    candidate's cross-entropy loss. No rejection sampling is needed.
    """

    def __init__(self, X, Y, fwd_batch_size, batch_size, _, loss):
        """
        :param X: training data, indexable along axis 0
        :param Y: training labels, one-hot with 10 classes (shape (n, 10))
        :param fwd_batch_size: number of candidate samples scored per call
            to :meth:`sample`
        :param batch_size: number of samples drawn from the candidates
        :param _: unused; kept for interface compatibility
        :param loss: loss function (stored but unused; the cross-entropy
            graph below is what sample() evaluates)
        """
        self.X = X
        self.Y = Y
        self.fwd_batch_size = fwd_batch_size
        self.batch_size = batch_size
        self.loss = loss
        # Placeholders for one-hot labels (A) and model outputs (B).
        self.A = tf.placeholder('float', shape=[None, 10])
        self.B = tf.placeholder('float', shape=[None, 10])
        # BUG FIX: keep the per-sample loss as a graph tensor and evaluate
        # it lazily inside sample(). The original wrapped this in
        # K.get_value(...) here, which tries to evaluate a graph whose
        # placeholders have not been fed and fails at construction time.
        self.res = tf.nn.softmax_cross_entropy_with_logits(labels=self.A,
                                                           logits=self.B)

    def sample(self, model):
        """
        Draw ``batch_size`` indices, weighted by per-sample loss.

        :param model: a model exposing ``predict_proba`` (Keras-style)
        :return: 1-D array of ``batch_size`` indices into ``self.X``,
            drawn without replacement with probability proportional to
            each candidate's cross-entropy loss
        """
        # Score a random subset of the training set ("forward batch").
        idx = np.random.choice(np.arange(0, self.X.shape[0]),
                               size=self.fwd_batch_size, replace=False)
        preds = model.predict_proba(self.X[idx])
        with tf.Session() as sess:
            # BUG FIX: the original discarded the result of sess.run and
            # normalized the raw (n, 10) predictions instead, so sampling
            # was never loss-based (and `p` was not even a valid 1-D
            # distribution). Keep the loss vector and use it below.
            losses = sess.run(self.res,
                              feed_dict={self.A: self.Y[idx],
                                         self.B: preds})
        # Normalize losses into a sampling distribution over candidates.
        p = losses / np.sum(losses)
        return np.random.choice(idx,
                                size=self.batch_size,
                                replace=False,
                                p=p)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment