
import numpy as np
from tensorflow.keras import backend as K, optimizers
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.models import Model

# Reparameterization trick: z = z_mean + sigma * epsilon, with epsilon ~ N(0, I)
def sample(args):
    z_mean, z_log_var = args
    batch = K.shape(z_mean)[0]
    dim = K.int_shape(z_mean)[1]
    epsilon = K.random_normal(shape=(batch, dim))
    return z_mean + K.exp(0.5 * z_log_var) * epsilon
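# The encoder that produces inputs, z_mean and z_log_var is not shown in this
# gist. A minimal sketch of what it could look like, assuming original_dim,
# intermediate_dim and latent_dim are defined earlier; here the encoder Model
# returns only the sampled z so that decoder(encoder(inputs)) below works as
# written:
from tensorflow.keras.layers import Lambda

inputs = Input(shape=(original_dim,), name='encoder_input')
h = Dense(intermediate_dim, activation='relu')(inputs)
z_mean = Dense(latent_dim, name='z_mean')(h)
z_log_var = Dense(latent_dim, name='z_log_var')(h)
z = Lambda(sample, output_shape=(latent_dim,), name='z')([z_mean, z_log_var])
encoder = Model(inputs, z, name='encoder')
encoder.summary()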
# decoder model
latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
x = Dense(intermediate_dim, activation='relu')(latent_inputs)
outputs = Dense(original_dim, activation='sigmoid')(x)
# Instantiate the decoder model:
decoder = Model(latent_inputs, outputs, name='decoder')
decoder.summary()
# full VAE model
outputs = decoder(encoder(inputs))
vae_model = Model(inputs, outputs, name='vae_mlp')
# the VAE loss: reconstruction error plus KL divergence
def vae_loss(x, x_decoded_mean):
    # reconstruction loss: squared error summed over all axes
    reconstruction_loss = K.sum(K.square(x - x_decoded_mean))
    # KL divergence between N(z_mean, exp(z_log_var)) and the standard normal prior
    kl_loss = -0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    # average the combined loss over the batch
    total_loss = K.mean(reconstruction_loss + kl_loss)
    return total_loss
opt = optimizers.Adam(learning_rate=0.0001, clipvalue=0.5)
vae_model.compile(optimizer=opt, loss=vae_loss)
vae_model.summary()
# Finally, we train the model:
results = vae_model.fit(X_train, X_train,
                        shuffle=True,
                        epochs=32,
                        batch_size=256)
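# The thresholding step below relies on a helper get_error_term that is not
# defined in this gist. A minimal sketch of what it might look like, assuming
# it returns one error value per sample (MAE by default, RMSE when
# _rmse=True), computed over the feature axis:
def get_error_term(x_pred, x_true, _rmse=False):
    # hypothetical implementation, not from the original gist
    if _rmse:
        return np.sqrt(np.mean(np.square(x_pred - x_true), axis=1))
    return np.mean(np.abs(x_pred - x_true), axis=1)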
# Reconstruction error on the training set defines the anomaly threshold
X_train_pred = vae_model.predict(X_train)
mae_vector = get_error_term(X_train_pred, X_train, _rmse=False)
error_thresh = np.quantile(mae_vector, 0.99)

# Flag test samples whose reconstruction error exceeds the 99th-percentile threshold
X_test_pred = vae_model.predict(X_test)
mae_vector_test = get_error_term(X_test_pred, X_test, _rmse=False)
anomalies = (mae_vector_test > error_thresh)
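# For example, anomalies is a boolean mask over X_test: anomalies.sum() gives
# the number of flagged points and np.where(anomalies)[0] their row indices.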
def perm(orig_str, new_str=""):
if(len(new_str) == len(orig_str)):
print(new_str)
else:
for char in orig_str:
if(char not in new_str):
perm(orig_str, new_str + char)
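# Example: perm("abc") prints the six orderings abc, acb, bac, bca, cab, cba.
# Note that the `char not in new_str` check assumes distinct characters: for an
# input with repeats such as "aab" nothing is printed, since no full-length
# string can be built without reusing a character.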
# another version (sketch): prints every permutation via itertools.permutations;
# the body was missing in the original gist, so this completion is one possibility
def perm(orig_str, new_str=""):
    from itertools import permutations
    for p in permutations(orig_str):
        print(new_str + "".join(p))
// Scala snippet: serving an XGBoost model inside a Kafka Streams application
import ml.dmlc.xgboost4j.LabeledPoint
import ml.dmlc.xgboost4j.scala.{Booster, DMatrix, XGBoost}

// Load the model from a binary file and keep it as a singleton
object Classifier {
  var model: Option[Booster] = None

  def Init(modelFile: String): Unit = {
    if (model.isEmpty) {
      model = Some(XGBoost.loadModel(modelFile))
    }
  }

  // Wrap a single raw feature vector in a one-row DMatrix
  private def getInputVector(rawVector: Seq[Float]): DMatrix = {
    val nRows = 1
    val nCols = rawVector.length
    val missingVal = Float.NaN
    new DMatrix(rawVector.toArray[Float], nRows, nCols, missingVal)
  }

  def predict(recordID: String, features: Seq[Float]): (String, Float) = {
    val xgbInput = getInputVector(features)
    // assumed completion: return the score of the single row (Init must have been called first)
    (recordID, model.get.predict(xgbInput)(0)(0))
  }
}
// Kafka Streams topology that scores every incoming PredictRequest
// (PredictRequest and its Serde are assumed to be defined elsewhere in the project)
import org.apache.kafka.streams.Topology
import org.apache.kafka.streams.scala.StreamsBuilder

def getStreamTopology(inputTopic: String): Topology = {
  val builder = new StreamsBuilder()
  val reqStream = builder.stream[String, PredictRequest](inputTopic)
  reqStream
    .map((_, request) => {
      Classifier.predict(request.recordID, request.featuresVector)
    })
    .split()