Skip to content

Instantly share code, notes, and snippets.

View gaphex's full-sized avatar

Denis gaphex

  • Moscow
View GitHub Profile
# Persist the fine-tuned model (architecture as JSON, weights as HDF5) and
# rebuild it from disk. BertLayer is a custom layer, so it must be supplied
# via `custom_objects` when deserializing.
# NOTE: model.to_json() already returns a JSON string; json.dump adds one
# more encoding layer here, which json.load symmetrically removes below, so
# the round-trip is consistent.
# Fix: the original passed bare open(...) handles that were never closed;
# context managers guarantee the files are flushed and released.
with open("model.json", "w") as fout:
    json.dump(model.to_json(), fout)
with open("model.json") as fin:
    model = tf.keras.models.model_from_json(json.load(fin),
                                            custom_objects={"BertLayer": BertLayer})
model.load_weights("bert_tuned.hdf5")
from tensorflow.python.framework.graph_util import convert_variables_to_constants
from tensorflow.python.tools.optimize_for_inference_lib import optimize_for_inference
def freeze_keras_model(model, export_path=None, clear_devices=True):
"""
Freezes the state of a session into a pruned computation graph.
@param model The Keras model to be optimized for inference.
@param clear_devices Remove the device directives from the graph for better portability.
@return The frozen graph definition.
frozen_graph = freeze_keras_model(model, export_path="frozen_graph.pb")
def load_graph(frozen_graph_filename):
    """Deserialize a frozen GraphDef protobuf and import it.

    @param frozen_graph_filename Path to a serialized GraphDef (.pb) file.
    @return A new tf.Graph containing the imported frozen graph.
    """
    # Read the raw protobuf bytes and parse them into a GraphDef message.
    with tf.gfile.GFile(frozen_graph_filename, "rb") as handle:
        serialized = handle.read()
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(serialized)

    # Import into a fresh graph so the process-wide default graph
    # is left untouched.
    restored = tf.Graph()
    with restored.as_default():
        tf.import_graph_def(graph_def)
    return restored
# Restore the frozen graph and locate its endpoints: the first operation is
# taken as the input and the last as the output (relies on the frozen
# graph's topological op ordering — TODO confirm this holds for this graph).
restored_graph = load_graph("frozen_graph.pb")
graph_ops = restored_graph.get_operations()
input_op, output_op = graph_ops[0].name, graph_ops[-1].name
# ':0' selects the first output tensor of each op.
x = restored_graph.get_tensor_by_name(input_op + ':0')
y = restored_graph.get_tensor_by_name(output_op + ':0')
# Build a text preprocessor from the BERT vocab with max sequence length 64.
preprocessor = build_preprocessor("./uncased_L-12_H-768_A-12/vocab.txt", 64)
# Wrap the preprocessor as an in-graph op producing (ids, mask, segment_ids)
# as three int32 tensors.
# NOTE(review): py_func is never referenced below — the feed goes straight
# into `x` with raw strings; presumably preprocessing already lives inside
# the frozen graph. Verify before removing.
py_func = tf.numpy_function(preprocessor, [x], [tf.int32, tf.int32, tf.int32], name='preprocessor')
sess = tf.Session(graph=restored_graph)
# Sentence pair joined by ' ||| ', matching the preprocessor's pair format.
t_data = ["What would happen if earth stopped rotating? ||| What would happen if the Earth rotation suddenly stop?"]
# Feed shape (-1, 1): one string per row, as the input placeholder expects.
y_out = sess.run(y, feed_dict={
x: np.array(t_data).reshape((-1,1))
})
# Bare expression — notebook-style cell output of the prediction.
y_out
@gaphex
gaphex / finetuning_example.py
Created December 10, 2019 11:40
Binary classification with BertLayer
# Binary classification head on top of a BERT encoder.
# Input: one raw string per example (shape (1,)); BertLayer handles
# tokenization internally (do_preprocessing=True).
inp = tf.keras.Input(shape=(1,), dtype=tf.string)
# 'cls' pooling takes the [CLS] token representation; only the top 3
# transformer layers are fine-tuned, embeddings stay frozen.
encoder = BertLayer(bert_path="./bert-module/", seq_len=48,
tune_embeddings=False, do_preprocessing=True,
pooling='cls', n_tune_layers=3, verbose=False)
# Single sigmoid unit -> probability for the positive class.
pred = tf.keras.layers.Dense(1, activation='sigmoid')(encoder(inp))
model = tf.keras.models.Model(inputs=[inp], outputs=[pred])
model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
# trX/trY (train) and tsX/tsY (validation) are defined elsewhere.
model.fit(trX, trY, validation_data=[tsX, tsY], batch_size=128, epochs=5)
We can't make this file beautiful and searchable because it's too large.
-0.033 -0.578 0.1926 0.2186 -0.5674 0.2001 0.4424 -0.2903 0.1769 0.355 0.2542 -1.124 0.1263 -0.5547 0.157 -0.1073 -0.02002 -0.1493 0.02702 0.1525 0.1299 0.3364 0.276 -0.004074 -0.3472 0.4946 -0.1622 0.3628 0.03714 0.3384 0.3384 -0.4062 -0.392 -0.3782 0.2084 0.09204 -0.01868 0.1995 -0.03073 0.4304 0.087 -0.04553 -0.6196 0.05472 0.337 0.4192 0.4705 -0.2595 -0.2952 -1.256 -0.10583 0.4216 -0.03677 0.04718 -0.414 0.651 -0.1908 0.2788 -0.125 0.1112 -0.3457 -0.4543 0.256 0.444 0.372 0.007282 0.254 -0.0879 -0.9517 0.1906 0.0641 0.3645 0.09 -0.4607 -0.4106 -0.1879 0.4492 0.12067 -0.1442 0.3267 0.1013 -0.4993 -0.2394 0.814 -0.2183 0.3274 -0.2252 -0.02669 0.2515 -0.457 -0.0087 -0.6934 0.312 0.1274 -0.3042 0.05402 -0.06024 -0.3518 0.5913 0.063 -0.4602 -0.6064 -0.04974 -0.01523 0.4531 -0.07745 -0.01627 0.2477 0.2498 -0.02473 0.10626 0.164 0.3872 -0.4756 0.2184 -0.7964 -0.3525 0.4448 0.02672 -0.432 0.1124 0.3257 0.3655 -0.203 -0.2278 0.01701 0.1403 0.235 0.0183 -0.1366 -0.5996 0.1203 0.3403 0.04422 -0.16 -0.657 0.362 -0.18
Label Id Sentence
blagotvoritelnost_212 0 греф_помогает_тяжело_больным
blagotvoritelnost_212 0 греф_миллиардер
blagotvoritelnost_212 0 может_поможете_бедным_у_вас_же_много_денег
blagotvoritelnost_212 0 сбербанк_заботится_о_больных
blagotvoritelnost_212 0 сбербанк_помогает_людям
blagotvoritelnost_212 0 грефу_только_денег_не_надо
blagotvoritelnost_212 0 сбербанк_заботится_о_ветеранах
blagotvoritelnost_212 0 грефу_только_денег_не_надо
blagotvoritelnost_212 0 греф_помогает_научному_прогрессу