Skip to content

Instantly share code, notes, and snippets.

View ajinkyajawale14499's full-sized avatar
🎯
Focusing

Ajinkya ajinkyajawale14499

🎯
Focusing
View GitHub Profile
@ajinkyajawale14499
ajinkyajawale14499 / LevelOrderTraversaloneQ.cpp
Created July 28, 2019 18:46
Print level order traversal of Binary Tree using One Queue in O(n)
// code goes here!
#include <iostream>
#include <queue>
using namespace std;
// node structure
struct Node
{
int data;
@ajinkyajawale14499
ajinkyajawale14499 / LevelOrderBinaryTree.java
Created July 28, 2019 18:22
Print level order traversal of Binary Tree
import java.util.*;
class Node
{
int data;
Node left, right;
public Node(int item)
{
data = item;
left = right = null;
}
@ajinkyajawale14499
ajinkyajawale14499 / kDistanceBinaryTree.cpp
Created July 28, 2019 17:32
Nodes at k distance from root of Binary Tree
//code goes here!
#include <bits/stdc++.h>
#include <iostream>
#include <algorithm>
using namespace std;
class Node
{
public:
int data;
# Pull the per-epoch training curves out of the fine-tuning run's
# Keras History object.
# NOTE(review): scraped notebook fragment — `history_fine` and `plt`
# (presumably matplotlib.pyplot) are defined in earlier cells not
# visible here; confirm against the original notebook.
acc = history_fine.history['accuracy']
val_acc = history_fine.history['val_accuracy']
loss = history_fine.history['loss']
val_loss = history_fine.history['val_loss']
# Top panel of a 2-row figure: accuracy curves. `loss`/`val_loss` are
# extracted above but not plotted in this visible fragment — the loss
# subplot likely followed in the original cell.
plt.figure(figsize=(8, 8))
plt.subplot(2, 1, 1)
plt.plot(acc, label='Training Accuracy')
plt.plot(val_acc, label='Validation Accuracy')
# Export the trained Keras model in SavedModel format, then convert it
# to a TensorFlow Lite flatbuffer for on-device inference.
saved_model_dir = 'save/fine_tuning'
tf.saved_model.save(model, saved_model_dir)
converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)
tflite_model = converter.convert()
# Write the serialized model to disk in binary mode.
# NOTE(review): the scrape dropped the indentation of the `with` body —
# the next line must be indented under `with` for this to run.
with open('model.tflite', 'wb') as f:
f.write(tflite_model)
# download the model & labels
# Recompile with a very small learning rate (1e-5) for fine-tuning so
# the pre-trained weights are only gently adjusted.
# NOTE(review): scraped fragment — the continuation lines of this call
# lost their indentation; `model`, `base_model`, and `tf` come from
# earlier cells not visible here.
model.compile(loss='categorical_crossentropy',
optimizer = tf.keras.optimizers.Adam(1e-5),
metrics=['accuracy'])
# Let's take a look to see how many layers are in the base model
print("Number of layers in the base model: ", len(base_model.layers))
# Fine tune from this layer onwards
fine_tune_at = 100
# Freeze all the layers before the `fine_tune_at` layer
# (only layers at index 100 and above remain trainable).
for layer in base_model.layers[:fine_tune_at]:
layer.trainable = False
# Same curve extraction as the fine-tuning cell, but for the initial
# training run's History object (`history`, from model.fit below /
# elsewhere in the notebook).
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
# Top panel of a 2-row figure: accuracy curves; loss was extracted but
# is not plotted in this visible fragment.
plt.figure(figsize=(8, 8))
plt.subplot(2, 1, 1)
plt.plot(acc, label='Training Accuracy')
plt.plot(val_acc, label='Validation Accuracy')
# Initial training: 10 epochs over the (externally defined)
# train/validation generators.
# NOTE(review): in this scrape, model.compile(...) appears AFTER
# model.fit(...). Keras requires compile before fit, so the original
# notebook cells were almost certainly in the opposite order — verify
# against the source before running. Indentation of the continuation
# lines was also lost in the scrape.
epochs = 10
history = model.fit(train_generator,
epochs=epochs,
validation_data=val_generator)
model.compile(optimizer=tf.keras.optimizers.Adam(),
loss='categorical_crossentropy',
metrics=['accuracy'])