This is how we pulled off our amazing 404 page.
A Pen by the nclud team on CodePen.
# Third-party imports for image loading/processing and Keras model building.
# (Cleaned: removed " | |" scrape artifacts that made every line a SyntaxError.)
import cv2
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from PIL import Image
from skimage.transform import resize
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import load_img, img_to_array
from tensorflow.keras.applications import EfficientNetB4, MobileNetV2
0.00632 18.00 2.310 0 0.5380 6.5750 65.20 4.0900 1 296.0 15.30 396.90 4.98 24.00
0.02731 0.00 7.070 0 0.4690 6.4210 78.90 4.9671 2 242.0 17.80 396.90 9.14 21.60
0.02729 0.00 7.070 0 0.4690 7.1850 61.10 4.9671 2 242.0 17.80 392.83 4.03 34.70
0.03237 0.00 2.180 0 0.4580 6.9980 45.80 6.0622 3 222.0 18.70 394.63 2.94 33.40
0.06905 0.00 2.180 0 0.4580 7.1470 54.20 6.0622 3 222.0 18.70 396.90 5.33 36.20
0.02985 0.00 2.180 0 0.4580 6.4300 58.70 6.0622 3 222.0 18.70 394.12 5.21 28.70
0.08829 12.50 7.870 0 0.5240 6.0120 66.60 5.5605 5 311.0 15.20 395.60 12.43 22.90
0.14455 12.50 7.870 0 0.5240 6.1720 96.10 5.9505 5 311.0 15.20 396.90 19.15 27.10
0.21124 12.50 7.870 0 0.5240 5.6310 100.00 6.0821 5 311.0 15.20 386.63 29.93 16.50
0.17004 12.50 7.870 0 0.5240 6.0040 85.90 6.5921 5 311.0 15.20 386.71 17.10 18.90
import tensorflow as tf

# Enable mixed-precision training: layers compute in float16 while variables
# stay in float32 (the policy's default variable dtype).
policy = tf.keras.mixed_precision.Policy('mixed_float16')
tf.keras.mixed_precision.set_global_policy(policy)

# Functional-API MLP over flattened 28x28 inputs (784 features).
# Fixed: original referenced bare `keras` and `layers`, which are not in
# scope here (only `tensorflow as tf` is imported) -> NameError.
inputs = tf.keras.Input(shape=(784,))
x = tf.keras.layers.Dense(4096, activation='relu')(inputs)
x = tf.keras.layers.Dense(4096, activation='relu')(x)
x = tf.keras.layers.Dense(10)(x)
# The final softmax is forced to float32 for numeric stability under the
# mixed_float16 global policy (per the TF mixed-precision guide).
outputs = tf.keras.layers.Activation('softmax', dtype='float32')(x)
import tensorflow as tf

# Mixed precision: float16 compute dtype, float32 variable dtype.
policy = tf.keras.mixed_precision.Policy('mixed_float16')
tf.keras.mixed_precision.set_global_policy(policy)

# Two-hidden-layer MLP classifier (784 -> 4096 -> 4096 -> 10).
# Fixed: `keras.Input` and `layers.Dense` were undefined names — only
# `tensorflow as tf` is imported — so qualify everything via `tf.keras`.
inputs = tf.keras.Input(shape=(784,))
x = tf.keras.layers.Dense(4096, activation='relu')(inputs)
x = tf.keras.layers.Dense(4096, activation='relu')(x)
x = tf.keras.layers.Dense(10)(x)
# Keep the output activation in float32 so the softmax does not lose
# precision under the mixed_float16 policy.
outputs = tf.keras.layers.Activation('softmax', dtype='float32')(x)
def model_builder(hp):
    """Build a tunable MLP for 28x28 inputs (keras-tuner hypermodel).

    Args:
        hp: keras-tuner ``HyperParameters`` object used to sample the
            width of the first Dense layer.

    Returns:
        An uncompiled ``tf.keras.Sequential`` model.
    """
    # Fixed: original used bare `keras` (not imported; only `tensorflow as
    # tf` is) and never returned the model, which keras-tuner requires.
    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
    # Tune the number of units in the first Dense layer:
    # an optimal value between 32 and 512, in steps of 32.
    hp_units = hp.Int('units', min_value=32, max_value=512, step=32)
    model.add(tf.keras.layers.Dense(units=hp_units, activation='relu'))
    model.add(tf.keras.layers.Dense(10))
    return model
import kerastuner as kt
import tensorflow as tf


def model_builder(hp):
    """Build and compile a tunable MLP for a keras-tuner search.

    Args:
        hp: ``kt.HyperParameters`` used to sample the first layer's width.

    Returns:
        A compiled ``tf.keras.Sequential`` model.
    """
    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
    # Tune the number of units in the first Dense layer:
    # an optimal value between 32 and 512, in steps of 32.
    hp_units = hp.Int('units', min_value=32, max_value=512, step=32)
    model.add(tf.keras.layers.Dense(units=hp_units, activation='relu'))
    model.add(tf.keras.layers.Dense(10))

    # Fixed: the schedule/compile lines below were dedented to module level,
    # where `model` is undefined (NameError), and `hp_units` was sampled but
    # never used; the builder also never returned the model. Reattached the
    # tail into the function body and added the Dense layers + return.
    # Learning rate decays by 4% every 100k steps (staircase schedule).
    initial_learning_rate = 0.1
    lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate,
        decay_steps=100000,
        decay_rate=0.96,
        staircase=True)
    model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=lr_schedule),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model