Mohammed Innat (innat)
@innat
innat / XNet TF.Keras 1.py
Last active April 3, 2023 07:49
Official Implementation of UNet++ (EfficientNets) in TensorFlow 2
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import Model, Input, layers
from tensorflow.keras.layers import (
    Conv2D, Conv2DTranspose, UpSampling2D,
    BatchNormalization, Activation, Concatenate
)

class ConvoBlocks(tf.keras.layers.Layer):
    def __init__(self, num_filters=256,
                 kernel_size=3, dilation_rate=1,
                 padding="same", use_bias=False, **kwargs):
        super(ConvoBlocks, self).__init__(**kwargs)
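The preview cuts off inside __init__. A minimal sketch of how a block like this typically continues (Conv2D, then BatchNorm, then ReLU); the attribute names and the call body are my assumptions, not the gist's exact code:

        # Assumed continuation: convolution followed by batch norm and ReLU.
        self.conv = Conv2D(
            num_filters, kernel_size=kernel_size, dilation_rate=dilation_rate,
            padding=padding, use_bias=use_bias,
        )
        self.bn = BatchNormalization()

    def call(self, x, training=None):
        x = self.conv(x)
        x = self.bn(x, training=training)
        return tf.nn.relu(x)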
@innat
innat / Deep-Neural-Decision-Forest.py
Last active November 13, 2021 00:29
TF 2 implementation of Deep Neural Decision Forests.
# Reference: https://keras.io/examples/structured_data/deep_neural_decision_forests/
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

class NeuralDecisionTree(keras.Model):
    def __init__(self, depth, num_features, used_features_rate, num_classes):
        super(NeuralDecisionTree, self).__init__()
        self.depth = depth
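The preview stops at self.depth. The linked Keras example continues by building a random feature mask, trainable leaf class distributions, and a sigmoid routing layer; roughly (assuming numpy is imported as np):

        self.num_leaves = 2 ** depth
        self.num_classes = num_classes

        # Randomly select a subset of the input features for this tree.
        num_used_features = int(num_features * used_features_rate)
        one_hot = np.eye(num_features)
        sampled_indices = np.random.choice(
            np.arange(num_features), num_used_features, replace=False
        )
        self.used_features_mask = one_hot[sampled_indices]

        # Trainable class distributions held at the leaves.
        self.pi = tf.Variable(
            initial_value=tf.random_normal_initializer()(
                shape=[self.num_leaves, self.num_classes]
            ),
            dtype="float32",
            trainable=True,
        )

        # Dense layer producing sigmoid routing probabilities at the inner nodes.
        self.decision_fn = layers.Dense(
            units=self.num_leaves, activation="sigmoid", name="decision"
        )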
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

print('TensorFlow', tf.__version__)

class ResidualBlock(layers.Layer):
    def __init__(self, block_type=None, n_filters=None):
        super(ResidualBlock, self).__init__()
        self.n_filters = n_filters
        if block_type == 'identity':
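The snippet is cut off at the block_type check. As a rough sketch (my assumption, following the usual ResNet pattern, not the gist's code), an 'identity' block keeps stride 1 and adds the input directly, while a 'conv' block downsamples and projects the shortcut:

            # Assumed continuation: identity block keeps the spatial size.
            self.strides = 1
        else:
            # 'conv' block: downsample and project the shortcut to match shapes.
            self.strides = 2
            self.shortcut_conv = layers.Conv2D(n_filters, 1, strides=2, padding='same')
        self.conv_1 = layers.Conv2D(n_filters, 3, strides=self.strides, padding='same')
        self.conv_2 = layers.Conv2D(n_filters, 3, padding='same')
        self.bn_1 = layers.BatchNormalization()
        self.bn_2 = layers.BatchNormalization()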
@innat
innat / ReflectionRemoval.md
Last active August 22, 2021 13:30
A quick note on reflection removal.

Reflection Removal with Deep Learning

A few interesting models for reflection removal.

import tensorflow as tf
from tensorflow.keras.layers import *
from tensorflow.keras.models import Model
from tensorflow.keras.utils import plot_model

class Conv3DBatchNorm(tf.keras.layers.Layer):
    def __init__(self, nb_filters, kernel_size, padding, strides):
        super(Conv3DBatchNorm, self).__init__()
        # parameters
        self.nb_filters = nb_filters
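The preview ends after the first attribute. A minimal sketch of how a Conv3D + BatchNorm layer usually continues; the layer attributes and the call body are my assumptions:

        self.kernel_size = kernel_size
        self.padding = padding
        self.strides = strides

        # Assumed layers: 3D convolution followed by batch normalization.
        self.conv_3d = Conv3D(nb_filters, kernel_size=kernel_size,
                              strides=strides, padding=padding)
        self.batch_norm = BatchNormalization()

    def call(self, inputs, training=None):
        x = self.conv_3d(inputs)
        x = self.batch_norm(x, training=training)
        return tf.nn.relu(x)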
import os
import matplotlib.pyplot as plt

def vis(path1, path2, n_images, is_random=True, figsize=(16, 16)):
    '''
    https://github.com/innat
    '''
    plt.figure(figsize=figsize)
    image_names = os.listdir(path1)
    masks_names = os.listdir(path2)
    for i in range(n_images):
        if is_random:
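The loop is truncated here. A minimal sketch of how an image/mask viewer like this typically finishes (index selection, side-by-side subplots); it assumes numpy is imported as np, and all names below are my guesses rather than the gist's code:

            # Assumed continuation: pick a random pair, otherwise go in order.
            idx = np.random.randint(len(image_names))
        else:
            idx = i
        image = plt.imread(os.path.join(path1, image_names[idx]))
        mask = plt.imread(os.path.join(path2, masks_names[idx]))

        plt.subplot(n_images, 2, 2 * i + 1)
        plt.imshow(image)
        plt.axis('off')

        plt.subplot(n_images, 2, 2 * i + 2)
        plt.imshow(mask, cmap='gray')
        plt.axis('off')
    plt.show()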
@innat
innat / focusnet_sample.py
Created June 16, 2021 19:17 — forked from prerakmody/focusnet_sample.py
Netron.app example to visualize a TensorFlow 2.x model
"""
pip install tensorflow
pip install tf2onnx keras2onnx onnxmltools
"""
import os
import pdb
import json
import traceback
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
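For the Netron part, a minimal sketch of converting a Keras model to ONNX with tf2onnx so the graph can be opened in https://netron.app; the model and output file name here are placeholders, not the gist's:

import tensorflow as tf
import tf2onnx

model = tf.keras.applications.MobileNetV2(weights=None)  # placeholder model
spec = (tf.TensorSpec((None, 224, 224, 3), tf.float32, name="input"),)
# Writes model.onnx, which Netron can open directly.
model_proto, _ = tf2onnx.convert.from_keras(
    model, input_signature=spec, opset=13, output_path="model.onnx"
)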
@innat
innat / Gradient_Accumulation_TF2.py
Last active June 9, 2025 10:24
Gradient Accumulation with Custom fit in TF.Keras. MNIST example.
import tensorflow as tf

# credit: https://stackoverflow.com/a/66524901/9215780
class CustomTrainStep(tf.keras.Model):
    def __init__(self, n_gradients, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.n_gradients = tf.constant(n_gradients, dtype=tf.int32)
        self.n_acum_step = tf.Variable(0, dtype=tf.int32, trainable=False)
        self.gradient_accumulation = [
            tf.Variable(tf.zeros_like(v, dtype=tf.float32), trainable=False)
            for v in self.trainable_variables
        ]
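The preview stops at the accumulator variables. Paraphrasing the linked Stack Overflow answer, the custom train_step adds each batch's gradients into the accumulators and only applies them every n_gradients steps (a sketch; details may differ from the gist):

    def train_step(self, data):
        self.n_acum_step.assign_add(1)
        x, y = data
        with tf.GradientTape() as tape:
            y_pred = self(x, training=True)
            loss = self.compiled_loss(y, y_pred, regularization_losses=self.losses)
        gradients = tape.gradient(loss, self.trainable_variables)
        for i in range(len(self.gradient_accumulation)):
            self.gradient_accumulation[i].assign_add(gradients[i])

        # Apply the accumulated gradients once every n_gradients steps.
        tf.cond(
            tf.equal(self.n_acum_step, self.n_gradients),
            self.apply_accu_gradients,
            lambda: None,
        )
        self.compiled_metrics.update_state(y, y_pred)
        return {m.name: m.result() for m in self.metrics}

    def apply_accu_gradients(self):
        self.optimizer.apply_gradients(
            zip(self.gradient_accumulation, self.trainable_variables)
        )
        # Reset the step counter and the accumulators.
        self.n_acum_step.assign(0)
        for i in range(len(self.gradient_accumulation)):
            self.gradient_accumulation[i].assign(
                tf.zeros_like(self.trainable_variables[i], dtype=tf.float32)
            )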