cjratcliff / adam.py
Implementation of Adam in TensorFlow
import tensorflow as tf

class AdamOptimizer(tf.train.Optimizer):
    def __init__(self, alpha=0.001,
                 beta1=0.9,
                 beta2=0.999,
                 epsilon=1e-8):
        # The tf.train.Optimizer base class expects a lock flag and a name.
        super(AdamOptimizer, self).__init__(use_locking=False, name="Adam")
        # Hyperparameters from Kingma & Ba (2015): alpha is the step size,
        # beta1/beta2 decay the first and second moment estimates, and
        # epsilon guards against division by zero.
        self.alpha = alpha
        self.beta1 = beta1
        self.beta2 = beta2
        self.epsilon = epsilon
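
The preview stops at the constructor; for reference, here is a minimal NumPy sketch of the update rule an Adam optimizer computes (the bias-corrected moment estimates from the paper; the function and variable names are illustrative, not from the gist):

import numpy as np

def adam_step(param, grad, m, v, t, alpha=0.001,
              beta1=0.9, beta2=0.999, epsilon=1e-8):
    """One Adam update; t is the 1-based step count."""
    m = beta1 * m + (1 - beta1) * grad        # first moment (mean) estimate
    v = beta2 * v + (1 - beta2) * grad ** 2   # second moment (uncentered variance)
    m_hat = m / (1 - beta1 ** t)              # bias correction for zero init
    v_hat = v / (1 - beta2 ** t)
    param = param - alpha * m_hat / (np.sqrt(v_hat) + epsilon)
    return param, m, v
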
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation

N = 100            # sample/particle count (usage not shown in the preview)
dt = 1 / 30        # time step per frame (30 fps)
iterations = 600   # total frames to render
bounds = [-2, 2, -2, 2]     # axis limits, likely [xmin, xmax, ymin, ymax]
init_state = [-0.5, -0.5]   # initial (x, y) position
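
The snippet cuts off before the animation itself; a minimal sketch of how constants like these typically drive matplotlib.animation.FuncAnimation, continuing from the imports and constants above (the animate function and its dynamics are placeholders, not the gist's code):

fig, ax = plt.subplots()
ax.set_xlim(bounds[0], bounds[1])
ax.set_ylim(bounds[2], bounds[3])
point, = ax.plot([], [], 'bo')

state = list(init_state)

def animate(i):
    # Placeholder dynamics: nudge the point each frame so something moves.
    state[0] += 0.01 * dt
    state[1] += 0.01 * dt
    point.set_data([state[0]], [state[1]])
    return point,

ani = animation.FuncAnimation(fig, animate, frames=iterations,
                              interval=dt * 1000, blit=True)
plt.show()
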
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Adapted from https://github.com/fchollet/keras/blob/master/examples/imdb_lstm.py
from __future__ import print_function
from __future__ import division
from keras.preprocessing import sequence
from keras.datasets import imdb
from keras.layers.core import Dense
from sklearn.model_selection import train_test_split  # cross_validation was removed in modern scikit-learn
from sklearn.preprocessing import LabelBinarizer
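
These imports track the Keras IMDB LSTM example; a minimal sketch of the data loading and padding they usually precede (max_features and maxlen are illustrative values, not from the gist; very old Keras versions spell num_words as nb_words):

max_features = 20000   # vocabulary size: keep only the most frequent words
maxlen = 80            # truncate/pad each review to this many tokens

(X_train, y_train), (X_test, y_test) = imdb.load_data(num_words=max_features)
X_train = sequence.pad_sequences(X_train, maxlen=maxlen)
X_test = sequence.pad_sequences(X_test, maxlen=maxlen)
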
import tensorflow as tf

@tf.RegisterGradient("CustomRelu")
def _custom_relu_grad(op, grad):
    # Standard ReLU gradient: pass grad through where the output was positive.
    # return gen_nn_ops._relu_grad(grad, op.outputs[0])
    return tf.where(tf.greater(op.outputs[0], 0.0), grad, tf.zeros_like(grad))
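
In TF 1.x, a registered gradient like this is typically wired in with gradient_override_map; a minimal usage sketch (the graph and tensors here are illustrative):

g = tf.get_default_graph()
x = tf.constant([-1.0, 2.0])
with g.gradient_override_map({"Relu": "CustomRelu"}):
    y = tf.nn.relu(x)  # forward op unchanged; backprop now calls _custom_relu_grad
grads = tf.gradients(y, x)
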
import time
import numpy as np
import tensorflow as tf
from keras.datasets import mnist, cifar10, cifar100
import matplotlib.pyplot as plt
from utils import get_minibatches_idx

# Based on https://jmetzen.github.io/2015-11-27/vae.html
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from sklearn.model_selection import train_test_split
from utils import get_minibatches_idx
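
The preview ends at the imports; the heart of the VAE in jmetzen's tutorial is the reparameterization trick, sketched below in TF 1.x style (latent_dim, h, and the layer calls are illustrative assumptions, not the gist's code):

import tensorflow as tf

latent_dim = 2                               # illustrative latent size
h = tf.placeholder(tf.float32, [None, 128])  # stand-in for the encoder's hidden layer

# Encoder head: mean and log-variance of the approximate posterior q(z|x).
z_mean = tf.layers.dense(h, latent_dim)
z_log_var = tf.layers.dense(h, latent_dim)

# Reparameterization trick: z = mu + sigma * eps with eps ~ N(0, I),
# which keeps the sampling step differentiable w.r.t. the encoder weights.
eps = tf.random_normal(tf.shape(z_mean))
z = z_mean + tf.exp(0.5 * z_log_var) * eps
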