Greg Condit (conditg)
import numpy as np
from scipy.special import expit as sigmoid
import torch
from torch import nn
def forget_gate(x, h, Weights_hf, Bias_hf, Weights_xf, Bias_xf, prev_cell_state):
    # Forget gate: sigmoid(W_hf.h + b_hf + W_xf.x + b_xf) produces values in (0, 1)
    # that scale the previous cell state elementwise, deciding how much to keep.
    forget_hidden = np.dot(Weights_hf, h) + Bias_hf
    forget_eventx = np.dot(Weights_xf, x) + Bias_xf
    return np.multiply(sigmoid(forget_hidden + forget_eventx), prev_cell_state)
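# Minimal usage sketch (not part of the original gist): the sizes, seed, and
# randomly initialized weights below are assumptions chosen only to show the
# expected shapes -- a hidden state of size 3 and an input event of size 2.
hidden_size, input_size = 3, 2
rng = np.random.default_rng(0)
h = rng.standard_normal(hidden_size)
x = rng.standard_normal(input_size)
Weights_hf = rng.standard_normal((hidden_size, hidden_size))
Weights_xf = rng.standard_normal((hidden_size, input_size))
Bias_hf = rng.standard_normal(hidden_size)
Bias_xf = rng.standard_normal(hidden_size)
prev_cell_state = rng.standard_normal(hidden_size)
# Returns a vector of shape (hidden_size,): the gated previous cell state.
print(forget_gate(x, h, Weights_hf, Bias_hf, Weights_xf, Bias_xf, prev_cell_state))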