w2v_training_forward_pass
import numpy as np

class word2vec():
    def train(self, training_data):
        ##Removed##
        # Cycle through each epoch
        for i in range(self.epochs):
            # Initialise loss to 0
            self.loss = 0
            # Cycle through each training sample
            # w_t = one-hot vector for target word, w_c = one-hot vectors for context words
            for w_t, w_c in training_data:
                # Forward pass - pass in the vector for the target word (w_t) to get:
                # 1. prediction via softmax (y_pred), 2. hidden layer (h), 3. output layer before softmax (u)
                y_pred, h, u = self.forward_pass(w_t)
                ##Removed##

    def forward_pass(self, x):
        # x is the one-hot vector for the target word, shape 9x1
        # Multiply by the first matrix (w1) to get the hidden layer - 10x9 dot 9x1 gives 10x1
        h = np.dot(self.w1.T, x)
        # Dot product of the hidden layer with the second matrix (w2) - 9x10 dot 10x1 gives 9x1
        u = np.dot(self.w2.T, h)
        # Run the 9x1 scores through softmax to squash each element into [0, 1] - 9x1
        y_c = self.softmax(u)
        return y_c, h, u

    def softmax(self, x):
        # Subtract the max before exponentiating for numerical stability;
        # this shifts the logits but leaves the softmax output unchanged
        e_x = np.exp(x - np.max(x))
        return e_x / e_x.sum(axis=0)
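
Below is a minimal sketch of how forward_pass might be exercised, assuming the 9-word vocabulary and 10-dimensional hidden layer implied by the shape comments above. The random weight initialisation and the particular one-hot target word are hypothetical choices for illustration, not part of the original gist.

import numpy as np

# Hypothetical setup: 9-word vocabulary, 10-dimensional embeddings
v_count, n = 9, 10
model = word2vec()
model.w1 = np.random.uniform(-1, 1, (v_count, n))  # 9x10 embedding matrix
model.w2 = np.random.uniform(-1, 1, (n, v_count))  # 10x9 output matrix

# One-hot vector for an arbitrary target word (index 2 chosen for illustration)
w_t = np.zeros(v_count)
w_t[2] = 1

y_pred, h, u = model.forward_pass(w_t)
print(y_pred.shape, y_pred.sum())  # (9,) and ~1.0

Because softmax normalises its input, y_pred sums to 1 and can be read as the predicted probability of each vocabulary word appearing in the target word's context.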