Sinusoidal position encoding of the Transformer model, implemented in NumPy.
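For a position pos and a pair of embedding dimensions (2i, 2i+1), the table follows the sinusoidal scheme from "Attention Is All You Need": PE(pos, 2i) = sin(pos / 10000^(2i / d_hid)) and PE(pos, 2i+1) = cos(pos / 10000^(2i / d_hid)). Each pair of dimensions shares one frequency, so nearby positions receive smoothly varying encodings.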
import numpy as np

def get_sinusoid_encoding_table(n_position, d_hid, padding_idx=None):
    '''NumPy sinusoidal position encoding of the Transformer model.

    params:
        n_position (n): number of positions
        d_hid (m): dimension of the embedding vector
        padding_idx: optional index of the padding position; that row is zeroed out
    return:
        sinusoid_table (n x m): numpy array of position encodings
    '''
    def cal_angle(position, hid_idx):
        # Dimensions 2i and 2i+1 share the frequency 1 / 10000^(2i / d_hid).
        return position / np.power(10000, 2 * (hid_idx // 2) / d_hid)

    def get_posi_angle_vec(position):
        return [cal_angle(position, hid_j) for hid_j in range(d_hid)]

    sinusoid_table = np.array([get_posi_angle_vec(pos_i) for pos_i in range(n_position)])
    sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2])  # dim 2i
    sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2])  # dim 2i+1

    if padding_idx is not None:
        # zero vector for the padding position
        sinusoid_table[padding_idx] = 0.

    return sinusoid_table
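A minimal usage sketch (the sizes and padding_idx=0 below are illustrative assumptions, not part of the original gist):

# Build a table for 50 positions with a 512-dimensional embedding,
# treating position 0 as padding (illustrative values).
table = get_sinusoid_encoding_table(n_position=50, d_hid=512, padding_idx=0)
print(table.shape)     # (50, 512)
print(table[0].any())  # False: the padding row is all zeros
print(table[1, :4])    # sin/cos pairs for position 1, values in [-1, 1]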