import math
import torch
from torch import nn
import torch.nn.functional as F

class CrossAttention(nn.Module):
    def __init__(self, d):
        """
        Arguments:
        d: size of the embedding dimension
        """
        super().__init__()
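        # NOTE: the gist embed is cut off above; the rest of this module is a hedged
        # sketch of minimal single-head cross-attention (queries come from one token
        # sequence, keys/values from another), not necessarily the original code.
        self.w_q = nn.Linear(d, d, bias=False)
        self.w_k = nn.Linear(d, d, bias=False)
        self.w_v = nn.Linear(d, d, bias=False)
        self.d = d

    def forward(self, x_q, x_kv):
        # x_q: [B, T_q, d] query tokens, x_kv: [B, T_kv, d] key/value tokens
        q, k, v = self.w_q(x_q), self.w_k(x_kv), self.w_v(x_kv)

        # scaled dot-product attention; no causal mask is needed because the
        # queries attend over a separate, fully visible sequence
        att = (q @ k.transpose(-2, -1)) / math.sqrt(self.d)
        att = F.softmax(att, dim=-1)
        return att @ v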
import math
import torch
from torch import nn
import torch.nn.functional as F

class SelfAttention(nn.Module):
    def __init__(self, d):
        """
        Arguments:
        d: size of the embedding dimension
        """
        super().__init__()
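        # NOTE: the gist embed is cut off above; the rest of this module is a hedged
        # sketch of minimal single-head causal self-attention, not necessarily the
        # original code.
        self.w_qkv = nn.Linear(d, 3 * d, bias=False)  # joint query/key/value projection
        self.d = d

    def forward(self, x):
        # x: [B, T, d] token embeddings
        B, T, _ = x.size()
        q, k, v = self.w_qkv(x).split(self.d, dim=-1)

        # scaled dot-product attention with a causal mask so that each token only
        # attends to itself and earlier positions
        att = (q @ k.transpose(-2, -1)) / math.sqrt(self.d)
        mask = torch.tril(torch.ones(T, T, dtype=torch.bool, device=x.device))
        att = att.masked_fill(~mask, float('-inf'))
        att = F.softmax(att, dim=-1)
        return att @ v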
import torch
from torch import nn

class MoEBlock(nn.Module):
    def __init__(
        self,
        d,
        H,
        C,
        n_exp,
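        # NOTE: the gist embed is cut off above; the argument meanings and the block
        # structure below are a hedged sketch of a pre-norm transformer block whose
        # feed-forward MLP is replaced by an MoE layer, not necessarily the original
        # implementation.
    ):
        """
        Arguments:
        d: size of the embedding dimension
        H: number of attention heads
        C: maximum sequence (context) length
        n_exp: number of experts in the MoE layer
        """
        super().__init__()
        self.ln_1 = nn.LayerNorm(d)
        self.attn = nn.MultiheadAttention(d, H, batch_first=True)
        self.ln_2 = nn.LayerNorm(d)
        # MOELayer is the MoE feed-forward layer from the companion gist; the
        # (d, n_exp) call signature used here is an assumption
        self.moe = MOELayer(d, n_exp)
        # causal mask over the maximum context length (True = masked position)
        self.register_buffer("mask", torch.triu(torch.ones(C, C, dtype=torch.bool), diagonal=1))

    def forward(self, x):
        # x: [B, T, d] token embeddings
        T = x.size(1)
        # attention sub-layer with residual connection
        a = self.ln_1(x)
        x = x + self.attn(a, a, a, attn_mask=self.mask[:T, :T], need_weights=False)[0]
        # MoE feed-forward sub-layer with residual connection
        x = x + self.moe(self.ln_2(x))
        return x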
""" | |
Based upon ColossalAI OpenMoE | |
""" | |
from torch import nn | |
class MOELayer(nn.Module): | |
def __init__( | |
self, | |
d, |
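        # NOTE: the gist embed is cut off above; the remaining arguments and the
        # layer body below are a hedged sketch of a simple top-k MoE feed-forward
        # layer (every expert is applied densely and the outputs are mixed by the
        # router weights), not the ColossalAI OpenMoE implementation it is based on.
        n_exp = 8,
        top_k = 2,
    ):
        """
        Arguments:
        d: size of the embedding dimension
        n_exp: number of experts
        top_k: number of active experts per token
        """
        super().__init__()
        self.top_k = top_k
        # linear router producing one logit per expert for every token
        self.router = nn.Linear(d, n_exp, bias=False)
        # each expert is an independent feed-forward MLP
        self.experts = nn.ModuleList(
            [nn.Sequential(nn.Linear(d, 4 * d), nn.GELU(), nn.Linear(4 * d, d)) for _ in range(n_exp)]
        )

    def forward(self, x):
        # x: [B, T, d] token embeddings
        logits = self.router(x)                                # [B, T, n_exp]
        probs = logits.softmax(dim=-1)                         # routing probabilities
        topk_probs, topk_idx = probs.topk(self.top_k, dim=-1)  # [B, T, top_k]

        # weighted combination of the top-k experts' outputs for each token
        out = x.new_zeros(x.shape)
        for i, expert in enumerate(self.experts):
            # routing weight of expert i for each token (zero if not in the top-k)
            sel = (topk_idx == i).to(topk_probs.dtype)               # [B, T, top_k]
            weight = (topk_probs * sel).sum(dim=-1, keepdim=True)    # [B, T, 1]
            out = out + weight * expert(x)
        return out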
""" | |
Computes ST-MoE router z loss (https://arxiv.org/abs/2202.08906) | |
See equation (5) on page 7 | |
""" | |
import torch | |
# constants | |
B = 16 # batch size | |
C = 256 # sequence length |
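# NOTE: the gist embed is cut off above; the remainder is a hedged sketch of the
# z loss computation: the squared logsumexp of the router logits, averaged over
# all tokens in the batch.
n_exp = 8  # number of experts

# random router logits standing in for the output of the routing layer: one
# logit per expert for every token
logits = torch.rand(B, C, n_exp)

# z loss = mean over tokens of (log sum_j exp(logit_j))^2
z_loss = torch.logsumexp(logits, dim=-1)  # [B, C]
z_loss = torch.square(z_loss).mean()
print(f'router z loss: {z_loss.item():.4f}')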
""" | |
Computes Switch Transformer auxiliary loss (https://arxiv.org/abs/2101.03961) | |
See equations (4)-(6) on page 7 | |
""" | |
import torch | |
import torch.nn.functional as F | |
# constants | |
B = 16 # batch size |
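# NOTE: the gist embed is cut off above; the remainder is a hedged sketch of the
# Switch Transformer load-balancing loss: alpha * n_exp * sum_i f_i * P_i, where
# f_i is the fraction of tokens routed to expert i and P_i is the average router
# probability assigned to expert i.
C = 256       # sequence length
n_exp = 8     # number of experts
alpha = 0.01  # weight of the auxiliary loss

# random router logits standing in for the output of the routing layer
logits = torch.rand(B, C, n_exp)
probs = F.softmax(logits, dim=-1)  # routing probabilities, [B, C, n_exp]

# f_i: fraction of tokens whose top-1 expert is expert i (equation (5))
top1 = probs.argmax(dim=-1)  # [B, C]
f = F.one_hot(top1, num_classes=n_exp).float().mean(dim=(0, 1))  # [n_exp]

# P_i: average routing probability assigned to expert i (equation (6))
P = probs.mean(dim=(0, 1))  # [n_exp]

# auxiliary loss (equation (4))
aux_loss = alpha * n_exp * torch.sum(f * P)
print(f'load balancing loss: {aux_loss.item():.4f}')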
import math
import torch
from torch import nn
from torch.nn import functional as F

class Router(nn.Module):
    def __init__(
        self,
        d,
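        # NOTE: the gist embed is cut off above; the remaining arguments and the
        # router body below are a hedged sketch of a (noisy) top-k softmax router,
        # not necessarily the original implementation.
        n_exp = 8,
        top_k = 2,
        use_noisy_top_k = True,
    ):
        """
        Arguments:
        d: size of the embedding dimension
        n_exp: number of experts
        top_k: number of experts selected for each token
        use_noisy_top_k: whether to add learned Gaussian noise to the router logits
        """
        super().__init__()
        self.top_k = top_k
        self.use_noisy_top_k = use_noisy_top_k
        self.w_g = nn.Linear(d, n_exp, bias=False)      # routing (gating) weights
        self.w_noise = nn.Linear(d, n_exp, bias=False)  # per-expert noise scale

    def forward(self, x):
        # x: [B, T, d] token embeddings
        logits = self.w_g(x)  # [B, T, n_exp]
        if self.use_noisy_top_k and self.training:
            # noisy top-k gating: add Gaussian noise whose scale is predicted
            # from the token itself
            logits = logits + torch.randn_like(logits) * F.softplus(self.w_noise(x))

        # keep only the top-k experts per token; softmax over the selected logits
        topk_logits, topk_idx = logits.topk(self.top_k, dim=-1)
        topk_probs = F.softmax(topk_logits, dim=-1)
        return topk_probs, topk_idx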
import torch
from torch import nn
from torch.nn import functional as F

class BasicSoftmaxRouter(nn.Module):
    def __init__(
        self,
        d,
        n_exp = 8,
        top_k = 2,
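        # NOTE: the gist embed is cut off above; the remainder is a hedged sketch of
        # a basic softmax router that selects the top-k experts per token, not
        # necessarily the original implementation.
    ):
        """
        Arguments:
        d: size of the embedding dimension
        n_exp: number of experts
        top_k: number of experts selected for each token
        """
        super().__init__()
        self.top_k = top_k
        self.w_g = nn.Linear(d, n_exp, bias=False)  # routing (gating) weights

    def forward(self, x):
        # x: [B, T, d] token embeddings
        logits = self.w_g(x)  # [B, T, n_exp]
        topk_logits, topk_idx = logits.topk(self.top_k, dim=-1)
        topk_probs = F.softmax(topk_logits, dim=-1)  # softmax over the selected experts only
        return topk_probs, topk_idx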
""" | |
Based upon ColossalAI OpenMoE | |
""" | |
import torch | |
from torch import nn | |
class MLPExperts(nn.Module): | |
def __init__( |
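        # NOTE: the gist embed is cut off above; the arguments and layer body below
        # are a hedged sketch of a batch of expert MLPs whose weights are stored as
        # stacked parameters and applied with a single batched matmul, not the
        # ColossalAI OpenMoE implementation it is based on.
        self,
        d,
        n_exp = 8,
    ):
        """
        Arguments:
        d: size of the embedding dimension
        n_exp: number of experts
        """
        super().__init__()
        # one [d, 4d] / [4d, d] weight matrix per expert, stacked along dim 0
        self.c_fc = nn.Parameter(torch.empty(n_exp, d, 4 * d))
        self.c_proj = nn.Parameter(torch.empty(n_exp, 4 * d, d))
        nn.init.normal_(self.c_fc, std=0.02)
        nn.init.normal_(self.c_proj, std=0.02)
        self.gelu = nn.GELU()

    def forward(self, x):
        # x: [n_exp, capacity, d] tokens already grouped by the expert they route to
        x = torch.bmm(x, self.c_fc)    # [n_exp, capacity, 4d]
        x = self.gelu(x)
        x = torch.bmm(x, self.c_proj)  # [n_exp, capacity, d]
        return x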
import torch
from transformers import AutoTokenizer

# load the llama-3.1 tokenizer
tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-3.1-8B')

# raw text
text = "This raw text will be tokenized"

# create tokens using tokenizer
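# NOTE: the gist embed is cut off above; the lines below are a hedged sketch of
# the tokenization call itself and of decoding the result back into text.
token_ids = tokenizer(text, return_tensors='pt').input_ids  # [1, num_tokens] tensor of token ids
print(token_ids)

# map the token ids back to text
print(tokenizer.decode(token_ids[0]))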