Created
May 26, 2022 14:19
-
-
Save pashu123/41f7d777a99e550599437336b4c64430 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
from transformers import AutoTokenizer, AutoModelForSequenceClassification | |
from shark.shark_inference import SharkInference | |
# Pin the RNG so the random test input below is reproducible across runs.
torch.manual_seed(0)

# Tokenizer for the MiniLM checkpoint. NOTE(review): it is never used in
# this script — loaded, presumably, for interactive experimentation.
tokenizer = AutoTokenizer.from_pretrained("microsoft/MiniLM-L12-H384-uncased")
class MiniLMSequenceClassification(torch.nn.Module):
    """Thin nn.Module wrapper around a pretrained MiniLM classifier.

    Exposes a single-tensor ``forward`` so the module can be handed to a
    JIT tracer, which expects plain tensor inputs rather than the keyword
    arguments the Hugging Face model normally takes.
    """

    def __init__(self):
        super().__init__()
        self.model = AutoModelForSequenceClassification.from_pretrained(
            "microsoft/MiniLM-L12-H384-uncased",  # pretrained checkpoint
            num_labels=2,  # binary classification head
            output_attentions=False,  # drop attention weights from output
            output_hidden_states=False,  # drop per-layer hidden states
            torchscript=True,  # tuple outputs so the model is traceable
        )

    def forward(self, tokens):
        # The same tensor is passed three times (positionally: input_ids,
        # then the next two positional args of the HF model).
        # NOTE(review): presumably a tracing shortcut — confirm that using
        # token ids as a mask is acceptable for this experiment.
        outputs = self.model.forward(tokens, tokens, tokens)
        return outputs[0]
# Random 0/1 token ids shaped as one 128-token sequence; int32 as required
# by the downstream compiler path.
test_input = torch.randint(2, (1, 128)).to(torch.int32)

# Trace the wrapper module against the sample input, compile it with
# SHARK, and run a single inference on the same input.
shark_module = SharkInference(
    MiniLMSequenceClassification(),
    (test_input,),
    jit_trace=True,
)
shark_module.compile()
shark_module.forward((test_input,))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment