Metadata-Version: 2.2
Name: onnxscript
Version: 0.1.0.dev20250113
Summary: Naturally author ONNX functions and models using a subset of Python
Author-email: Microsoft Corporation <[email protected]>
License: MIT License
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy

# Hugging Face-style config dict for a LlamaForCausalLM checkpoint;
# the snippet is truncated before the dict closes.
import os

import torch
import transformers

device = "cpu"

config = {
    "_name_or_path": "/fsx/loubna/checkpoints/cosmo2_1T/500000",
    "architectures": ["LlamaForCausalLM"],
    "attention_bias": False,
    "attention_dropout": 0.0,

# Imports for a Llama-3.2 Vision (Mllama) ONNX export script;
# the snippet is truncated inside the `from transformers import (...)` block.
import time

import onnx
import onnxscript.optimizer
import requests
import torch
from PIL import Image
from transformers import (
    AutoProcessor,
    MllamaConfig,
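
Since onnx and onnxscript.optimizer are imported here, the export flow presumably ends with an optimization pass over the produced model. A minimal sketch of that step (the file names are placeholders, not from the original script):

# Sketch: run onnxscript's optimizer over an exported model.
start = time.time()
model_proto = onnx.load("mllama.onnx")  # placeholder path
optimized = onnxscript.optimizer.optimize(model_proto)
onnx.save(optimized, "mllama_optimized.onnx")  # placeholder path
print(f"Optimization took {time.time() - start:.1f}s")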

{
    "input_model": {
        "type": "PyTorchModel",
        "model_script": "model_loader.py",
        "model_loader": "load_model",
        "model_path": "meta-llama/Llama-3.2-11B-Vision",
        "io_config": {
            "input_names": [
                "input_ids",
                "attention_mask",

# Load Llama-3.2-11B-Vision with transformers for ONNX export;
# the snippet is truncated inside the from_pretrained(...) call.
import requests
import torch
from PIL import Image
from transformers import MllamaForConditionalGeneration, AutoProcessor

import onnxscript.optimizer

model_id = "meta-llama/Llama-3.2-11B-Vision"
model = MllamaForConditionalGeneration.from_pretrained(
    model_id,
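
A sketch of the usual continuation (assumed, not recovered from the gist): build processor inputs from a sample image and export with the dynamo-based torch.onnx exporter. The image URL, prompt, and output path are placeholders.

# Sketch continuation; assumes `model` finished loading in eval mode.
processor = AutoProcessor.from_pretrained(model_id)
url = "http://images.cocodataset.org/val2017/000000039769.jpg"  # placeholder sample image
image = Image.open(requests.get(url, stream=True).raw)
inputs = processor(images=image, text="<|image|>Describe this image.", return_tensors="pt")

onnx_program = torch.onnx.export(model, (), kwargs=dict(inputs), dynamo=True)
onnx_program.save("mllama.onnx")  # placeholder output path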

# Imports for exporting / diffing models with the torch_onnx prototype
# and the onnxscript IR and rewriter.
import os
from typing import Sequence

import torch
import torch_onnx
import torch_onnx.tools.diff_model
from onnxscript import ir
import onnxscript
import onnxscript.rewriter.pattern as orp
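
For reference, a pattern-based rewrite with onnxscript.rewriter.pattern generally has the shape below. This is a generic sketch based on the onnxscript rewriter tutorial (the Erf-based GeLU fusion), not a rule from the original gist, and the exact API may differ across onnxscript versions.

import math

import onnx
import onnxscript.rewriter
import onnxscript.rewriter.pattern as orp


def erf_gelu_pattern(op, x):
    # Target pattern: 0.5 * x * (erf(x / sqrt(2)) + 1)
    return 0.5 * (x * (op.Erf(x / math.sqrt(2)) + 1.0))


def gelu_replacement(op, x):
    # Replacement: contrib-ops Gelu
    return op.Gelu(x, _domain="com.microsoft")


rule = orp.RewriteRule(erf_gelu_pattern, gelu_replacement)
model_proto = onnx.load("model.onnx")  # placeholder path
rewritten = onnxscript.rewriter.rewrite(model_proto, pattern_rewrite_rules=[rule])
onnx.save(rewritten, "model_rewritten.onnx")  # placeholder path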

E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975] failed while attempting to run meta for aten.group_norm.default
E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975] Traceback (most recent call last):
E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975]   File "/home/justinchu/anaconda3/envs/onnx/lib/python3.11/site-packages/torch/_subclasses/fake_tensor.py", line 1971, in _dispatch_impl
E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975]     r = func(*args, **kwargs)
E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975]         ^^^^^^^^^^^^^^^^^^^^^
E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975]   File "/home/justinchu/anaconda3/envs/onnx/lib/python3.11/site-packages/torch/_ops.py", line 713, in __call__
E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975]     return self._op(*args, **kwargs)
E0816 19:43:22.023000 57436 torch/_subclasses/fake_tensor.py:1975]            ^^^^^^^^^^^^^^^^^^^^^^^^^
E0816 1
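
The failure happens in the meta/fake-tensor dispatch for aten.group_norm.default. The original model isn't shown in this gist; a minimal, hypothetical repro of that code path (any module containing a GroupNorm traced through torch.export, which runs under FakeTensorMode) would look roughly like:

import torch


class GroupNormModel(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.norm = torch.nn.GroupNorm(num_groups=4, num_channels=32)

    def forward(self, x):
        return self.norm(x)


# torch.export traces under FakeTensorMode, which is where the meta
# registration for aten.group_norm.default gets exercised.
ep = torch.export.export(GroupNormModel(), (torch.randn(1, 32, 16, 16),))
print(ep)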

# Export per-node verification results (VerificationInfo objects from the
# ONNX exporter's verification tooling) as model-explorer custom node data.
from collections.abc import Collection  # import added for this excerpt


def save_node_data_for_model_explorer(
    verification_infos: Collection[VerificationInfo],
    node_names: list[str],
    model_name: str = "model",
):
    # https://github.com/google-ai-edge/model-explorer/wiki/4.-API-Guide#create-custom-node-data
    # This API is unstable and may change in the future.
    from model_explorer import node_data_builder as ndb

    for field in ("max_abs_diff", "max_rel_diff"):
        # Populate values for the main graph in a model.
        main_graph_results: dict[str, ndb.NodeDataResult] = {}
        for info, node_name in zip(verification_infos, node_names):
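
The excerpt ends inside the inner loop. For context, the custom node data API referenced in the comment is typically used along these lines; this is a standalone sketch with made-up node names and values based on the linked wiki page, not the remainder of the original function.

# Standalone sketch of model-explorer's custom node data API
# (assumed from the wiki page cited above; names and values are made up).
from model_explorer import node_data_builder as ndb

results = {
    "/model/layers.0/Add": ndb.NodeDataResult(value=1.5e-3),
    "/model/layers.1/Add": ndb.NodeDataResult(value=7.2e-4),
}
gradient = [
    ndb.GradientItem(stop=0, bgColor="#ffffff"),
    ndb.GradientItem(stop=1, bgColor="#ff0000"),
]
graph_data = ndb.GraphNodeData(results=results, gradient=gradient)
model_data = ndb.ModelNodeData(graph_results={"main": graph_data})
model_data.save_to_file("model_max_abs_diff.json")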

# Check how upsample_bilinear (deprecated alias of bilinear interpolate)
# decomposes under torch.export.
import torch

class UpsampleModel(torch.nn.Module):
    def forward(self, x):
        return torch.nn.functional.upsample_bilinear(x, scale_factor=2)

model = UpsampleModel()
ep = torch.export.export(model, (torch.randn(1, 3, 224, 224),))
# run_decompositions() returns a new ExportedProgram rather than modifying `ep`.
decomposed = ep.run_decompositions()
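
A small follow-up sketch (assuming the `decomposed` program from above) to list which aten ops remain after decomposition:

# Print the call_function targets left in the decomposed graph
# (e.g. to see which aten op upsample_bilinear lowers to).
for node in decomposed.graph.nodes:
    if node.op == "call_function":
        print(node.target)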

import torch

class M(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(5, 10)

    def forward(self, x):
        return self.linear(x)
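
A minimal sketch of how a module like this is exported in the workflows above, using the dynamo-based exporter (the output path is a placeholder):

# Sketch: export M to ONNX with the dynamo-based exporter.
model = M()
example_input = torch.randn(2, 5)
onnx_program = torch.onnx.export(model, (example_input,), dynamo=True)
onnx_program.save("linear.onnx")  # placeholder output path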