This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from functools import wraps
from typing import Any, Callable, Dict, Iterable, Optional, Tuple, TypeVar

import torch
T = TypeVar("T", bound=callable) | |
ref_map = { | |
torch.float64: [torch.float16,torch.float32,torch.bfloat16,torch.half], | |
torch.float32: [torch.float16,torch.bfloat16,torch.half], | |
torch.float16: [], | |
torch.bfloat16: [], |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np | |
import torch | |
from torchvision.transforms import functional as F | |
from PIL import Image | |
# Configuration for generating a sweep of rotated copies of one image.
# (Trailing scrape artifacts removed — the original lines carried " | |"
# gutter junk that made them invalid Python.)
image_path = "./117.webp"  # source image to rotate
num_images = 32            # number of rotated frames to produce
total_rot = 360            # total rotation span in degrees (one full turn)
rot_step = total_rot / num_images  # degrees of rotation between frames (11.25)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from typing import List, Any | |
import enum | |
from cuda import cudart | |
# CUDA runtime constants mirrored from the cudart headers.
# (Trailing " | |" scrape artifacts removed; values unchanged.)
CUDART_VERSION = 12020      # CUDA runtime version these bindings target (12.2)
CUDA_EGL_MAX_PLANES = 3     # max planes an EGL frame can carry
CUDA_IPC_HANDLE_SIZE = 64   # size in bytes of a CUDA IPC handle
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
torch.set_printoptions(precision=4, sci_mode=False) | |
import triton | |
import triton.language as tl | |
from torch import Tensor | |
def quanitze_fp8_tensorwise(x: torch.Tensor, dtype=torch.float8_e4m3fn): | |
scale = x.abs().max() / torch.finfo(dtype).max |
Older / Newer