import tempfile

import gradio as gr
import numpy as np
import torch
from PIL import Image
import trimesh
from huggingface_hub import hf_hub_download
from depth_anything_v2.dpt import DepthAnythingV2
from pygltflib import GLTF2, Node, Camera, Perspective, Scene
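Taken together, these imports outline the app: download a Depth Anything V2 checkpoint from the Hub, run depth inference, and turn the result into a mesh/glTF scene for display in Gradio. A minimal loading sketch follows; the repo id, filename, and ViT-L config are assumptions, not values taken from this gist:

# Sketch only: checkpoint repo, filename, and ViT-L config are assumed, adjust as needed.
import numpy as np
import torch
from huggingface_hub import hf_hub_download
from depth_anything_v2.dpt import DepthAnythingV2

ckpt = hf_hub_download(repo_id="depth-anything/Depth-Anything-V2-Large",
                       filename="depth_anything_v2_vitl.pth")
model = DepthAnythingV2(encoder="vitl", features=256,
                        out_channels=[256, 512, 1024, 1024])
model.load_state_dict(torch.load(ckpt, map_location="cpu"))
model = model.to("cuda" if torch.cuda.is_available() else "cpu").eval()

# infer_image() takes an HxWx3 uint8 BGR array and returns an HxW float depth map.
depth = model.infer_image(np.zeros((480, 640, 3), dtype=np.uint8))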
import argparse
import logging

import torch
from safetensors import safe_open
from safetensors.torch import load_file, save_file

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

QWEN_IMAGE_KEYS = [
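The QWEN_IMAGE_KEYS list continues beyond this excerpt. As a rough sketch of the load, remap, save pattern these imports suggest (not this script's actual key mapping; rename_key below is a hypothetical example rule), a conversion pass over a safetensors checkpoint can look like this:

# Generic sketch of a safetensors key-remapping pass; the rename rule is hypothetical.
from safetensors.torch import load_file, save_file

def rename_key(key: str) -> str:
    # Example rule: drop a "default" adapter name from PEFT-style LoRA keys.
    return key.replace(".lora_A.default.", ".lora_A.").replace(".lora_B.default.", ".lora_B.")

def convert(src_path: str, dst_path: str) -> None:
    state = load_file(src_path)                      # {name: tensor}, loaded on CPU
    remapped = {rename_key(k): v for k, v in state.items()}
    save_file(remapped, dst_path)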
import argparse

from safetensors import safe_open


def load_structure(path):
    """Return a {key: (shape, dtype)} dict describing a safetensors file."""
    tensors = {}
    with safe_open(path, framework="pt") as f:
        for k in f.keys():
            t = f.get_tensor(k)
            tensors[k] = (tuple(t.shape), str(t.dtype))
    return tensors
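A report like the two listings below can be produced by running load_structure on both checkpoints and diffing the key sets. This is only a sketch of that comparison, assuming the two file paths are passed on the command line:

# Sketch: compare the key sets of two safetensors files using load_structure() above.
def compare(path_a: str, path_b: str) -> None:
    a, b = load_structure(path_a), load_structure(path_b)
    print(f"Total tensors in 1: {len(a)}")
    print(f"Total tensors in 2: {len(b)}")
    only_a = sorted(set(a) - set(b))
    only_b = sorted(set(b) - set(a))
    if only_a:
        print("⚠️ Keys only in first file:")
        for k in only_a:
            shape, dtype = a[k]
            print(f"{k} [{shape}] {dtype}")
    if only_b:
        print("⚠️ Keys only in second file:")
        for k in only_b:
            shape, dtype = b[k]
            print(f"{k} [{shape}] {dtype}")

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("file1")
    parser.add_argument("file2")
    args = parser.parse_args()
    compare(args.file1, args.file2)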
Total tensors in 1: 1440
Total tensors in 2: 1680
⚠️ Keys only in first file:
transformer_blocks.0.attn.add_k_proj.lora_A.default.weight [(16, 3072)] torch.bfloat16
transformer_blocks.0.attn.add_k_proj.lora_B.default.weight [(3072, 16)] torch.bfloat16
transformer_blocks.0.attn.add_q_proj.lora_A.default.weight [(16, 3072)] torch.bfloat16
transformer_blocks.0.attn.add_q_proj.lora_B.default.weight [(3072, 16)] torch.bfloat16
transformer_blocks.0.attn.add_v_proj.lora_A.default.weight [(16, 3072)] torch.bfloat16
transformer_blocks.0.attn.add_v_proj.lora_B.default.weight [(3072, 16)] torch.bfloat16
Total tensors in 1: 1440
Total tensors in 2: 1680
⚠️ Keys only in second file:
transformer_blocks.0.img_mlp.net.0.proj.lora_A.default.weight [(4, 3072)] torch.bfloat16
transformer_blocks.0.img_mlp.net.0.proj.lora_B.default.weight [(12288, 4)] torch.bfloat16
transformer_blocks.0.txt_mlp.net.0.proj.lora_A.default.weight [(4, 3072)] torch.bfloat16
transformer_blocks.0.txt_mlp.net.0.proj.lora_B.default.weight [(12288, 4)] torch.bfloat16
transformer_blocks.1.img_mlp.net.0.proj.lora_A.default.weight [(4, 3072)] torch.bfloat16
transformer_blocks.1.img_mlp.net.0.proj.lora_B.default.weight [(12288, 4)] torch.bfloat16