# Convert a huggingface LLaMA checkpoint to an (unsharded) pytorch checkpoint
# comes from https://github.com/tloen/alpaca-lora/blob/main/export_state_dict_checkpoint.py
import argparse
import json
from pathlib import Path
import torch
import transformers
from transformers import LlamaForCausalLM, LlamaTokenizer  # noqa: E402
from typing import Optional, Any
import torch
from transformers.utils import is_accelerate_available, is_bitsandbytes_available
from transformers import (
    AutoTokenizer,
    AutoModelForCausalLM,
    GenerationConfig,
    pipeline,