@gugod
Created February 24, 2025 02:01
diff --git a/convert_hf_to_gguf.py b/convert_hf_to_gguf.py
index 8b7c75d8..ff12f168 100755
--- a/convert_hf_to_gguf.py
+++ b/convert_hf_to_gguf.py
@@ -579,6 +579,9 @@ class Model:
         # NOTE: if you get an error here, you need to update the convert_hf_to_gguf_update.py script
         # or pull the latest version of the model from Huggingface
         # don't edit the hashes manually!
+        if chkhsh == "95092e9dc64e2cd0fc7e0305c53a06daf9efd4045ba7413e04d7ca6916cd274b":
+            # ref: https://huggingface.co/voidful/Llama-3.1-TAIDE-R1-8B-Chat
+            res = "llama-3.1-taide-r1-8b-chat"
         if chkhsh == "0ef9807a4087ebef797fc749390439009c3b9eda9ad1a097abbe738f486c01e5":
             # ref: https://huggingface.co/meta-llama/Meta-Llama-3-8B
             res = "llama-bpe"
diff --git a/convert_hf_to_gguf_update.py b/convert_hf_to_gguf_update.py
index fa4989a8..9157c16a 100755
--- a/convert_hf_to_gguf_update.py
+++ b/convert_hf_to_gguf_update.py
@@ -109,9 +109,10 @@ models = [
     {"name": "megrez", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/Infinigence/Megrez-3B-Instruct"},
     {"name": "deepseek-v3", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/deepseek-ai/DeepSeek-V3"},
     {"name": "deepseek-r1-qwen", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"},
+    {"name": "llama-3.1-taide-r1-8b-chat", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/voidful/Llama-3.1-TAIDE-R1-8B-Chat"},
 ]
 
 
 def download_file_with_auth(url, token, save_path):
     headers = {"Authorization": f"Bearer {token}"}
     response = sess.get(url, headers=headers)
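
For context, the chkhsh value gated on in the first hunk is not hand-picked: convert_hf_to_gguf_update.py derives it by encoding a fixed test string with the model's tokenizer and hashing the resulting token IDs, which is why the comment warns against editing the hashes manually. Below is a minimal sketch of that derivation, assuming the transformers package is installed and the voidful/Llama-3.1-TAIDE-R1-8B-Chat repo is reachable; CHK_TXT is a placeholder here, the canonical test string (chktxt) lives in the conversion scripts.

from hashlib import sha256

from transformers import AutoTokenizer

# Placeholder: substitute the canonical chktxt string from convert_hf_to_gguf.py.
CHK_TXT = "..."

# Load the tokenizer for the model added by this patch.
tokenizer = AutoTokenizer.from_pretrained("voidful/Llama-3.1-TAIDE-R1-8B-Chat")

# Hash the token IDs produced for the test string; this is the chkhsh value.
chktok = tokenizer.encode(CHK_TXT)
chkhsh = sha256(str(chktok).encode()).hexdigest()
print(chkhsh)  # with the real chktxt, this should print the hash added in the first hunk

With the patch applied, running convert_hf_to_gguf_update.py (it takes a Hugging Face token as its argument) downloads the tokenizer for every entry in models and regenerates the if-chain in get_vocab_base_pre, so the hand-edited block in the first hunk should match what the script produces.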