Created
March 15, 2025 13:44
-
-
Save karaketir16/684a4d79465bc0a61eb613765c380f1e to your computer and use it in GitHub Desktop.
Ollama Model Install
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import os | |
| import sys | |
| import shutil | |
| import hashlib | |
| from pathlib import Path | |
def get_models_path(custom_path=None):
    """Resolve the directory where Ollama keeps its model store.

    Resolution order: the explicit *custom_path* argument, then the
    OLLAMA_MODELS environment variable (with a Windows-style
    ``%username%`` placeholder expanded), then a per-OS default.
    Exits the process when the platform is unsupported or USERNAME
    is missing on Windows.
    """
    if custom_path:
        return custom_path

    env_override = os.getenv("OLLAMA_MODELS")
    if env_override:
        return env_override.replace("%username%", os.getenv("USERNAME", ""))

    platform = sys.platform
    if platform == "darwin":
        return os.path.expanduser("~/.ollama/models")
    if platform == "linux":
        # Prefer the snap-package location when it exists.
        snap_store = "/var/snap/ollama/common/models"
        if Path(snap_store).is_dir():
            return snap_store
        return "/usr/share/ollama/.ollama/models"
    if platform == "win32":
        user = os.getenv("USERNAME")
        if not user:
            print("Error: Environment variable USERNAME is not set")
            sys.exit(1)
        return f"C:/Users/{user}/.ollama/models"

    print(f"OS {platform} not supported")
    sys.exit(1)
def get_manifest_file(folder_path):
    """Open the ``manifest`` file inside *folder_path* for binary reading.

    Returns the open file object; the caller is responsible for closing
    it (e.g. by using it as a context manager).

    Raises:
        FileNotFoundError: if the manifest file does not exist. The
            original error is chained so the failing path is preserved.
    """
    manifest_path = os.path.join(folder_path, "manifest")
    try:
        return open(manifest_path, "rb")
    except FileNotFoundError as e:
        # Chain the cause instead of discarding it (original code lost it).
        raise FileNotFoundError("Error opening manifest file") from e
def get_blob_names(path):
    """Return the names of all blob files inside *path*.

    A blob is any regular file in the directory except the ``manifest``
    file itself.

    Raises:
        Exception: if the directory cannot be read. The generic type is
            kept for caller compatibility, but the original error is now
            chained for debugging.
    """
    try:
        return [
            entry.name
            for entry in Path(path).iterdir()
            if entry.is_file() and entry.name != "manifest"
        ]
    except Exception as e:
        raise Exception(f"Error reading blobs directory: {e}") from e
def get_file_hashed_name(file_path):
    """Return Ollama's blob name for *file_path*: ``sha256<hexdigest>``.

    The file is hashed in 8 KiB chunks so large model blobs never have
    to be loaded into memory at once.

    Raises:
        Exception: if the file cannot be read or hashed. The generic
            type is kept for caller compatibility, but the original
            error is now chained.
    """
    try:
        digest = hashlib.sha256()
        with open(file_path, "rb") as f:
            while chunk := f.read(8192):
                digest.update(chunk)
        return f"sha256{digest.hexdigest()}"
    except Exception as e:
        raise Exception(f"Error hashing file: {e}") from e
def parse_blobs_destination_path(source, target_folder, file_name):
    """Compute the destination path for one blob inside the models store.

    A file whose name lacks a ``sha256`` marker is renamed to the hash
    of its contents; the final path uses Ollama's ``sha256-<hex>``
    naming scheme.

    NOTE(review): the final replace() runs over the whole path, so a
    ``sha256`` substring in *target_folder* would also gain a dash —
    the default store paths never contain one, but a custom path could;
    confirm before changing.
    """
    folder = target_folder.replace("\\", "/")
    if "sha256" in file_name:
        dest = os.path.join(folder, file_name)
    else:
        try:
            hashed = get_file_hashed_name(os.path.join(source, file_name))
            dest = os.path.join(folder, hashed)
        except Exception as e:
            # Best-effort: report and fall back to the bare folder path,
            # matching the original behavior.
            print(f"Error getting file hash for {file_name}: {e}")
            dest = folder
    return dest.replace("sha256", "sha256-")
def parse_model_name(model_name):
    """Split ``name[:tag]`` into ``(tag, name)``.

    The tag defaults to ``"latest"`` when no colon is present.
    """
    parts = model_name.split(":")
    if len(parts) == 1:
        return "latest", parts[0]
    # Deliberately take parts[1] (not everything after the first colon)
    # to match the original split-based behavior.
    return parts[1], parts[0]
def install_model(model_name, downloaded_model_path, custom_models_path=None):
    """Copy a downloaded model (manifest + blobs) into Ollama's store.

    Args:
        model_name: model reference, optionally with a ``:tag`` suffix.
        downloaded_model_path: directory holding the downloaded manifest
            and blob files.
        custom_models_path: optional override for the models store root.

    Prompts interactively before overwriting an existing manifest and
    exits the process if the user declines; raises Exception on any
    copy failure.
    """
    store_root = get_models_path(custom_models_path)
    tag, prefix = parse_model_name(model_name)

    manifest_dir = os.path.join(store_root, f"manifests/registry.ollama.ai/library/{prefix}")
    os.makedirs(store_root, exist_ok=True)
    os.makedirs(manifest_dir, exist_ok=True)

    manifest_dest = os.path.join(manifest_dir, tag)
    if Path(manifest_dest).exists():
        # Ask before clobbering an already-installed manifest.
        answer = input("Warning! Some Model Files already exist. Type 'Y' to override them: ").strip().upper()
        if answer != "Y":
            print("Installation aborted!")
            sys.exit(1)

    try:
        with get_manifest_file(downloaded_model_path) as src, open(manifest_dest, "wb") as dst:
            shutil.copyfileobj(src, dst)
    except Exception as e:
        raise Exception(f"Error copying manifest file: {e}")

    blobs_dir = os.path.join(store_root, "blobs")
    os.makedirs(blobs_dir, exist_ok=True)

    try:
        blob_names = get_blob_names(downloaded_model_path)
    except Exception as e:
        raise Exception(f"Error getting blobs path: {e}")

    print("Copying blobs to", blobs_dir)
    print("This may take a while, so don't worry if it seems stuck.")
    for blob in blob_names:
        source_file = os.path.join(downloaded_model_path, blob)
        dest_file = parse_blobs_destination_path(downloaded_model_path, blobs_dir, blob)
        try:
            shutil.copy2(source_file, dest_file)
        except Exception as e:
            raise Exception(f"Error copying blob file: {e}")

    print("Model installed successfully!")
if __name__ == "__main__":
    # Expected CLI: <model_name> <downloaded_model_path> [custom_models_path]
    args = sys.argv[1:]
    if not 2 <= len(args) <= 3:
        print("Usage: python install_model.py <model_name> <downloaded_model_path> [custom_models_path]")
        sys.exit(1)
    name = args[0]
    source_dir = args[1]
    store_override = args[2] if len(args) == 3 else None
    install_model(name, source_dir, store_override)
Author
Author
I have converted part of it to Python.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
reference: https://github.com/amirrezaDev1378/ollama-model-direct-download/