Luis Capelo (luiscape)
@luiscape
luiscape / client.py
Created August 13, 2024 20:16
Running ZeroMQ in Modal with a Modal Tunnel.
import sys
import time
import zmq
from common import Message, app
from server import process_message
def zmq_query(address: str, n_messages: int = 10):
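The preview cuts off at the client function. Below is a minimal sketch of the REQ/REP round trip it presumably performs, using only pyzmq; the tunnel address, the `Message` helper, and the Modal server side are not shown in the preview and are assumptions.

import sys
import zmq

def zmq_query(address: str, n_messages: int = 10) -> None:
    # Connect a REQ socket to the address exposed by the Modal Tunnel and
    # exchange n_messages request/reply pairs with the server.
    ctx = zmq.Context()
    socket = ctx.socket(zmq.REQ)
    socket.connect(address)
    for i in range(n_messages):
        socket.send_string(f"message-{i}")
        print(f"reply {i}: {socket.recv_string()}")
    socket.close()
    ctx.term()

if __name__ == "__main__":
    # e.g. python client.py tcp://<tunnel-host>:<port>
    zmq_query(sys.argv[1])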
@luiscape
luiscape / entrypoint.sh
Last active August 5, 2024 14:20
Ollama with memory snapshots
#!/usr/bin/env bash
# Needed in order to support the ollama/ollama image.
exec "$@"
@luiscape
luiscape / sd_torch_2_0.py
Created March 18, 2023 15:23
Stable Diffusion on PyTorch 2.0 (Modal)
# ## Basic setup
from __future__ import annotations
import io
import os
import time
from pathlib import Path
import modal
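The PyTorch 2.0 angle is torch.compile. A minimal sketch of the core call, outside of Modal, is below; the checkpoint name and sampler settings are assumptions, since the preview only shows imports.

import torch
from diffusers import StableDiffusionPipeline

# Assumed checkpoint; the preview does not show which weights the gist loads.
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

# PyTorch 2.0: compile the UNet, the hot loop of the denoising process.
pipe.unet = torch.compile(pipe.unet)

image = pipe("an astronaut riding a horse", num_inference_steps=30).images[0]
image.save("output.png")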
@luiscape
luiscape / call_inside_modal.py
Created January 27, 2023 02:59
call_inside_modal.py
"""
Script to run batch inference using HuggingFace's zero-shot text-classification model on Modal.
Based on the tutorial: https://modal.com/docs/guide/ex/batch_inference_using_huggingface
Goal: filter a large Hugging Face dataset for food-related images (based on the text caption).
"""
import io
import modal
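The filtering step the docstring describes boils down to one zero-shot classification call per caption. A minimal sketch, assuming the facebook/bart-large-mnli checkpoint and a two-label food/not-food split (the gist's actual labels and threshold are not visible in the preview):

from transformers import pipeline

classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")

def is_food_caption(caption: str, threshold: float = 0.8) -> bool:
    # Score the caption against both labels and keep it only if "food" wins decisively.
    result = classifier(caption, candidate_labels=["food", "not food"])
    return result["labels"][0] == "food" and result["scores"][0] >= threshold

print(is_food_caption("a bowl of ramen with a soft-boiled egg"))   # expected: True
print(is_food_caption("a mountain bike leaning against a fence"))  # expected: False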
@luiscape
luiscape / batch_inference_modal.py
Created January 27, 2023 02:48
batch_inference_modal.py
"""
Script to run batch inference using HuggingFace's zero-shot text-classification model on Modal.
Based on the tutorial: https://modal.com/docs/guide/ex/batch_inference_using_huggingface
Goal: filter a large Hugging Face dataset for food-related images (based on the text caption).
"""
import io
import modal
@luiscape
luiscape / diffusers-rs-on-modal.py
Last active January 4, 2023 03:33
Runs `diffusers-rs` on Modal.
# Runs diffusers-rs on Modal.
import os
import time
import modal
import subprocess
CARGO_PATH: str = "/root/.cargo/bin/cargo"
def _convert_clip():
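The `CARGO_PATH` constant and the truncated `_convert_clip` helper suggest the gist shells out to cargo inside the container. A sketch of that pattern; the example name, flags, and checkout path are assumptions, not the gist's actual invocation.

import subprocess

CARGO_PATH: str = "/root/.cargo/bin/cargo"

def run_diffusers_rs(prompt: str) -> None:
    # Assumed invocation of the diffusers-rs stable-diffusion example via cargo.
    subprocess.run(
        [CARGO_PATH, "run", "--release", "--example", "stable-diffusion",
         "--", "--prompt", prompt],
        cwd="/root/diffusers-rs",  # hypothetical checkout location
        check=True,
    )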
@luiscape
luiscape / stable_diffusion_cli.py
Last active January 4, 2023 04:20
StableDiffusion NGC Containers on Modal
import io
import os
import time
from pathlib import Path
import modal
import typer
stub = modal.Stub("stable-diffusion-cli")
app = typer.Typer()
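The preview pairs a Modal stub with a Typer CLI. Below is a sketch of how those two usually wire together, written against the current Modal names (`modal.App`, `.remote`; the gist's `modal.Stub` is the older name) and with the NGC image tag and checkpoint as assumptions.

import io
import modal
import typer

app = modal.App("stable-diffusion-cli-sketch")
cli = typer.Typer()

# Assumed NGC tag; the preview does not show which container the gist pulls.
image = (
    modal.Image.from_registry("nvcr.io/nvidia/pytorch:22.12-py3")
    .pip_install("diffusers", "transformers", "accelerate", "safetensors", "typer")
)

@app.function(image=image, gpu="A10G")
def generate(prompt: str) -> bytes:
    import torch
    from diffusers import StableDiffusionPipeline

    pipe = StableDiffusionPipeline.from_pretrained(
        "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
    ).to("cuda")
    buf = io.BytesIO()
    pipe(prompt).images[0].save(buf, format="PNG")
    return buf.getvalue()

@cli.command()
def entrypoint(prompt: str, output: str = "output.png"):
    # Run the Modal app ephemerally, call the remote function, write the PNG locally.
    with app.run():
        png = generate.remote(prompt)
    with open(output, "wb") as f:
        f.write(png)

if __name__ == "__main__":
    cli()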
@luiscape
luiscape / mounting_hf_model.py
Created December 14, 2022 23:20
`modal.Mount` a HuggingFace model
import io
import time
import modal
import os
import torch
import diffusers
from pathlib import Path
stub = modal.Stub("local-model-mount-test")
@luiscape
luiscape / download_stable_diffusion_models.py
Last active November 29, 2022 00:05
Download Stable Diffusion + `EulerAncestralDiscreteScheduler` and store pipeline
"""This script downloads all neural networks used in the HuggingFace `diffuser`'s
`StableDiffusionPipeline` pipeline. This also downloads the parameters from the scheduler
`EulerAncestralDiscreteScheduler` because that is a fast and effective scheduler.
This requires the env var `HUGGINGFACE_TOKEN` to be populated with a HuggingFace
access token.
The default cache location is: /vol/cache. This can be changed by populating
the environment variable `CACHE_PATH`.
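The download step the docstring describes can be reproduced with diffusers alone. A minimal sketch, assuming the runwayml/stable-diffusion-v1-5 checkpoint (the docstring does not name one) and the two environment variables it mentions:

import os
from diffusers import StableDiffusionPipeline, EulerAncestralDiscreteScheduler

MODEL_ID = "runwayml/stable-diffusion-v1-5"  # assumed checkpoint
CACHE_PATH = os.environ.get("CACHE_PATH", "/vol/cache")

# Swap in the fast Euler ancestral scheduler, then download and persist the full pipeline.
scheduler = EulerAncestralDiscreteScheduler.from_pretrained(MODEL_ID, subfolder="scheduler")
pipe = StableDiffusionPipeline.from_pretrained(
    MODEL_ID,
    scheduler=scheduler,
    use_auth_token=os.environ["HUGGINGFACE_TOKEN"],
    cache_dir=CACHE_PATH,
)
pipe.save_pretrained(CACHE_PATH)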
@luiscape
luiscape / jupyter_notebook_theme.sh
Created December 19, 2017 16:14
Install and configure Jupyter Themes.
#!/bin/bash
#
# Install the theme library.
#
pip install jupyterthemes
#
# Configure to use preferred theme.
#