
import argparse, os, sys, glob
import torch
import torch.nn as nn
import numpy as np
from omegaconf import OmegaConf
from PIL import Image
from tqdm import tqdm, trange
from itertools import islice
from einops import rearrange
from torchvision.utils import make_grid
2022-10-08 03:30:04 1922586752 img_emb_0000.npy
2022-10-08 03:57:15 1920735360 img_emb_0022.npy
2022-10-08 03:28:48 1921157248 img_emb_0044.npy
2022-10-08 03:28:30 1922345088 img_emb_0066.npy
2022-10-08 03:26:16 1922654336 img_emb_0088.npy
2022-10-08 03:31:30 1922353280 img_emb_0110.npy
2022-10-08 03:28:30 1922943104 img_emb_0132.npy
2022-10-08 03:26:54 1923940480 img_emb_0154.npy
2022-10-08 03:31:19 1922218112 img_emb_0176.npy
2022-10-08 03:28:39 1922607232 img_emb_0198.npy
@nousr
nousr / results.json
Last active October 20, 2022 02:44
vit-h-14 embeddings sanity check
[
  {
    "caption": "View Over Pyrenees Mountains, Spain",
    "url": "https://static1.bigstockphoto.com/thumbs/0/0/1/large2/100000799.jpg"
  },
  {
    "caption": "Mountain landscape with deer and forest at sunset",
    "url": "https://cdn.xxl.thumbs.canstockphoto.com/vector-illustration-of-mountain-landscape-with-deer-and-forest-at-sunset-clip-art-vector_csp56740268.jpg"
  },
  {
import os
import fsspec
import click
import huggingface_hub as hf_hub
from time import perf_counter
from braceexpand import braceexpand
import multiprocessing as mp
from tqdm import tqdm
@nousr
nousr / prior.sh
Last active October 25, 2022 23:53
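# map the SLURM-provided environment variables to readable names for the launcher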
LOCAL_ID=$SLURM_LOCALID
GLOBAL_RANK=$SLURM_PROCID
NODE_ID=$SLURM_NODEID
NUM_PROCS=$SLURM_NTASKS
CPUS=$SLURM_CPUS_PER_GPU
NUM_NODES=$SLURM_NNODES
MASTER_ADDR=$(scontrol show hostnames "$SLURM_JOB_NODELIST" | head -n 1)
if [ "$GLOBAL_RANK" == 0 ]
then
import wandb
import torch
from dalle2_pytorch import (
    T5OpenClipAdapter,
    DiffusionPriorNetwork,
    DiffusionPrior,
)
from dalle2_pytorch.trainer import DiffusionPriorTrainer
from transformers import T5Tokenizer
from accelerate import Accelerator
@nousr
nousr / indexer.sh
Last active November 18, 2022 23:30
#!/bin/bash
# step 1: get environment variables
# step 2: setup rank 0 to be the master and start the indexing python file (the last two operations happen in parallel)
# step 3: setup rank 1-N to be the workers and start the worker script (they then will listen for work from the master)
# get environment variables
GLOBAL_RANK=$SLURM_PROCID
CPUS=$SLURM_CPUS_PER_TASK
MEM=$SLURM_MEM_PER_NODE # seems to be in MB
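# --- sketch of steps 2 and 3 (the preview stops above) ---
# assumption: the indexer.py / worker.py script names and their flags are
# placeholders, not taken from the gist
MASTER_ADDR=$(scontrol show hostnames "$SLURM_JOB_NODELIST" | head -n 1)

if [ "$GLOBAL_RANK" == 0 ]
then
    # rank 0: become the master and launch the indexing script; the two run in parallel
    python indexer.py --master-addr "$MASTER_ADDR" --cpus "$CPUS" --mem "$MEM" &
else
    # ranks 1-N: start the worker script and listen for work from the master
    python worker.py --master-addr "$MASTER_ADDR" --cpus "$CPUS"
fi

wait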
name,prompt,negative_prompt
None,,
MDJ_style_base,"an extremely realistic digital painting of a (futuristic an alleyway) street in the style of cyberpunk, a photograph of a cyberpunk street alley at sunset, digital art trending on artstation, ((pixel art)), light rays shining","blurry, bad resolution, low quality, watermarked image, america, jpeg artifacts, background, words, font, text, boring, robot, human, full body, people in a street, purple"
MDJ_street_car,"an extremely realistic digital painting of a (cyberpunk volkswagen r32) on the street in the style of cyberpunk, a photograph of a cyberpunk street alley at night, digital art trending on artstation, ((pixel art)), light rays shining","bad resolution, low quality, watermarked image, human, ((simple)), [dark], blurry, bad resolution, low quality, ((watermarked image)), america, jpeg artifacts, words, font, text, boring, robot, human, full body, people in a street stock photo AND alamy, shutterstock, pixabay, flickr AND picture frame, bad perspective,"
@lru_cache(maxsize=None)
def load_safety_model(clip_model):
"""load the safety model"""
import torch # pylint: disable=import-outside-toplevel
import autokeras as ak # pylint: disable=import-outside-toplevel
from tensorflow.keras.models import load_model # pylint: disable=import-outside-toplevel
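    # torch / autokeras / keras are imported lazily inside the function so the
    # safety-model dependencies are only pulled in when the filter is used;
    # the lru_cache decorator above caches the loaded model for the rest of the process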
class H14_NSFW_Detector(nn.Module):
    def __init__(self, input_size=1024):
        super().__init__()
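        # assumption: input_size=1024 corresponds to the width of OpenCLIP
        # ViT-H/14 image embeddings, which this detector scores for NSFW content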
import torch
import webdataset as wds
from typing import Union, Sequence
from time import perf_counter
from accelerate import Accelerator
def process(src):
"""
Filter empty samples from the clap dataset.
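    """
    # minimal completion sketch -- the gist preview cuts off above; the
    # "txt" and "flac" key names are assumptions, not taken from the gist
    for sample in src:
        if sample.get("txt") and sample.get("flac"):
            yield sample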