https://github.com/jondurbin/airoboros
pip install --upgrade airoboros==2.0.11
{
  "name": "OpenChat 3.5",
  "load_params": {
    "n_ctx": 8192,
    "n_batch": 512,
    "rope_freq_base": 10000,
    "rope_freq_scale": 1,
    "n_gpu_layers": 80,
    "use_mlock": true,
    "main_gpu": 0,
from os import environ

from vueGPT import prompt, make_client
from dotenv import load_dotenv

# Load variables from a local .env file into the process environment so
# environ.get below can see them.
load_dotenv()

# Read the OpenAI API key; None if the variable is not set.
OPENAI_API_KEY = environ.get('OPENAI_API_KEY')
from os import environ

from vueGPT import prompt, make_client
from dotenv import load_dotenv

# Load variables from a local .env file into the process environment so
# environ.get below can see them.
load_dotenv()

# Read the OpenAI API key; None if the variable is not set.
OPENAI_API_KEY = environ.get('OPENAI_API_KEY')
# once you have an app created and downloaded:
curl -sSL https://get.wasp-lang.dev/installer.sh | sh
cd {app_folder}

# to install NVM:
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.5/install.sh | bash
nvm install 18
nvm use 18

# apply pending database migrations, then start the dev server
wasp db migrate-dev
wasp start
https://github.com/jondurbin/airoboros
pip install --upgrade airoboros==2.0.11
#!/usr/bin/env python3
"""Script setup: create the config_restore working directory.

Only the setup portion is visible here; sqlite3/sys are presumably used
further down in the script (not shown).
"""
import os
import sqlite3
import sys

# Ensure the output directory exists. A pre-existing directory is fine,
# but any other failure (e.g. permissions) should surface rather than be
# silently swallowed, so catch only FileExistsError instead of a bare except.
try:
    os.mkdir('config_restore')
except FileExistsError:
    pass
# Clone llama.cpp
git clone https://github.com/ggerganov/llama.cpp.git
cd llama.cpp

# Build it (LLAMA_METAL=1 enables the Metal backend)
LLAMA_METAL=1 make

# Download model
export MODEL=llama-2-13b-chat.ggmlv3.q4_0.bin
wget "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/${MODEL}"
// Name: Midjourney Prompt | |
// Description: Generate a Random Midjourney Prompt | |
// Author: John Lindquist | |
// Twitter: @johnlindquist | |
import "@johnlindquist/kit" | |
let count = parseInt( | |
await arg({ | |
placeholder: "How many prompts to generate and paste?", |
// Name: Midjourney Imagine | |
// Description: Generate a random Midjourney Prompt | |
// Author: John Lindquist | |
// Twitter: @johnlindquist | |
import "@johnlindquist/kit" | |
let count = parseInt( | |
await arg({ | |
placeholder: "How many prompts to generate and paste?", |
import torch

# Print the CUDA toolkit version this PyTorch build was compiled against,
# then whether a CUDA device is currently usable.
build_cuda_version = torch.version.cuda
gpu_ready = torch.cuda.is_available()
print(build_cuda_version)
print(gpu_ready)