OpenDevin local setup with Ollama (Ubuntu)
sudo apt update
sudo apt install curl build-essential docker-compose-v2
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh | bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
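To confirm nvm is usable in the current shell (it is loaded as a shell function by the line above):
nvm --version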
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
chmod +x Miniconda3-latest-Linux-x86_64.sh
./Miniconda3-latest-Linux-x86_64.sh
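After the installer finishes, open a new terminal (or source your shell rc file) so conda is on PATH, then verify:
source ~/.bashrc
conda --version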
curl -fsSL https://ollama.com/install.sh | sh
ollama run mistral:7b-instruct-v0.2-q4_K_M
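To check that the model was pulled and that the Ollama server answers on its default port (11434):
ollama list
curl http://localhost:11434/api/tags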
git clone https://github.com/OpenDevin/OpenDevin.git
cd OpenDevin
conda create -n od python=3.11
conda activate od
Edit config.toml in the OpenDevin directory and put this in it:
LLM_API_KEY="ollama"
LLM_MODEL="ollama/mistral:7b-instruct-v0.2-q4_K_M"
LLM_EMBEDDING_MODEL="local"
LLM_BASE_URL="http://localhost:11434"
WORKSPACE_DIR="./workspace"
mkdir workspace
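A quick sanity check that config.toml parses cleanly, using the tomllib module available in the Python 3.11 environment created above:
python -c "import tomllib; print(tomllib.load(open('config.toml', 'rb')))"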
Add this line to the [Service] section of /etc/systemd/system/ollama.service:
Environment="OLLAMA_HOST=0.0.0.0"
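After editing the unit file, reload systemd and restart Ollama so the new environment variable takes effect:
sudo systemctl daemon-reload
sudo systemctl restart ollama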
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash   # skip if nvm is already installed from the step above
nvm install 18.17.1
nvm use 18.17.1
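Verify that the expected Node version is active:
node --version   # should print v18.17.1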
python -m pip install litellm uvicorn fastapi toml playwright json_repair chromadb llama_index llama_index.vector_stores.chroma llama_index.embeddings.huggingface termcolor pexpect docker poetry PyJWT
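A quick import check that the main Python dependencies installed correctly (module names here match the packages above):
python -c "import litellm, uvicorn, fastapi, chromadb; print('deps ok')"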
make build
make run
- OR -
In one conda-activated terminal:
make start-backend
In another conda-activated terminal:
make start-frontend
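Once either path is running, open the frontend URL printed in the terminal in a browser. The exact ports depend on the OpenDevin version; the command below assumes the frontend on port 3001, so treat that as an assumption and trust the make output:
xdg-open http://localhost:3001   # port is an assumption; check the terminal output from make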
This guide is written for Ubuntu; some steps may differ under WSL.