https://www.kaggle.com/datasets/alexpunnen/wikipedia-history-of-save-failures
docker run --rm -it --net=host -v /home/xxx:/home alexcpn/fb_prophet_python:1 bash -c "cd /home/xx/python && python outlier_full.py"
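The container runs `outlier_full.py` against the save-failure time series from the Kaggle dataset above. Its exact contents aren't shown here, but a minimal sketch of Prophet-based outlier flagging might look like the following; the CSV name, column names, and the 99% interval threshold are all assumptions for illustration.

```python
import pandas as pd
from fbprophet import Prophet  # newer releases ship this as `from prophet import Prophet`

# Hypothetical export of the Kaggle "wikipedia-history-of-save-failures" data:
# one timestamp column and one count column (names assumed).
df = pd.read_csv("save_failures.csv")
df = df.rename(columns={"date": "ds", "failures": "y"})
df["ds"] = pd.to_datetime(df["ds"])

# Fit Prophet with a wide uncertainty interval and predict over the same dates.
m = Prophet(interval_width=0.99)
m.fit(df)
forecast = m.predict(df[["ds"]])

# Points falling outside the 99% interval are flagged as outliers.
merged = df.merge(forecast[["ds", "yhat_lower", "yhat_upper"]], on="ds")
outliers = merged[(merged["y"] < merged["yhat_lower"]) | (merged["y"] > merged["yhat_upper"])]
print(outliers[["ds", "y"]])
```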
# Clone llama.cpp
git clone https://github.com/ggerganov/llama.cpp.git
cd llama.cpp
# Build it
make clean
LLAMA_METAL=1 make
# Download model
export MODEL=llama-2-13b-chat.ggmlv3.q4_0.bin
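With the quantized model file in place, the build above is usually driven from llama.cpp's command-line binary or from Python bindings. Below is a minimal Python sketch using the separate llama-cpp-python package (an assumption; it is not part of the llama.cpp build itself). The model path, prompt, and parameter values are illustrative, and a GGML v3 file like the one above only loads with older llama-cpp-python releases, since newer ones expect GGUF.

```python
from llama_cpp import Llama  # pip install llama-cpp-python (an older release for GGML v3 files)

# Model path assumed to match the $MODEL file exported above.
llm = Llama(
    model_path="./llama-2-13b-chat.ggmlv3.q4_0.bin",
    n_ctx=2048,      # context window
    n_gpu_layers=1,  # offload layers to Metal when built with GPU support
)

# Simple one-shot completion; the prompt format here is illustrative,
# not the official Llama 2 chat template.
out = llm(
    "Q: Name three uses of a quantized 13B chat model on a laptop. A:",
    max_tokens=128,
    stop=["Q:"],
)
print(out["choices"][0]["text"])
```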