@rajesh-s
Last active July 23, 2024 20:51
llama.cpp cmake
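
CMake configure presets for building llama.cpp on an aarch64 host: a native Release build with GCC, a CUDA-enabled Release build using the NVIDIA HPC SDK compilers (GGML_CUDA=ON), and a Debug build with the aarch64-linux-gnu GCC toolchain; all three disable the llamafile sgemm path (GGML_LLAMAFILE=OFF). Note that ${workspaceRoot} is not among the macros CMake documents for preset files (those include ${sourceDir} and ${presetName}), so configuring with the plain cmake CLI may require switching those entries to ${sourceDir}, as the debug preset already does.
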
{
  "version": 4,
  "configurePresets": [
    {
      "name": "default",
      "displayName": "default",
      "binaryDir": "${workspaceRoot}/build/${presetName}",
      "cacheVariables": {
        "CMAKE_INSTALL_PREFIX": "${workspaceRoot}/install/${presetName}",
        "CMAKE_C_COMPILER": "/usr/bin/gcc",
        "CMAKE_CXX_COMPILER": "/usr/bin/g++",
        "CMAKE_BUILD_TYPE": "Release",
        "CMAKE_CXX_FLAGS": "-mcpu=native",
        "CMAKE_C_FLAGS": "-mcpu=native",
        "GGML_LLAMAFILE": "OFF"
      }
    },
    {
      "name": "cuda",
      "displayName": "cuda",
      "binaryDir": "${workspaceRoot}/build/${presetName}",
      "cacheVariables": {
        "CMAKE_INSTALL_PREFIX": "${workspaceRoot}/install/${presetName}",
"CMAKE_C_COMPILER": "/opt/nvidia/hpc_sdk/Linux_aarch64/24.3/compilers/bin/nvcc",
"CMAKE_CXX_COMPILER": "/opt/nvidia/hpc_sdk/Linux_aarch64/24.3/compilers/bin/nvc++",
"CMAKE_BUILD_TYPE": "Release",
"CMAKE_CXX_FLAGS": "-mcpu=native",
"CMAKE_C_FLAGS": "-mcpu=native",
"GGML_LLAMAFILE": "OFF",
"GGML_CUDA": "ON"
}
},
{
"name": "debug",
"displayName": "debug",
"binaryDir": "${sourceDir}/out/build/${presetName}",
"cacheVariables": {
"CMAKE_INSTALL_PREFIX": "${sourceDir}/out/install/${presetName}",
"CMAKE_C_COMPILER": "/usr/bin/aarch64-linux-gnu-gcc",
"CMAKE_CXX_COMPILER": "/usr/bin/aarch64-linux-gnu-g++",
"CMAKE_BUILD_TYPE": "Debug",
"GGML_LLAMAFILE": "OFF"
}
}
],
"buildPresets": [
{
"name": "default",
"configurePreset": "default",
"jobs": 8
}
]
}
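
A minimal usage sketch, assuming the file is saved as CMakePresets.json (or CMakeUserPresets.json) in the root of the llama.cpp checkout and that ${workspaceRoot} resolves to that same directory:

cmake --preset default
cmake --build --preset default

cmake --preset cuda
cmake --build build/cuda -j 8

Only the default preset has a matching entry in buildPresets; the cuda and debug configurations can be built by pointing cmake --build at their binary directories (build/cuda and out/build/debug) or by adding corresponding build presets.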