Skip to content

Instantly share code, notes, and snippets.

@hacker1024
Created February 4, 2025 12:04
Show Gist options
  • Save hacker1024/f34fd52251890abd4058fdcde438aed9 to your computer and use it in GitHub Desktop.
An attempt at building a static llama.cpp library with Nix, intended for Android.
# Cross-build a static llama.cpp library with Nix for
# aarch64-unknown-linux-musl (armv9-a), intended for use on Android.
let
  pkgs = import <nixpkgs> {
    crossSystem = {
      config = "aarch64-unknown-linux-musl";
      gcc.arch = "armv9-a";
      useLLVM = true; # build with the clang/LLVM toolchain instead of GCC
      isStatic = true; # produce statically linked outputs
    };
    # Optimisation - disable unnecessary X11 dependencies.
    overlays = [
      (self: super: {
        vulkan-loader = super.vulkan-loader.override { enableX11 = false; };
      })
    ];
  };

  # Upstream llama.cpp ships its own Nix package set under .devops/nix/.
  # NOTE(review): fetching the moving `master` branch is impure and
  # non-reproducible — consider pinning a commit and passing `sha256`
  # to builtins.fetchTarball so the build is stable over time.
  llama-cpp = pkgs.callPackage
    (builtins.fetchTarball "https://github.com/ggerganov/llama.cpp/archive/master.tar.gz"
      + "/.devops/nix/scope.nix")
    { };

  inherit (pkgs) lib;
in
(llama-cpp.llama-cpp.override {
  enableCurl = false;
  useBlas = false;
  # Vulkan disabled for now: https://github.com/ggerganov/llama.cpp/issues/11654
  useVulkan = false;
  # Unused while useBlas = false; kept so re-enabling BLAS picks openblas.
  blas = pkgs.openblas;
}).overrideAttrs (final: prev: {
  # Split the build into separate outputs so consumers can depend on just
  # the library ("lib") or the headers/CMake config ("dev").
  outputs = [ "out" "lib" "dev" ];
  # shaderc runs on the build host (pkgsBuildHost); presumably kept for
  # when Vulkan is re-enabled — TODO confirm it is still needed.
  nativeBuildInputs = (prev.nativeBuildInputs or [ ]) ++ (with pkgs.pkgsBuildHost; [ shaderc ]);
  # Trim the build down to the library only: no tests (unless doCheck),
  # no examples, no server binary.
  cmakeFlags = (prev.cmakeFlags or [ ]) ++ [
    (lib.cmakeBool "LLAMA_BUILD_TESTS" (final.doCheck or false))
    (lib.cmakeBool "LLAMA_BUILD_EXAMPLES" false)
    (lib.cmakeBool "LLAMA_BUILD_SERVER" false)
  ];
})
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment