llama-cpp-capacitor
A native Capacitor plugin that embeds llama.cpp directly into mobile apps, enabling offline AI inference with a chat-first API design. It supports simple text generation as well as advanced chat conversations with system prompts, multimodal processing, text-to-speech (TTS), and LoRA adapters.

The CMake build script below compiles the vendored llama.cpp sources into a shared library for the Android x86_64 (emulator) ABI:
cmake_minimum_required(VERSION 3.10)
project(llama-cpp)
set(CMAKE_CXX_STANDARD 17)
set(LLAMACPP_LIB_DIR ${CMAKE_SOURCE_DIR}/../../../cpp)
include_directories(
${LLAMACPP_LIB_DIR}
${LLAMACPP_LIB_DIR}/ggml-cpu
${LLAMACPP_LIB_DIR}/tools/mtmd
)
set(
SOURCE_FILES
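# ggml core, CPU backend, and GGUF support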
${LLAMACPP_LIB_DIR}/ggml.c
${LLAMACPP_LIB_DIR}/ggml-alloc.c
${LLAMACPP_LIB_DIR}/ggml-backend.cpp
${LLAMACPP_LIB_DIR}/ggml-backend-reg.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/amx/amx.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/amx/mmq.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/ggml-cpu.c
${LLAMACPP_LIB_DIR}/ggml-cpu/ggml-cpu.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/quants.c
${LLAMACPP_LIB_DIR}/ggml-cpu/traits.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/repack.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/unary-ops.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/binary-ops.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/vec.cpp
${LLAMACPP_LIB_DIR}/ggml-cpu/ops.cpp
${LLAMACPP_LIB_DIR}/ggml-opt.cpp
${LLAMACPP_LIB_DIR}/ggml-threading.cpp
${LLAMACPP_LIB_DIR}/ggml-quants.c
${LLAMACPP_LIB_DIR}/gguf.cpp
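# Logging and streaming chat/JSON parsing helpers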
${LLAMACPP_LIB_DIR}/log.cpp
${LLAMACPP_LIB_DIR}/llama-impl.cpp
${LLAMACPP_LIB_DIR}/chat-parser.cpp
${LLAMACPP_LIB_DIR}/json-partial.cpp
${LLAMACPP_LIB_DIR}/regex-partial.cpp
# Multimodal support
${LLAMACPP_LIB_DIR}/tools/mtmd/mtmd.cpp
${LLAMACPP_LIB_DIR}/tools/mtmd/mtmd-audio.cpp
${LLAMACPP_LIB_DIR}/tools/mtmd/clip.cpp
${LLAMACPP_LIB_DIR}/tools/mtmd/mtmd-helper.cpp
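# llama core: model loading, context, KV cache, sampling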
${LLAMACPP_LIB_DIR}/llama-grammar.cpp
${LLAMACPP_LIB_DIR}/llama-sampling.cpp
${LLAMACPP_LIB_DIR}/llama-vocab.cpp
${LLAMACPP_LIB_DIR}/llama-adapter.cpp
${LLAMACPP_LIB_DIR}/llama-chat.cpp
${LLAMACPP_LIB_DIR}/llama-context.cpp
${LLAMACPP_LIB_DIR}/llama-arch.cpp
${LLAMACPP_LIB_DIR}/llama-batch.cpp
${LLAMACPP_LIB_DIR}/llama-cparams.cpp
${LLAMACPP_LIB_DIR}/llama-hparams.cpp
${LLAMACPP_LIB_DIR}/llama.cpp
${LLAMACPP_LIB_DIR}/llama-model.cpp
${LLAMACPP_LIB_DIR}/llama-model-loader.cpp
${LLAMACPP_LIB_DIR}/llama-model-saver.cpp
${LLAMACPP_LIB_DIR}/llama-kv-cache.cpp
${LLAMACPP_LIB_DIR}/llama-kv-cache-iswa.cpp
${LLAMACPP_LIB_DIR}/llama-memory-hybrid.cpp
${LLAMACPP_LIB_DIR}/llama-memory-recurrent.cpp
${LLAMACPP_LIB_DIR}/llama-mmap.cpp
${LLAMACPP_LIB_DIR}/llama-memory.cpp
${LLAMACPP_LIB_DIR}/llama-io.cpp
${LLAMACPP_LIB_DIR}/llama-graph.cpp
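# Common utilities, unicode handling, and chat templating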
${LLAMACPP_LIB_DIR}/sampling.cpp
${LLAMACPP_LIB_DIR}/unicode-data.cpp
${LLAMACPP_LIB_DIR}/unicode.cpp
${LLAMACPP_LIB_DIR}/common.cpp
${LLAMACPP_LIB_DIR}/chat.cpp
${LLAMACPP_LIB_DIR}/json-schema-to-grammar.cpp
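# Header-only dependencies (nlohmann/json, minja); listed for IDE indexing, not compiled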
${LLAMACPP_LIB_DIR}/nlohmann/json.hpp
${LLAMACPP_LIB_DIR}/nlohmann/json_fwd.hpp
${LLAMACPP_LIB_DIR}/minja/minja.hpp
${LLAMACPP_LIB_DIR}/minja/chat-template.hpp
${LLAMACPP_LIB_DIR}/anyascii.c
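# Capacitor plugin sources (core bridge, completion, TTS)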
${LLAMACPP_LIB_DIR}/cap-llama.cpp
${LLAMACPP_LIB_DIR}/cap-completion.cpp
${LLAMACPP_LIB_DIR}/cap-tts.cpp
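# Android JNI bridge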
${CMAKE_SOURCE_DIR}/jni-utils.h
${CMAKE_SOURCE_DIR}/jni.cpp
)
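# Android system log library (liblog), used for native logging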
find_library(LOG_LIB log)
# x86_64-specific build function for the emulator
function(build_library_x86_64 target_name)
add_library(
${target_name}
SHARED
${SOURCE_FILES}
)
# x86_64-specific compile options for the emulator
target_compile_options(${target_name} PRIVATE
-march=x86-64
-mtune=generic
-mavx2
-mavx
-msse3
-msse
-mfma
-mf16c
-O3
-DNDEBUG
-DLM_GGML_USE_CPU
-DLM_GGML_CPU_GENERIC
-DLM_GGML_USE_AVX2
-DLM_GGML_USE_AVX
-DLM_GGML_USE_SSE3
-DLM_GGML_USE_SSE
-DLM_GGML_USE_FMA
-DLM_GGML_USE_F16C
)
target_link_libraries(${target_name} ${LOG_LIB})
# Set the output name and jniLibs output directory
set_target_properties(${target_name} PROPERTIES
OUTPUT_NAME "llama-cpp-x86_64"
LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/jniLibs/x86_64"
)
endfunction()
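# Hedged sketch (not part of the original build): physical Android devices use
# the arm64-v8a ABI, so an analogous build function would swap the x86 SIMD
# flags for ARM ones. The -march string and the single LM_GGML_USE_CPU define
# below are assumptions and would need to match what the vendored ggml sources
# expect; the function is only defined, never called, so the build is unchanged.
function(build_library_arm64 target_name)
    add_library(
        ${target_name}
        SHARED
        ${SOURCE_FILES}
    )
    target_compile_options(${target_name} PRIVATE
        # armv8.2-a with fp16 and dot-product extensions (common on recent devices)
        -march=armv8.2-a+fp16+dotprod
        -O3
        -DNDEBUG
        -DLM_GGML_USE_CPU
    )
    target_link_libraries(${target_name} ${LOG_LIB})
    set_target_properties(${target_name} PROPERTIES
        OUTPUT_NAME "llama-cpp-arm64"
        LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/jniLibs/arm64-v8a"
    )
endfunction()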
# Build x86_64 library for emulator
build_library_x86_64(llama-cpp-x86_64)
# Print build information
message(STATUS "Building llama-cpp for Android x86_64 (emulator)")
message(STATUS "Source directory: ${LLAMACPP_LIB_DIR}")
message(STATUS "Architecture: x86_64")
message(STATUS "Optimizations: AVX2, AVX, SSE3, SSE, FMA, F16C")
message(STATUS "Output directory: ${CMAKE_CURRENT_SOURCE_DIR}/jniLibs/x86_64")