cui-llama.rn
Fork of llama.rn for ChatterUI
cmake_minimum_required(VERSION 3.16)
project(rnllama VERSION 1.0.0 LANGUAGES CXX C)
find_program(CCACHE_FOUND ccache)
if(CCACHE_FOUND)
    set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
    set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
endif(CCACHE_FOUND)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# iOS specific settings
set(CMAKE_OSX_DEPLOYMENT_TARGET 13.0)
set(CMAKE_XCODE_ATTRIBUTE_ENABLE_BITCODE NO)
# Dependencies and compile options
add_definitions(
    -DNDEBUG
    -O3
    -DLM_GGML_USE_CPU
    -DLM_GGML_USE_ACCELERATE
    -DLM_GGML_USE_METAL
    -DLM_GGML_METAL_USE_BF16
)
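# For universal (arm64;x86_64) builds, fall back to the generic CPU kernels;
# the arch-specific sources selected below are only added for arm64-only builds.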
if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64;x86_64")
add_definitions(-DLM_GGML_CPU_GENERIC)
endif ()
set(SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../cpp)
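# arm64-only builds pull in the Arm-optimized quantization and repacking kernels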
if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64")
set(SOURCE_FILES_ARCH
${SOURCE_DIR}/ggml-cpu/arch/arm/quants.c
${SOURCE_DIR}/ggml-cpu/arch/arm/repack.cpp
)
endif ()
# Define public headers
set(PUBLIC_HEADERS
    ${SOURCE_DIR}/rn-llama.h
    ${SOURCE_DIR}/llama.h
    ${SOURCE_DIR}/llama-impl.h
    ${SOURCE_DIR}/ggml.h
)
# Create library target
add_library(rnllama SHARED
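    # ggml core, allocator, and backend registry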
    ${SOURCE_DIR}/ggml.c
    ${SOURCE_DIR}/ggml-alloc.c
    ${SOURCE_DIR}/ggml-backend.cpp
    ${SOURCE_DIR}/ggml-backend-reg.cpp
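    # CPU backend (AMX, quantization, repacking, and per-op kernels)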
    ${SOURCE_DIR}/ggml-cpu/amx/amx.cpp
    ${SOURCE_DIR}/ggml-cpu/amx/mmq.cpp
    ${SOURCE_DIR}/ggml-cpu/ggml-cpu.c
    ${SOURCE_DIR}/ggml-cpu/ggml-cpu.cpp
    ${SOURCE_DIR}/ggml-cpu/quants.c
    ${SOURCE_DIR}/ggml-cpu/traits.cpp
    ${SOURCE_DIR}/ggml-cpu/repack.cpp
    ${SOURCE_DIR}/ggml-cpu/unary-ops.cpp
    ${SOURCE_DIR}/ggml-cpu/binary-ops.cpp
    ${SOURCE_DIR}/ggml-cpu/vec.cpp
    ${SOURCE_DIR}/ggml-cpu/ops.cpp
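    # Metal backend and remaining ggml modules (optimizer, threading, quants, GGUF)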
    ${SOURCE_DIR}/ggml-metal.m
    ${SOURCE_DIR}/ggml-opt.cpp
    ${SOURCE_DIR}/ggml-threading.cpp
    ${SOURCE_DIR}/ggml-quants.c
    ${SOURCE_DIR}/gguf.cpp
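    # logging and llama core (model, context, sampling, vocab, KV cache, memory)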
    ${SOURCE_DIR}/log.cpp
    ${SOURCE_DIR}/llama-impl.cpp
    ${SOURCE_DIR}/llama-grammar.cpp
    ${SOURCE_DIR}/llama-sampling.cpp
    ${SOURCE_DIR}/llama-vocab.cpp
    ${SOURCE_DIR}/llama-adapter.cpp
    ${SOURCE_DIR}/llama-chat.cpp
    ${SOURCE_DIR}/llama-context.cpp
    ${SOURCE_DIR}/llama-arch.cpp
    ${SOURCE_DIR}/llama-batch.cpp
    ${SOURCE_DIR}/llama-cparams.cpp
    ${SOURCE_DIR}/llama-hparams.cpp
    ${SOURCE_DIR}/llama.cpp
    ${SOURCE_DIR}/llama-model.cpp
    ${SOURCE_DIR}/llama-model-loader.cpp
    ${SOURCE_DIR}/llama-model-saver.cpp
    ${SOURCE_DIR}/llama-mmap.cpp
    ${SOURCE_DIR}/llama-kv-cache-unified.cpp
    ${SOURCE_DIR}/llama-kv-cache-unified-iswa.cpp
    ${SOURCE_DIR}/llama-memory-hybrid.cpp
    ${SOURCE_DIR}/llama-memory-recurrent.cpp
    ${SOURCE_DIR}/llama-memory.cpp
    ${SOURCE_DIR}/llama-io.cpp
    ${SOURCE_DIR}/llama-graph.cpp
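    # common utilities: sampling, unicode, chat templates, grammar/JSON handling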
    ${SOURCE_DIR}/sampling.cpp
    ${SOURCE_DIR}/unicode-data.cpp
    ${SOURCE_DIR}/unicode.cpp
    ${SOURCE_DIR}/common.cpp
    ${SOURCE_DIR}/chat.cpp
    ${SOURCE_DIR}/json-schema-to-grammar.cpp
    ${SOURCE_DIR}/minja/minja.hpp
    ${SOURCE_DIR}/minja/chat-template.hpp
    ${SOURCE_DIR}/nlohmann/json.hpp
    ${SOURCE_DIR}/nlohmann/json_fwd.hpp
    ${SOURCE_DIR}/chat-parser.cpp
    ${SOURCE_DIR}/json-partial.cpp
    ${SOURCE_DIR}/regex-partial.cpp
    # Multimodal support
    ${SOURCE_DIR}/tools/mtmd/mtmd.cpp
    ${SOURCE_DIR}/tools/mtmd/mtmd-audio.cpp
    ${SOURCE_DIR}/tools/mtmd/clip.cpp
    ${SOURCE_DIR}/tools/mtmd/mtmd-helper.cpp
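    # transliteration helper, React Native binding, and optional arch-specific sources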
    ${SOURCE_DIR}/anyascii.c
    ${SOURCE_DIR}/rn-llama.cpp
    ${SOURCE_FILES_ARCH}
)
# Setup include directories
target_include_directories(rnllama
    PUBLIC
        $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../cpp>
        $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../cpp/ggml-cpu>
        $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../cpp/tools/mtmd>
        $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../cpp/minja>
        $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../cpp/nlohmann>
        $<INSTALL_INTERFACE:include>
)
# Link required frameworks
target_link_libraries(rnllama PRIVATE
"-framework Accelerate"
"-framework Foundation"
"-framework Metal"
"-framework MetalKit"
)
# Set properties for framework
set_target_properties(rnllama PROPERTIES
    MACOSX_FRAMEWORK_IDENTIFIER "com.rnllama"
    MACOSX_FRAMEWORK_BUNDLE_VERSION 1.0.0
    MACOSX_FRAMEWORK_SHORT_VERSION_STRING 1.0.0
    FRAMEWORK TRUE
    FRAMEWORK_VERSION 1.0.0
    VERSION 1.0.0
    PUBLIC_HEADER "${PUBLIC_HEADERS}"
    XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC NO
)
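
# Note: the $<INSTALL_INTERFACE:include> entry above assumes an install step
# that this file does not define. A minimal sketch of such a rule (an
# assumption, not part of the upstream build) could look like:
#
#   install(TARGETS rnllama FRAMEWORK DESTINATION Frameworks)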