Compare commits

...

3 Commits

Author SHA1 Message Date
Ettore Di Giacinto
59bfc67ead workaround upstream issue
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
2024-07-24 11:02:58 +02:00
Ettore Di Giacinto
f80b6dfc2d test
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
2024-07-24 10:23:41 +02:00
Ettore Di Giacinto
4c16957448 deps(llama.cpp): bump, try to fix sycl linking issues
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
2024-07-24 08:59:18 +02:00
4 changed files with 31 additions and 7 deletions

View File

@@ -8,7 +8,7 @@ DETECT_LIBS?=true
 # llama.cpp versions
 GOLLAMA_REPO?=https://github.com/go-skynet/go-llama.cpp
 GOLLAMA_VERSION?=2b57a8ae43e4699d3dc5d1496a1ccd42922993be
-CPPLLAMA_VERSION?=081fe431aa8fb6307145c4feb3eed4f48cab19f8
+CPPLLAMA_VERSION?=b841d0740855c5af1344a81f261139a45a2b39ee
 # gpt4all version
 GPT4ALL_REPO?=https://github.com/nomic-ai/gpt4all
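
For context, CPPLLAMA_VERSION pins the exact llama.cpp commit the build is expected to use; below is a minimal shell sketch of fetching the sources at that pin (the clone path is illustrative, not necessarily the project's actual layout):

    # Illustrative only: clone llama.cpp and check out the commit pinned above
    CPPLLAMA_VERSION=b841d0740855c5af1344a81f261139a45a2b39ee
    git clone https://github.com/ggerganov/llama.cpp sources/llama.cpp
    git -C sources/llama.cpp checkout "${CPPLLAMA_VERSION}"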

View File

@@ -75,11 +75,24 @@ add_library(hw_grpc_proto
   ${hw_proto_hdrs} )
 add_executable(${TARGET} grpc-server.cpp utils.hpp json.hpp)
-target_link_libraries(${TARGET} PRIVATE common llama myclip ${CMAKE_THREAD_LIBS_INIT} absl::flags hw_grpc_proto
-  absl::flags_parse
-  gRPC::${_REFLECTION}
-  gRPC::${_GRPC_GRPCPP}
-  protobuf::${_PROTOBUF_LIBPROTOBUF})
+# Conditionally link SYCL to grpc-server
+# https://github.com/ggerganov/llama.cpp/issues/8665
+if ( DEFINED ENV{ONEAPI_ROOT})
+  target_link_libraries(${TARGET} PRIVATE common llama myclip ${CMAKE_THREAD_LIBS_INIT} absl::flags hw_grpc_proto
+    absl::flags_parse
+    gRPC::${_REFLECTION}
+    gRPC::${_GRPC_GRPCPP}
+    protobuf::${_PROTOBUF_LIBPROTOBUF}
+    sycl)
+else()
+  target_link_libraries(${TARGET} PRIVATE common llama myclip ${CMAKE_THREAD_LIBS_INIT} absl::flags hw_grpc_proto
+    absl::flags_parse
+    gRPC::${_REFLECTION}
+    gRPC::${_GRPC_GRPCPP}
+    protobuf::${_PROTOBUF_LIBPROTOBUF})
+endif()
 target_compile_features(${TARGET} PRIVATE cxx_std_11)
 if(TARGET BUILD_INFO)
   add_dependencies(${TARGET} BUILD_INFO)
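
A rough way to verify which branch of the conditional took effect is to inspect the resulting binary; a sketch assuming a Linux build, with the path to the grpc-server binary being illustrative:

    # Illustrative check: was the gRPC server linked against a SYCL runtime?
    ldd ./grpc-server | grep -i sycl || echo "no SYCL library linked"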

View File

@@ -0,0 +1,8 @@
+# https://github.com/ggerganov/llama.cpp/issues/8665
+add_executable(rpc-server rpc-server.cpp)
+if ( DEFINED ENV{ONEAPI_ROOT})
+  target_link_libraries(rpc-server PRIVATE ggml llama sycl)
+else()
+  target_link_libraries(rpc-server PRIVATE ggml llama)
+endif()
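
The conditional keys off the ONEAPI_ROOT environment variable, which the Intel oneAPI toolkit exports via its setvars.sh script; a sketch assuming the default install prefix:

    # Illustrative: make sure ONEAPI_ROOT is set before configuring, so the
    # SYCL branch of the CMake conditional is taken
    source /opt/intel/oneapi/setvars.sh   # adjust the prefix if oneAPI lives elsewhere
    echo "${ONEAPI_ROOT:?ONEAPI_ROOT is not set}"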

View File

@@ -17,4 +17,7 @@ cp -rfv llama.cpp/examples/llava/clip.h llama.cpp/examples/grpc-server/clip.h
 cp -rfv llama.cpp/examples/llava/llava.cpp llama.cpp/examples/grpc-server/llava.cpp
 echo '#include "llama.h"' > llama.cpp/examples/grpc-server/llava.h
 cat llama.cpp/examples/llava/llava.h >> llama.cpp/examples/grpc-server/llava.h
 cp -rfv llama.cpp/examples/llava/clip.cpp llama.cpp/examples/grpc-server/clip.cpp
+cp -rfv llama.cpp/examples/llava/clip.cpp llama.cpp/examples/grpc-server/clip.cpp
+# https://github.com/ggerganov/llama.cpp/issues/8665
+cp -rfv CMakeLists.txt.rpc-8662 llama.cpp/examples/rpc/CMakeLists.txt
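
A quick way to confirm the override landed where the build expects it, assuming the same relative layout used by the script above:

    # Illustrative check: the copied CMakeLists should contain the ONEAPI_ROOT conditional
    grep -q ONEAPI_ROOT llama.cpp/examples/rpc/CMakeLists.txt \
      && echo "rpc CMakeLists override applied" \
      || echo "override missing"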