Mirror of https://github.com/mudler/LocalAI.git
fix: OSX Build Files for llama.cpp (#1836)
bot ate my changes, separate branch
commit 45d520f913 (parent 3882130911)
Makefile: 4 changed lines
@@ -463,7 +463,7 @@ backend-assets/grpc/llama: backend-assets/grpc sources/go-llama/libbinding.a
 	$(GOCMD) build -ldflags "$(LD_FLAGS)" -tags "$(GO_TAGS)" -o backend-assets/grpc/llama ./backend/go/llm/llama/
 # TODO: every binary should have its own folder instead, so can have different implementations
 ifeq ($(BUILD_TYPE),metal)
-	cp backend/cpp/llama/llama.cpp/ggml-metal.metal backend-assets/grpc/
+	cp backend/cpp/llama/llama.cpp/ggml-common.h backend-assets/grpc/
 endif
 
 ## BACKEND CPP LLAMA START
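Both this hunk and the next one follow the same pattern: a GNU Make conditional on BUILD_TYPE adds an extra copy step to the rule's recipe so the required runtime asset lands next to the gRPC backend binary. A minimal standalone sketch of that pattern, with placeholder names (out/backend, extra-asset.h, main.go) that are not part of the LocalAI tree:

# Sketch only: BUILD_TYPE-conditional asset staging, as used in the hunks above.
# out/backend, extra-asset.h and main.go are placeholders, not real LocalAI paths.
# (Recipe lines must be indented with a tab.)
BUILD_TYPE ?=

out/backend: main.go
	mkdir -p out
	go build -o out/backend .
ifeq ($(BUILD_TYPE),metal)
	cp extra-asset.h out/
endif

Run as "make BUILD_TYPE=metal out/backend", the copy step is part of the recipe; with any other BUILD_TYPE it is dropped up front, since Make evaluates ifeq while reading the file.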
@@ -494,7 +494,7 @@ backend-assets/grpc/llama-cpp: backend-assets/grpc backend/cpp/llama/grpc-server
 	cp -rfv backend/cpp/llama/grpc-server backend-assets/grpc/llama-cpp
 # TODO: every binary should have its own folder instead, so can have different metal implementations
 ifeq ($(BUILD_TYPE),metal)
-	cp backend/cpp/llama/llama.cpp/build/bin/ggml-metal.metal backend-assets/grpc/
+	cp backend/cpp/llama/llama.cpp/build/bin/ggml-common.h backend-assets/grpc/
 endif
 
 backend-assets/grpc/llama-ggml: backend-assets/grpc sources/go-llama-ggml/libbinding.a
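The llama-cpp (C++ gRPC server) rule gets the same swap, except the header is taken from llama.cpp's build output directory (build/bin) rather than the source checkout. Reconstructed from the context lines above, and showing only the lines the hunk touches, the metal branch of that rule now reads:

ifeq ($(BUILD_TYPE),metal)
	cp backend/cpp/llama/llama.cpp/build/bin/ggml-common.h backend-assets/grpc/
endif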
|
@@ -18,6 +18,9 @@ else ifeq ($(BUILD_TYPE),clblas)
 # If it's hipblas we do have also to set CC=/opt/rocm/llvm/bin/clang CXX=/opt/rocm/llvm/bin/clang++
 else ifeq ($(BUILD_TYPE),hipblas)
 	CMAKE_ARGS+=-DLLAMA_HIPBLAS=ON
+# If it's OSX, embed the metal library for fewer moving parts.
+else ifeq ($(BUILD_TYPE),metal)
+	CMAKE_ARGS+=-DLLAMA_METAL_EMBED_LIBRARY=ON
 endif
 
 ifeq ($(BUILD_TYPE),sycl_f16)