cuda : fix CUDA_FLAGS not being applied (llama/10403)

Authored by Diego Devesa on 2024-11-19 14:29:38 +01:00; committed by Georgi Gerganov
parent 8ee767732f
commit 5f6d6919b4
2 changed files with 1 addition and 2 deletions

ggml/src/ggml-blas/CMakeLists.txt

@@ -75,7 +75,6 @@ if (BLAS_FOUND)
         message(STATUS "BLAS found, Includes: ${BLAS_INCLUDE_DIRS}")
-        #add_compile_options(${BLAS_LINKER_FLAGS})
         target_compile_options(ggml-blas PRIVATE ${BLAS_LINKER_FLAGS})
         if (${BLAS_INCLUDE_DIRS} MATCHES "mkl" AND (${GGML_BLAS_VENDOR} MATCHES "Generic" OR ${GGML_BLAS_VENDOR} MATCHES "Intel"))

ggml/src/ggml-cuda/CMakeLists.txt

@@ -149,7 +149,7 @@ if (CUDAToolkit_FOUND)
         list(APPEND CUDA_FLAGS -Xcompiler ${CUDA_CXX_FLAGS_JOINED})
     endif()
-    add_compile_options("$<$<COMPILE_LANGUAGE:CUDA>:${CUDA_FLAGS}>")
+    target_compile_options(ggml-cuda PRIVATE "$<$<COMPILE_LANGUAGE:CUDA>:${CUDA_FLAGS}>")
 else()
     message(FATAL_ERROR "CUDA Toolkit not found")
 endif()
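
Context on the fix: add_compile_options() sets a directory-scoped property that is only copied into targets created after the call, so appending CUDA_FLAGS that way after the ggml-cuda target already exists leaves the flags unapplied. target_compile_options(... PRIVATE ...) attaches the flags to the target itself, independent of ordering. A minimal sketch of the difference, assuming a hypothetical target demo-cuda and an illustrative flag (-lineinfo), neither taken from the ggml build files:

    # CMakeLists.txt (illustrative only; demo-cuda and -lineinfo are placeholders)
    cmake_minimum_required(VERSION 3.18)
    project(cuda_flags_demo LANGUAGES CXX CUDA)

    add_library(demo-cuda STATIC demo.cu)

    # Directory-scoped: only affects targets defined after this line in the
    # current directory, so it does nothing for the already-defined demo-cuda.
    add_compile_options("$<$<COMPILE_LANGUAGE:CUDA>:-lineinfo>")

    # Target-scoped: applies the flag to demo-cuda's CUDA sources regardless of
    # where the target was defined; this is the pattern the commit switches to.
    target_compile_options(demo-cuda PRIVATE "$<$<COMPILE_LANGUAGE:CUDA>:-lineinfo>")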