lollms-webui/installations/install_backend_gpu.sh

8 lines
269 B
Bash
Raw Normal View History

2023-05-20 11:16:01 +00:00
#!/usr/bin/env bash
# Recompile llama-cpp-python with CUDA (cuBLAS) enabled so inference runs on the GPU.
# Requires: pip on PATH, CUDA toolkit installed (needed by the cuBLAS build).
set -euo pipefail

echo "This will recompile llama-cpp-python to use your hardware with GPU enabled."

# First we need to purge any old installation and any cached wheels, so the
# next install actually rebuilds from source instead of reusing a CPU-only wheel.
pip uninstall llama-cpp-python -y
pip cache purge

# CMAKE_ARGS enables the cuBLAS backend; FORCE_CMAKE forces a source build
# even when a prebuilt binary wheel would otherwise be selected.
export CMAKE_ARGS="-DLLAMA_CUBLAS=on"
export FORCE_CMAKE=1

# --no-cache-dir guarantees pip does not shortcut the compile with a cached artifact.
pip install llama-cpp-python --upgrade --no-cache-dir