Added backend installation script for Linux

This commit is contained in:
Saifeddine ALOUI 2023-05-20 13:16:01 +02:00
parent 210d9d2715
commit 5a83d9c890
2 changed files with 9 additions and 2 deletions

View File

@ -220,8 +220,8 @@ class ModelProcess:
self._set_config_result['personality_status'] ='failed'
self._set_config_result['errors'].append(f"couldn't load personality:{ex}")
def step_callback(self, text, type):
print(f"Step callback : {text}")
def step_callback(self, text, message_type):
self.generation_queue.put((text,self.id, message_type))
def _run(self):
self._rebuild_model()
self._rebuild_personality()

View File

@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Rebuild llama-cpp-python from source with CUDA (cuBLAS) GPU acceleration enabled.
echo "this will recompile llama-cpp-python to use your hardware with gpu enabled."
# Remove any previously installed (CPU-only) wheel so the rebuilt one takes its place.
pip uninstall llama-cpp-python -y
# Purge pip's wheel cache so a stale prebuilt binary is not silently reinstalled.
pip cache purge
# Ask the llama.cpp build to compile with cuBLAS support; FORCE_CMAKE makes
# pip rebuild from source instead of using a cached/prebuilt wheel.
export CMAKE_ARGS="-DLLAMA_CUBLAS=on"
export FORCE_CMAKE=1
pip install llama-cpp-python --upgrade