From ed322bf59f0dcc4d3c7329c829e52e6cbcd02291 Mon Sep 17 00:00:00 2001
From: cryptk <421501+cryptk@users.noreply.github.com>
Date: Mon, 5 Aug 2024 11:38:33 -0500
Subject: [PATCH] fix: ensure correct version of torch is always installed based on BUILD_TYPE (#2890)

* fix: ensure correct version of torch is always installed based on BUILD_TYPE

Signed-off-by: Chris Jowett <421501+cryptk@users.noreply.github.com>

* Move causal-conv1d installation to build_types

Signed-off-by: mudler

* Move mamba-ssm install to build-type requirements.txt

Signed-off-by: mudler

---------

Signed-off-by: Chris Jowett <421501+cryptk@users.noreply.github.com>
Signed-off-by: mudler
Co-authored-by: Ettore Di Giacinto
Co-authored-by: mudler
---
 backend/python/autogptq/requirements-cublas11.txt | 2 ++
 backend/python/autogptq/requirements-cublas12.txt | 1 +
 backend/python/autogptq/requirements.txt | 1 -
 backend/python/bark/requirements-cublas11.txt | 3 +++
 backend/python/bark/requirements-cublas12.txt | 2 ++
 backend/python/common/libbackend.sh | 7 +++++++
 backend/python/coqui/requirements-cublas11.txt | 3 +++
 backend/python/coqui/requirements-cublas12.txt | 2 ++
 backend/python/diffusers/requirements-cublas11.txt | 2 ++
 backend/python/diffusers/requirements-cublas12.txt | 1 +
 backend/python/diffusers/requirements.txt | 1 -
 backend/python/exllama/requirements-cublas11.txt | 2 ++
 backend/python/exllama/requirements-cublas12.txt | 1 +
 backend/python/exllama/requirements.txt | 1 -
 backend/python/exllama2/requirements-cublas11.txt | 2 ++
 backend/python/exllama2/requirements-cublas12.txt | 1 +
 backend/python/exllama2/requirements.txt | 1 -
 backend/python/mamba/requirements-after.txt | 2 ++
 backend/python/mamba/requirements-cpu.txt | 1 +
 backend/python/mamba/requirements-cublas11.txt | 2 ++
 backend/python/mamba/requirements-cublas12.txt | 1 +
 backend/python/mamba/requirements-install.txt | 3 +--
 backend/python/mamba/requirements.txt | 2 --
 backend/python/openvoice/requirements-cublas11.txt | 2 ++
 backend/python/openvoice/requirements-cublas12.txt | 1 +
 backend/python/parler-tts/requirements-cublas11.txt | 3 +++
 backend/python/parler-tts/requirements-cublas12.txt | 2 ++
 backend/python/parler-tts/requirements.txt | 1 -
 backend/python/petals/requirements-cublas11.txt | 2 ++
 backend/python/petals/requirements-cublas12.txt | 1 +
 backend/python/rerankers/requirements-cublas11.txt | 2 ++
 backend/python/rerankers/requirements-cublas12.txt | 1 +
 .../python/sentencetransformers/requirements-cublas11.txt | 2 ++
 .../python/sentencetransformers/requirements-cublas12.txt | 1 +
 .../python/transformers-musicgen/requirements-cublas11.txt | 2 ++
 .../python/transformers-musicgen/requirements-cublas12.txt | 1 +
 backend/python/transformers-musicgen/requirements.txt | 1 -
 backend/python/transformers/requirements-cublas11.txt | 2 ++
 backend/python/transformers/requirements-cublas12.txt | 1 +
 backend/python/transformers/requirements.txt | 1 -
 backend/python/vall-e-x/requirements-cublas11.txt | 3 +++
 backend/python/vall-e-x/requirements-cublas12.txt | 2 ++
 backend/python/vllm/requirements-cublas.txt | 1 -
 backend/python/vllm/requirements-cublas11.txt | 3 +++
 backend/python/vllm/requirements-cublas12.txt | 2 ++
 45 files changed, 69 insertions(+), 12 deletions(-)
 create mode 100644 backend/python/autogptq/requirements-cublas11.txt
 create mode 100644 backend/python/autogptq/requirements-cublas12.txt
 create mode 100644 backend/python/bark/requirements-cublas11.txt
 create mode 100644 backend/python/bark/requirements-cublas12.txt
 create mode 100644 backend/python/coqui/requirements-cublas11.txt
 create mode 100644 backend/python/coqui/requirements-cublas12.txt
 create mode 100644 backend/python/diffusers/requirements-cublas11.txt
 create mode 100644 backend/python/diffusers/requirements-cublas12.txt
 create mode 100644 backend/python/exllama/requirements-cublas11.txt
 create mode 100644 backend/python/exllama/requirements-cublas12.txt
 create mode 100644 backend/python/exllama2/requirements-cublas11.txt
 create mode 100644 backend/python/exllama2/requirements-cublas12.txt
 create mode 100644 backend/python/mamba/requirements-after.txt
 create mode 100644 backend/python/mamba/requirements-cpu.txt
 create mode 100644 backend/python/mamba/requirements-cublas11.txt
 create mode 100644 backend/python/mamba/requirements-cublas12.txt
 create mode 100644 backend/python/openvoice/requirements-cublas11.txt
 create mode 100644 backend/python/openvoice/requirements-cublas12.txt
 create mode 100644 backend/python/parler-tts/requirements-cublas11.txt
 create mode 100644 backend/python/parler-tts/requirements-cublas12.txt
 create mode 100644 backend/python/petals/requirements-cublas11.txt
 create mode 100644 backend/python/petals/requirements-cublas12.txt
 create mode 100644 backend/python/rerankers/requirements-cublas11.txt
 create mode 100644 backend/python/rerankers/requirements-cublas12.txt
 create mode 100644 backend/python/sentencetransformers/requirements-cublas11.txt
 create mode 100644 backend/python/sentencetransformers/requirements-cublas12.txt
 create mode 100644 backend/python/transformers-musicgen/requirements-cublas11.txt
 create mode 100644 backend/python/transformers-musicgen/requirements-cublas12.txt
 create mode 100644 backend/python/transformers/requirements-cublas11.txt
 create mode 100644 backend/python/transformers/requirements-cublas12.txt
 create mode 100644 backend/python/vall-e-x/requirements-cublas11.txt
 create mode 100644 backend/python/vall-e-x/requirements-cublas12.txt
 delete mode 100644 backend/python/vllm/requirements-cublas.txt
 create mode 100644 backend/python/vllm/requirements-cublas11.txt
 create mode 100644 backend/python/vllm/requirements-cublas12.txt

diff --git a/backend/python/autogptq/requirements-cublas11.txt b/backend/python/autogptq/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/autogptq/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/autogptq/requirements-cublas12.txt b/backend/python/autogptq/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/autogptq/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/autogptq/requirements.txt b/backend/python/autogptq/requirements.txt
index 7a1bf85f..078c015f 100644
--- a/backend/python/autogptq/requirements.txt
+++ b/backend/python/autogptq/requirements.txt
@@ -2,6 +2,5 @@ accelerate
 auto-gptq==0.7.1
 grpcio==1.65.1
 protobuf
-torch
 certifi
 transformers
\ No newline at end of file
diff --git a/backend/python/bark/requirements-cublas11.txt b/backend/python/bark/requirements-cublas11.txt
new file mode 100644
index 00000000..0de92979
--- /dev/null
+++ b/backend/python/bark/requirements-cublas11.txt
@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/bark/requirements-cublas12.txt b/backend/python/bark/requirements-cublas12.txt
new file mode 100644
index 00000000..6c3c7e7a
--- /dev/null
+++ b/backend/python/bark/requirements-cublas12.txt
@@ -0,0 +1,2 @@
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/common/libbackend.sh b/backend/python/common/libbackend.sh
index e8dfea03..7287fb95 100644
--- a/backend/python/common/libbackend.sh
+++ b/backend/python/common/libbackend.sh
@@ -122,6 +122,13 @@ function installRequirements() {
         requirementFiles+=("${MY_DIR}/requirements-${BUILD_PROFILE}.txt")
     fi
 
+    # if BUILD_TYPE is empty, we are a CPU build, so we should try to install the CPU requirements
+    if [ "x${BUILD_TYPE}" == "x" ]; then
+        requirementFiles+=("${MY_DIR}/requirements-cpu.txt")
+    fi
+
+    requirementFiles+=("${MY_DIR}/requirements-after.txt")
+
     for reqFile in ${requirementFiles[@]}; do
         if [ -f ${reqFile} ]; then
             echo "starting requirements install for ${reqFile}"
diff --git a/backend/python/coqui/requirements-cublas11.txt b/backend/python/coqui/requirements-cublas11.txt
new file mode 100644
index 00000000..0de92979
--- /dev/null
+++ b/backend/python/coqui/requirements-cublas11.txt
@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/coqui/requirements-cublas12.txt b/backend/python/coqui/requirements-cublas12.txt
new file mode 100644
index 00000000..6c3c7e7a
--- /dev/null
+++ b/backend/python/coqui/requirements-cublas12.txt
@@ -0,0 +1,2 @@
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/diffusers/requirements-cublas11.txt b/backend/python/diffusers/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/diffusers/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/diffusers/requirements-cublas12.txt b/backend/python/diffusers/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/diffusers/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/diffusers/requirements.txt b/backend/python/diffusers/requirements.txt
index 6f04d677..ea707bb7 100644
--- a/backend/python/diffusers/requirements.txt
+++ b/backend/python/diffusers/requirements.txt
@@ -8,6 +8,5 @@ opencv-python
 pillow
 protobuf
 sentencepiece
-torch
 transformers
 certifi
diff --git a/backend/python/exllama/requirements-cublas11.txt b/backend/python/exllama/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/exllama/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/exllama/requirements-cublas12.txt b/backend/python/exllama/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/exllama/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/exllama/requirements.txt b/backend/python/exllama/requirements.txt
index 2aab2631..b06efcea 100644
--- a/backend/python/exllama/requirements.txt
+++ b/backend/python/exllama/requirements.txt
@@ -1,6 +1,5 @@
 grpcio==1.65.0
 protobuf
-torch
 transformers
 certifi
 setuptools
\ No newline at end of file
diff --git a/backend/python/exllama2/requirements-cublas11.txt b/backend/python/exllama2/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/exllama2/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/exllama2/requirements-cublas12.txt b/backend/python/exllama2/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/exllama2/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/exllama2/requirements.txt b/backend/python/exllama2/requirements.txt
index 6aae273c..f2dfa976 100644
--- a/backend/python/exllama2/requirements.txt
+++ b/backend/python/exllama2/requirements.txt
@@ -2,6 +2,5 @@ accelerate
 grpcio==1.65.1
 protobuf
 certifi
-torch
 wheel
 setuptools
\ No newline at end of file
diff --git a/backend/python/mamba/requirements-after.txt b/backend/python/mamba/requirements-after.txt
new file mode 100644
index 00000000..ea6890eb
--- /dev/null
+++ b/backend/python/mamba/requirements-after.txt
@@ -0,0 +1,2 @@
+causal-conv1d==1.4.0
+mamba-ssm==2.2.2
\ No newline at end of file
diff --git a/backend/python/mamba/requirements-cpu.txt b/backend/python/mamba/requirements-cpu.txt
new file mode 100644
index 00000000..08ed5eeb
--- /dev/null
+++ b/backend/python/mamba/requirements-cpu.txt
@@ -0,0 +1 @@
+torch
\ No newline at end of file
diff --git a/backend/python/mamba/requirements-cublas11.txt b/backend/python/mamba/requirements-cublas11.txt
new file mode 100644
index 00000000..2f89bd95
--- /dev/null
+++ b/backend/python/mamba/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
\ No newline at end of file
diff --git a/backend/python/mamba/requirements-cublas12.txt b/backend/python/mamba/requirements-cublas12.txt
new file mode 100644
index 00000000..08ed5eeb
--- /dev/null
+++ b/backend/python/mamba/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
\ No newline at end of file
diff --git a/backend/python/mamba/requirements-install.txt b/backend/python/mamba/requirements-install.txt
index 2fc9a07c..69d263f0 100644
--- a/backend/python/mamba/requirements-install.txt
+++ b/backend/python/mamba/requirements-install.txt
@@ -3,5 +3,4 @@
 # https://github.com/Dao-AILab/causal-conv1d/issues/24
 packaging
 setuptools
-wheel
-torch==2.3.1
\ No newline at end of file
+wheel
\ No newline at end of file
diff --git a/backend/python/mamba/requirements.txt b/backend/python/mamba/requirements.txt
index 2aac2cda..068bf336 100644
--- a/backend/python/mamba/requirements.txt
+++ b/backend/python/mamba/requirements.txt
@@ -1,5 +1,3 @@
-causal-conv1d==1.4.0
-mamba-ssm==2.2.2
 grpcio==1.65.1
 protobuf
 certifi
diff --git a/backend/python/openvoice/requirements-cublas11.txt b/backend/python/openvoice/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/openvoice/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/openvoice/requirements-cublas12.txt b/backend/python/openvoice/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/openvoice/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/parler-tts/requirements-cublas11.txt b/backend/python/parler-tts/requirements-cublas11.txt
new file mode 100644
index 00000000..0de92979
--- /dev/null
+++ b/backend/python/parler-tts/requirements-cublas11.txt
@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/parler-tts/requirements-cublas12.txt b/backend/python/parler-tts/requirements-cublas12.txt
new file mode 100644
index 00000000..6c3c7e7a
--- /dev/null
+++ b/backend/python/parler-tts/requirements-cublas12.txt
@@ -0,0 +1,2 @@
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/parler-tts/requirements.txt b/backend/python/parler-tts/requirements.txt
index 147cad9a..1dfa6675 100644
--- a/backend/python/parler-tts/requirements.txt
+++ b/backend/python/parler-tts/requirements.txt
@@ -1,7 +1,6 @@
 accelerate
 grpcio==1.65.1
 protobuf
-torch
 git+https://github.com/huggingface/parler-tts.git@10016fb0300c0dc31a0fb70e26f3affee7b62f16
 certifi
 transformers
\ No newline at end of file
diff --git a/backend/python/petals/requirements-cublas11.txt b/backend/python/petals/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/petals/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/petals/requirements-cublas12.txt b/backend/python/petals/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/petals/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/rerankers/requirements-cublas11.txt b/backend/python/rerankers/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/rerankers/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/rerankers/requirements-cublas12.txt b/backend/python/rerankers/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/rerankers/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/sentencetransformers/requirements-cublas11.txt b/backend/python/sentencetransformers/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/sentencetransformers/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/sentencetransformers/requirements-cublas12.txt b/backend/python/sentencetransformers/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/sentencetransformers/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/transformers-musicgen/requirements-cublas11.txt b/backend/python/transformers-musicgen/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/transformers-musicgen/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/transformers-musicgen/requirements-cublas12.txt b/backend/python/transformers-musicgen/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/transformers-musicgen/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/transformers-musicgen/requirements.txt b/backend/python/transformers-musicgen/requirements.txt
index 8ffa3c31..ac758034 100644
--- a/backend/python/transformers-musicgen/requirements.txt
+++ b/backend/python/transformers-musicgen/requirements.txt
@@ -2,6 +2,5 @@ accelerate
 transformers
 grpcio==1.65.1
 protobuf
-torch
 scipy==1.14.0
 certifi
\ No newline at end of file
diff --git a/backend/python/transformers/requirements-cublas11.txt b/backend/python/transformers/requirements-cublas11.txt
new file mode 100644
index 00000000..6461b696
--- /dev/null
+++ b/backend/python/transformers/requirements-cublas11.txt
@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
diff --git a/backend/python/transformers/requirements-cublas12.txt b/backend/python/transformers/requirements-cublas12.txt
new file mode 100644
index 00000000..12c6d5d5
--- /dev/null
+++ b/backend/python/transformers/requirements-cublas12.txt
@@ -0,0 +1 @@
+torch
diff --git a/backend/python/transformers/requirements.txt b/backend/python/transformers/requirements.txt
index 55925b32..c32fe1f8 100644
--- a/backend/python/transformers/requirements.txt
+++ b/backend/python/transformers/requirements.txt
@@ -2,7 +2,6 @@ accelerate
 transformers
 grpcio==1.65.1
 protobuf
-torch
 certifi
 intel-extension-for-transformers
 bitsandbytes
diff --git a/backend/python/vall-e-x/requirements-cublas11.txt b/backend/python/vall-e-x/requirements-cublas11.txt
new file mode 100644
index 00000000..0de92979
--- /dev/null
+++ b/backend/python/vall-e-x/requirements-cublas11.txt
@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/vall-e-x/requirements-cublas12.txt b/backend/python/vall-e-x/requirements-cublas12.txt
new file mode 100644
index 00000000..6c3c7e7a
--- /dev/null
+++ b/backend/python/vall-e-x/requirements-cublas12.txt
@@ -0,0 +1,2 @@
+torch
+torchaudio
\ No newline at end of file
diff --git a/backend/python/vllm/requirements-cublas.txt b/backend/python/vllm/requirements-cublas.txt
deleted file mode 100644
index 7bfe8efe..00000000
--- a/backend/python/vllm/requirements-cublas.txt
+++ /dev/null
@@ -1 +0,0 @@
-flash-attn
\ No newline at end of file
diff --git a/backend/python/vllm/requirements-cublas11.txt b/backend/python/vllm/requirements-cublas11.txt
new file mode 100644
index 00000000..bed8cea8
--- /dev/null
+++ b/backend/python/vllm/requirements-cublas11.txt
@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+flash-attn
\ No newline at end of file
diff --git a/backend/python/vllm/requirements-cublas12.txt b/backend/python/vllm/requirements-cublas12.txt
new file mode 100644
index 00000000..b6fef4d7
--- /dev/null
+++ b/backend/python/vllm/requirements-cublas12.txt
@@ -0,0 +1,2 @@
+torch
+flash-attn
\ No newline at end of file
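
Note on the libbackend.sh hunk above: the bash sketch below illustrates the requirement-file ordering that installRequirements() ends up with after this patch. It is a simplified sketch, not the verbatim function — the appends for requirements.txt and the BUILD_TYPE/BUILD_PROFILE files, the default values of those variables, and the actual install command all live outside the visible hunk, so their exact form here is assumed.

    # Sketch only: approximates installRequirements() after this patch; not verbatim libbackend.sh.
    MY_DIR=${MY_DIR:-$(pwd)}                    # backend directory (derived differently in the real script)
    BUILD_TYPE=${BUILD_TYPE:-}                  # empty for CPU builds
    BUILD_PROFILE=${BUILD_PROFILE:-${BUILD_TYPE}}   # e.g. cublas11 or cublas12 for CUDA builds (assumed default)

    requirementFiles=("${MY_DIR}/requirements.txt")

    # Per-build-profile extras now carry torch itself (plus the cu118 extra index for CUDA 11),
    # e.g. requirements-cublas11.txt or requirements-cublas12.txt.
    if [ "x${BUILD_PROFILE}" != "x" ]; then
        requirementFiles+=("${MY_DIR}/requirements-${BUILD_PROFILE}.txt")
    fi

    # New in this patch: an empty BUILD_TYPE means a CPU build.
    if [ "x${BUILD_TYPE}" == "x" ]; then
        requirementFiles+=("${MY_DIR}/requirements-cpu.txt")
    fi

    # New in this patch: packages that must build against the torch installed above
    # (causal-conv1d, mamba-ssm) sit in requirements-after.txt and are installed last.
    requirementFiles+=("${MY_DIR}/requirements-after.txt")

    for reqFile in "${requirementFiles[@]}"; do
        if [ -f "${reqFile}" ]; then
            echo "starting requirements install for ${reqFile}"
            pip install -r "${reqFile}"   # placeholder; the real install command is outside this hunk
        fi
    done

With BUILD_TYPE unset this resolves to requirements.txt, requirements-cpu.txt and requirements-after.txt; for a CUDA build it resolves to requirements.txt, the matching requirements-cublas11.txt or requirements-cublas12.txt, and requirements-after.txt, so torch is always pinned to the right CUDA flavor before anything that compiles against it.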