fix: ensure correct version of torch is always installed based on BUILD_TYPE (#2890)

* fix: ensure correct version of torch is always installed based on BUILD_TYPE

Signed-off-by: Chris Jowett <421501+cryptk@users.noreply.github.com>

* Move causal-conv1d installation to build_types

Signed-off-by: mudler <mudler@localai.io>

* Move mamba-ssm install to build-type requirements.txt

Signed-off-by: mudler <mudler@localai.io>

---------

Signed-off-by: Chris Jowett <421501+cryptk@users.noreply.github.com>
Signed-off-by: mudler <mudler@localai.io>
Co-authored-by: Ettore Di Giacinto <mudler@users.noreply.github.com>
Co-authored-by: mudler <mudler@localai.io>
cryptk 2024-08-05 11:38:33 -05:00 committed by GitHub
parent f15a93b19b
commit ed322bf59f
45 changed files with 69 additions and 12 deletions
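
The pattern repeated across the files below is small: each Python backend's shared requirements.txt drops its torch entry and picks it up instead from a per-build-type file, a CUDA flavor that points pip at the PyTorch cu118 wheel index and a CPU flavor that lists plain PyPI wheels. A minimal sketch of how such a pair is consumed, with illustrative file names since the per-file paths are not preserved in this view:

# Sketch only: one pip run per requirements file, roughly what the
# libbackend.sh loop further down does for each entry it collects.
pip install -r requirements.txt              # shared deps, no torch pin anymore
pip install -r requirements-cublas11.txt     # CUDA build: cu118 extra index + torch (name assumed)
# ...or, when BUILD_TYPE is empty:
pip install -r requirements-cpu.txt          # CPU build: torch straight from PyPI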


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -2,6 +2,5 @@ accelerate
 auto-gptq==0.7.1
 grpcio==1.65.1
 protobuf
-torch
 certifi
 transformers


@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio


@@ -0,0 +1,2 @@
+torch
+torchaudio


@@ -122,6 +122,13 @@ function installRequirements() {
     requirementFiles+=("${MY_DIR}/requirements-${BUILD_PROFILE}.txt")
 fi
 
+# if BUILD_TYPE is empty, we are a CPU build, so we should try to install the CPU requirements
+if [ "x${BUILD_TYPE}" == "x" ]; then
+    requirementFiles+=("${MY_DIR}/requirements-cpu.txt")
+fi
+
+requirementFiles+=("${MY_DIR}/requirements-after.txt")
+
 for reqFile in ${requirementFiles[@]}; do
     if [ -f ${reqFile} ]; then
         echo "starting requirements install for ${reqFile}"


@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio


@@ -0,0 +1,2 @@
+torch
+torchaudio


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -8,6 +8,5 @@ opencv-python
 pillow
 protobuf
 sentencepiece
-torch
 transformers
 certifi


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -1,6 +1,5 @@
 grpcio==1.65.0
 protobuf
-torch
 transformers
 certifi
 setuptools


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -2,6 +2,5 @@ accelerate
 grpcio==1.65.1
 protobuf
 certifi
-torch
 wheel
 setuptools


@@ -0,0 +1,2 @@
+causal-conv1d==1.4.0
+mamba-ssm==2.2.2


@@ -0,0 +1 @@
+torch


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -3,5 +3,4 @@
 # https://github.com/Dao-AILab/causal-conv1d/issues/24
 packaging
 setuptools
-wheel
-torch==2.3.1
+wheel
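
With the hard torch==2.3.1 pin dropped from the install-time requirements here (the mamba backend, going by the causal-conv1d note and the hunk that follows), causal-conv1d and mamba-ssm now compile against whatever torch the build-type file provided. A quick, hedged sanity check before building those CUDA extensions:

# Sketch only: a cu118 build should report a version suffixed with +cu118,
# and CUDA availability on a machine with a visible GPU.
python -c 'import torch; print(torch.__version__, torch.cuda.is_available())'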


@@ -1,5 +1,3 @@
-causal-conv1d==1.4.0
-mamba-ssm==2.2.2
 grpcio==1.65.1
 protobuf
 certifi


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio


@@ -0,0 +1,2 @@
+torch
+torchaudio


@@ -1,7 +1,6 @@
 accelerate
 grpcio==1.65.1
 protobuf
-torch
 git+https://github.com/huggingface/parler-tts.git@10016fb0300c0dc31a0fb70e26f3affee7b62f16
 certifi
 transformers


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -2,6 +2,5 @@ accelerate
 transformers
 grpcio==1.65.1
 protobuf
-torch
 scipy==1.14.0
 certifi


@@ -0,0 +1,2 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch


@@ -0,0 +1 @@
+torch


@@ -2,7 +2,6 @@ accelerate
 transformers
 grpcio==1.65.1
 protobuf
-torch
 certifi
 intel-extension-for-transformers
 bitsandbytes


@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+torchaudio


@@ -0,0 +1,2 @@
+torch
+torchaudio


@@ -1 +0,0 @@
-flash-attn


@@ -0,0 +1,3 @@
+--extra-index-url https://download.pytorch.org/whl/cu118
+torch
+flash-attn


@@ -0,0 +1,2 @@
+torch
+flash-attn