diff --git a/.dockerignore b/.dockerignore index ea2ea6b2..3954769f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,6 +6,11 @@ examples/chatbot-ui/models examples/rwkv/models examples/**/models Dockerfile* +__pycache__ # SonarQube -.scannerwork \ No newline at end of file +.scannerwork + +# backend virtual environments +**/venv +backend/python/**/source \ No newline at end of file diff --git a/.github/workflows/test-extra.yml b/.github/workflows/test-extra.yml index 1bd342e6..93e3b5a2 100644 --- a/.github/workflows/test-extra.yml +++ b/.github/workflows/test-extra.yml @@ -25,22 +25,14 @@ jobs: run: | sudo apt-get update sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get update && \ - sudo apt-get install -y conda + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh sudo apt-get install -y ca-certificates cmake curl patch python3-pip sudo apt-get install -y libopencv-dev pip install --user grpcio-tools==1.63.0 - sudo rm -rfv /usr/bin/conda || true - - name: Test transformers run: | - export PATH=$PATH:/opt/conda/bin make --jobs=5 --output-sync=target -C backend/python/transformers make --jobs=5 --output-sync=target -C backend/python/transformers test @@ -55,22 +47,14 @@ jobs: run: | sudo apt-get update sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get update && \ - sudo apt-get install -y conda + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh sudo apt-get install -y ca-certificates cmake curl patch python3-pip sudo apt-get install -y libopencv-dev pip install --user grpcio-tools==1.63.0 - sudo rm -rfv /usr/bin/conda || true - - name: Test sentencetransformers run: | - export PATH=$PATH:/opt/conda/bin make --jobs=5 --output-sync=target -C backend/python/sentencetransformers make --jobs=5 --output-sync=target -C backend/python/sentencetransformers test @@ -86,22 +70,14 @@ jobs: run: | sudo apt-get update sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install 
-o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get update && \ - sudo apt-get install -y conda + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh sudo apt-get install -y ca-certificates cmake curl patch python3-pip sudo apt-get install -y libopencv-dev pip install --user grpcio-tools==1.63.0 - - sudo rm -rfv /usr/bin/conda || true - name: Test rerankers run: | - export PATH=$PATH:/opt/conda/bin make --jobs=5 --output-sync=target -C backend/python/rerankers make --jobs=5 --output-sync=target -C backend/python/rerankers test @@ -115,25 +91,16 @@ jobs: - name: Dependencies run: | sudo apt-get update - sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get update && \ - sudo apt-get install -y conda + sudo apt-get install -y build-essential ffmpeg sudo apt-get install -y ca-certificates cmake curl patch python3-pip sudo apt-get install -y libopencv-dev + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh pip install --user grpcio-tools==1.63.0 - - sudo rm -rfv /usr/bin/conda || true - - name: Test diffusers run: | - export PATH=$PATH:/opt/conda/bin - make --jobs=5 --output-sync=target -C backend/python/diffusers - make --jobs=5 --output-sync=target -C backend/python/diffusers test + make --jobs=5 --output-sync=target -C backend/python/diffusers + make --jobs=5 --output-sync=target -C backend/python/diffusers test tests-parler-tts: runs-on: ubuntu-latest @@ -146,22 +113,14 @@ jobs: run: | sudo apt-get update sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] 
https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get update && \ - sudo apt-get install -y conda + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh sudo apt-get install -y ca-certificates cmake curl patch python3-pip sudo apt-get install -y libopencv-dev pip install --user grpcio-tools==1.63.0 - - sudo rm -rfv /usr/bin/conda || true - name: Test parler-tts run: | - export PATH=$PATH:/opt/conda/bin make --jobs=5 --output-sync=target -C backend/python/parler-tts make --jobs=5 --output-sync=target -C backend/python/parler-tts test @@ -176,22 +135,14 @@ jobs: run: | sudo apt-get update sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get update && \ - sudo apt-get install -y conda + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh sudo apt-get install -y ca-certificates cmake curl patch python3-pip sudo apt-get install -y libopencv-dev pip install --user grpcio-tools==1.63.0 - - sudo rm -rfv /usr/bin/conda || true - name: Test transformers-musicgen run: | - export PATH=$PATH:/opt/conda/bin make --jobs=5 --output-sync=target -C backend/python/transformers-musicgen make --jobs=5 --output-sync=target -C backend/python/transformers-musicgen test @@ -208,22 +159,14 @@ jobs: # run: | # sudo apt-get update # sudo apt-get install build-essential ffmpeg - # curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - # sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - # gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - # sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - # sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - # sudo apt-get update && \ - # sudo apt-get install -y conda + # # Install UV + # curl -LsSf https://astral.sh/uv/install.sh | sh # sudo apt-get install -y ca-certificates cmake curl patch python3-pip # sudo apt-get install -y libopencv-dev # pip install --user grpcio-tools==1.63.0 - - # sudo rm -rfv /usr/bin/conda || true # - name: Test petals # run: | - # export PATH=$PATH:/opt/conda/bin # make --jobs=5 --output-sync=target -C backend/python/petals # make --jobs=5 --output-sync=target -C backend/python/petals test @@ -280,22 +223,14 @@ jobs: # run: | # sudo apt-get update # sudo apt-get install build-essential ffmpeg - # curl 
https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - # sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - # gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - # sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - # sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - # sudo apt-get update && \ - # sudo apt-get install -y conda + # # Install UV + # curl -LsSf https://astral.sh/uv/install.sh | sh # sudo apt-get install -y ca-certificates cmake curl patch python3-pip # sudo apt-get install -y libopencv-dev # pip install --user grpcio-tools==1.63.0 - - # sudo rm -rfv /usr/bin/conda || true # - name: Test bark # run: | - # export PATH=$PATH:/opt/conda/bin # make --jobs=5 --output-sync=target -C backend/python/bark # make --jobs=5 --output-sync=target -C backend/python/bark test @@ -313,20 +248,13 @@ jobs: # run: | # sudo apt-get update # sudo apt-get install build-essential ffmpeg - # curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - # sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - # gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - # sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - # sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - # sudo apt-get update && \ - # sudo apt-get install -y conda + # # Install UV + # curl -LsSf https://astral.sh/uv/install.sh | sh # sudo apt-get install -y ca-certificates cmake curl patch python3-pip # sudo apt-get install -y libopencv-dev # pip install --user grpcio-tools==1.63.0 - # sudo rm -rfv /usr/bin/conda || true # - name: Test vllm # run: | - # export PATH=$PATH:/opt/conda/bin # make --jobs=5 --output-sync=target -C backend/python/vllm # make --jobs=5 --output-sync=target -C backend/python/vllm test tests-vallex: @@ -340,20 +268,13 @@ jobs: run: | sudo apt-get update sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get 
update && \ - sudo apt-get install -y conda + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh sudo apt-get install -y ca-certificates cmake curl patch python3-pip sudo apt-get install -y libopencv-dev pip install --user grpcio-tools==1.63.0 - sudo rm -rfv /usr/bin/conda || true - name: Test vall-e-x run: | - export PATH=$PATH:/opt/conda/bin make --jobs=5 --output-sync=target -C backend/python/vall-e-x make --jobs=5 --output-sync=target -C backend/python/vall-e-x test @@ -368,19 +289,11 @@ jobs: run: | sudo apt-get update sudo apt-get install build-essential ffmpeg - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \ - sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ - sudo apt-get update && \ - sudo apt-get install -y conda sudo apt-get install -y ca-certificates cmake curl patch espeak espeak-ng python3-pip + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh pip install --user grpcio-tools==1.63.0 - sudo rm -rfv /usr/bin/conda || true - - name: Test coqui run: | - export PATH=$PATH:/opt/conda/bin - make --jobs=5 --output-sync=target -C backend/python/coqui - make --jobs=5 --output-sync=target -C backend/python/coqui test + make --jobs=5 --output-sync=target -C backend/python/coqui + make --jobs=5 --output-sync=target -C backend/python/coqui test \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6fa003b3..a7146774 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -78,6 +78,8 @@ jobs: sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \ sudo apt-get update && \ sudo apt-get install -y conda + # Install UV + curl -LsSf https://astral.sh/uv/install.sh | sh sudo apt-get install -y ca-certificates cmake patch python3-pip unzip sudo apt-get install -y libopencv-dev diff --git a/.gitignore b/.gitignore index 9338b0c4..07b8dbff 100644 --- a/.gitignore +++ b/.gitignore @@ -46,4 +46,7 @@ prepare *pb2_grpc.py # SonarQube -.scannerwork \ No newline at end of file +.scannerwork + +# backend virtual environments +**/venv \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 1a036443..9680ba5c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -76,26 +76,16 @@ RUN test -n "$TARGETARCH" \ # The requirements-extras target is for any builds with IMAGE_TYPE=extras. 
It should not be placed in this target unless every IMAGE_TYPE=extras build will use it FROM requirements-core AS requirements-extras -RUN apt-get update && \ - apt-get install -y --no-install-recommends gpg && \ - curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \ - install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \ - gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \ - echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list && \ - echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list && \ - apt-get update && \ - apt-get install -y --no-install-recommends \ - conda && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - +RUN curl -LsSf https://astral.sh/uv/install.sh | sh ENV PATH="/root/.cargo/bin:${PATH}" RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y RUN apt-get update && \ apt-get install -y --no-install-recommends \ espeak-ng \ - espeak && \ + espeak \ + python3-dev \ + python3-venv && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* @@ -246,6 +236,7 @@ ARG FFMPEG ARG BUILD_TYPE ARG TARGETARCH ARG IMAGE_TYPE=extras +ARG EXTRA_BACKENDS ARG MAKEFLAGS ENV BUILD_TYPE=${BUILD_TYPE} @@ -257,7 +248,6 @@ ARG CUDA_MAJOR_VERSION=11 ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility ENV NVIDIA_REQUIRE_CUDA="cuda>=${CUDA_MAJOR_VERSION}.0" ENV NVIDIA_VISIBLE_DEVICES=all -ENV PIP_CACHE_PURGE=true # Add FFmpeg RUN if [ "${FFMPEG}" = "true" ]; then \ @@ -290,51 +280,58 @@ COPY --from=builder /build/sources/go-piper/piper-phonemize/pi/lib/* /usr/lib/ # do not let stablediffusion rebuild (requires an older version of absl) COPY --from=builder /build/backend-assets/grpc/stablediffusion ./backend-assets/grpc/stablediffusion -## Duplicated from Makefile to avoid having a big layer that's hard to push -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/autogptq \ +# Change the shell to bash so we can use [[ tests below +SHELL ["/bin/bash", "-c"] +# We try to strike a balance between individual layer size (as that affects total push time) and total image size +# Splitting the backends into more groups with fewer items results in a larger image, but a smaller size for the largest layer +# Splitting the backends into fewer groups with more items results in a smaller image, but a larger size for the largest layer + +RUN if [[ ( "${EXTRA_BACKENDS}" =~ "coqui" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/coqui \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "parler-tts" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/parler-tts \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "diffusers" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/diffusers \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "transformers-musicgen" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/transformers-musicgen \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "exllama1" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/exllama \ ; fi -RUN if [ 
"${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/bark \ + +RUN if [[ ( "${EXTRA_BACKENDS}" =~ "vall-e-x" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/vall-e-x \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "petals" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/petals \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "sentencetransformers" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/sentencetransformers \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "exllama2" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/exllama2 \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "transformers" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/transformers \ ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/diffusers \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/vllm \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/mamba \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/sentencetransformers \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/rerankers \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/transformers \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/vall-e-x \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/exllama \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/exllama2 \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/petals \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/transformers-musicgen \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/parler-tts \ - ; fi -RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \ - make -C backend/python/coqui \ + +RUN if [[ ( "${EXTRA_BACKENDS}" =~ "vllm" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/vllm \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "autogptq" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/autogptq \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "bark" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/bark \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "rerankers" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/rerankers \ + ; fi && \ + if [[ ( "${EXTRA_BACKENDS}" =~ "mamba" || -z "${EXTRA_BACKENDS}" ) && "$IMAGE_TYPE" == "extras" ]]; then \ + make -C backend/python/mamba \ ; fi # Make sure the models directory exists diff --git a/backend/cpp/llama/Makefile b/backend/cpp/llama/Makefile index ed610861..6fc6b49a 100644 --- a/backend/cpp/llama/Makefile +++ b/backend/cpp/llama/Makefile @@ -62,8 +62,8 @@ grpc-server: llama.cpp llama.cpp/examples/grpc-server @echo "Building grpc-server with $(BUILD_TYPE) build type and $(CMAKE_ARGS)" ifneq (,$(findstring sycl,$(BUILD_TYPE))) bash -c "source $(ONEAPI_VARS); \ - cd llama.cpp && mkdir -p build && cd build && cmake .. $(CMAKE_ARGS) && cmake --build . --config Release" + cd llama.cpp && mkdir -p build && cd build && cmake .. $(CMAKE_ARGS) && $(MAKE)" else - cd llama.cpp && mkdir -p build && cd build && cmake .. 
$(CMAKE_ARGS) && cmake --build . --config Release + cd llama.cpp && mkdir -p build && cd build && cmake .. $(CMAKE_ARGS) && $(MAKE) endif cp llama.cpp/build/bin/grpc-server . \ No newline at end of file diff --git a/backend/python/autogptq/Makefile b/backend/python/autogptq/Makefile index eb81f045..5e912a2c 100644 --- a/backend/python/autogptq/Makefile +++ b/backend/python/autogptq/Makefile @@ -1,6 +1,6 @@ .PHONY: autogptq autogptq: protogen - $(MAKE) -C ../common-env/transformers + bash install.sh .PHONY: protogen protogen: backend_pb2_grpc.py backend_pb2.py @@ -10,4 +10,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/autogptq/install.sh b/backend/python/autogptq/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/autogptq/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/autogptq/requirements.txt b/backend/python/autogptq/requirements.txt new file mode 100644 index 00000000..05dac8c8 --- /dev/null +++ b/backend/python/autogptq/requirements.txt @@ -0,0 +1,7 @@ +accelerate +auto-gptq==0.7.1 +grpcio==1.63.0 +protobuf +torch +certifi +transformers \ No newline at end of file diff --git a/backend/python/autogptq/run.sh b/backend/python/autogptq/run.sh index 15c5e0f3..ba599ddf 100755 --- a/backend/python/autogptq/run.sh +++ b/backend/python/autogptq/run.sh @@ -1,14 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the autogptq server with conda +## A bash script wrapper that runs the autogptq server -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/autogptq.py $@ +python $MY_DIR/autogptq.py $@ \ No newline at end of file diff --git 
a/backend/python/autogptq/test.sh b/backend/python/autogptq/test.sh new file mode 100755 index 00000000..4b742b3f --- /dev/null +++ b/backend/python/autogptq/test.sh @@ -0,0 +1,16 @@ +#!/bin/bash +## +## A bash script wrapper that runs python unittests + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +source $MY_DIR/venv/bin/activate + +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/bark/Makefile b/backend/python/bark/Makefile index a16308f7..a6ba88ce 100644 --- a/backend/python/bark/Makefile +++ b/backend/python/bark/Makefile @@ -1,6 +1,6 @@ .PHONY: ttsbark ttsbark: protogen - $(MAKE) -C ../common-env/transformers + bash install.sh .PHONY: run run: protogen @@ -22,4 +22,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/bark/install.sh b/backend/python/bark/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/bark/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/bark/requirements.txt b/backend/python/bark/requirements.txt new file mode 100644 index 00000000..2aec63f7 --- /dev/null +++ b/backend/python/bark/requirements.txt @@ -0,0 +1,6 @@ +accelerate +bark==0.1.5 +grpcio==1.63.0 +protobuf +certifi +transformers \ No newline at end of file diff --git a/backend/python/bark/run.sh b/backend/python/bark/run.sh index 7e6a36bb..76149f37 100755 --- a/backend/python/bark/run.sh +++ b/backend/python/bark/run.sh @@ -1,14 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the ttsbark server with conda +## A bash script wrapper that runs the ttsbark server -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash 
script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/ttsbark.py $@ +python $MY_DIR/ttsbark.py $@ \ No newline at end of file diff --git a/backend/python/bark/test.sh b/backend/python/bark/test.sh old mode 100644 new mode 100755 index 17671068..4b742b3f --- a/backend/python/bark/test.sh +++ b/backend/python/bark/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the bark server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/common-env/transformers/install.sh b/backend/python/common-env/transformers/install.sh old mode 100644 new mode 100755 diff --git a/backend/python/coqui/Makefile b/backend/python/coqui/Makefile index 475804c9..4196d26d 100644 --- a/backend/python/coqui/Makefile +++ b/backend/python/coqui/Makefile @@ -1,6 +1,6 @@ .PHONY: coqui coqui: protogen - $(MAKE) -C ../common-env/transformers + bash install.sh .PHONY: run run: protogen @@ -22,4 +22,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. 
backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/coqui/install.sh b/backend/python/coqui/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/coqui/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/coqui/requirements.txt b/backend/python/coqui/requirements.txt new file mode 100644 index 00000000..b7c7faf3 --- /dev/null +++ b/backend/python/coqui/requirements.txt @@ -0,0 +1,6 @@ +accelerate +TTS==0.22.0 +grpcio==1.63.0 +protobuf +certifi +transformers \ No newline at end of file diff --git a/backend/python/coqui/run.sh b/backend/python/coqui/run.sh index 5d306ca8..eb0b3126 100755 --- a/backend/python/coqui/run.sh +++ b/backend/python/coqui/run.sh @@ -1,14 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the ttsbark server with conda +## A bash script wrapper that runs the coqui server -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/coqui_server.py $@ +python $MY_DIR/coqui_server.py $@ \ No newline at end of file diff --git a/backend/python/coqui/test.sh b/backend/python/coqui/test.sh old mode 100644 new mode 100755 index 17671068..4b742b3f --- a/backend/python/coqui/test.sh +++ b/backend/python/coqui/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the bark server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" 
+ exit 1 +fi \ No newline at end of file diff --git a/backend/python/diffusers/Makefile b/backend/python/diffusers/Makefile index c73efdd2..beec821d 100644 --- a/backend/python/diffusers/Makefile +++ b/backend/python/diffusers/Makefile @@ -13,8 +13,7 @@ endif .PHONY: diffusers diffusers: protogen - @echo "Installing $(CONDA_ENV_PATH)..." - bash install.sh $(CONDA_ENV_PATH) + bash install.sh .PHONY: run run: protogen @@ -33,4 +32,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/diffusers/diffusers-rocm.yml b/backend/python/diffusers/diffusers-rocm.yml deleted file mode 100644 index fc1ad08c..00000000 --- a/backend/python/diffusers/diffusers-rocm.yml +++ /dev/null @@ -1,65 +0,0 @@ -name: diffusers -channels: - - defaults -dependencies: - - _libgcc_mutex=0.1=main - - _openmp_mutex=5.1=1_gnu - - bzip2=1.0.8=h7b6447c_0 - - ca-certificates=2023.08.22=h06a4308_0 - - ld_impl_linux-64=2.38=h1181459_1 - - libffi=3.4.4=h6a678d5_0 - - libgcc-ng=11.2.0=h1234567_1 - - libgomp=11.2.0=h1234567_1 - - libstdcxx-ng=11.2.0=h1234567_1 - - libuuid=1.41.5=h5eee18b_0 - - ncurses=6.4=h6a678d5_0 - - openssl=3.0.11=h7f8727e_2 - - pip=23.2.1=py311h06a4308_0 - - python=3.11.5=h955ad1f_0 - - readline=8.2=h5eee18b_0 - - setuptools=68.0.0=py311h06a4308_0 - - sqlite=3.41.2=h5eee18b_0 - - tk=8.6.12=h1ccaba5_0 - - tzdata=2023c=h04d1e81_0 - - wheel=0.41.2=py311h06a4308_0 - - xz=5.4.2=h5eee18b_0 - - zlib=1.2.13=h5eee18b_0 - - pip: - - --pre - - --extra-index-url https://download.pytorch.org/whl/nightly/ - - accelerate>=0.11.0 - - certifi==2023.7.22 - - charset-normalizer==3.3.0 - - compel==2.0.2 - - diffusers==0.24.0 - - filelock==3.12.4 - - fsspec==2023.9.2 - - grpcio==1.63.0 - - huggingface-hub>=0.19.4 - - idna==3.4 - - importlib-metadata==6.8.0 - - jinja2==3.1.2 - - markupsafe==2.1.3 - - mpmath==1.3.0 - - networkx==3.1 - - numpy==1.26.0 - - omegaconf - - packaging==23.2 - - pillow==10.0.1 - - protobuf==4.24.4 - - psutil==5.9.5 - - pyparsing==3.1.1 - - pyyaml==6.0.1 - - regex==2023.10.3 - - requests==2.31.0 - - safetensors==0.4.0 - - sympy==1.12 - - tqdm==4.66.1 - - transformers>=4.25.1 - - triton==2.1.0 - - typing-extensions==4.8.0 - - urllib3==2.0.6 - - zipp==3.17.0 - - torch - - opencv-python -prefix: /opt/conda/envs/diffusers diff --git a/backend/python/diffusers/diffusers.yml b/backend/python/diffusers/diffusers.yml deleted file mode 100644 index 60c28db9..00000000 --- a/backend/python/diffusers/diffusers.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: diffusers -channels: - - defaults -dependencies: - - _libgcc_mutex=0.1=main - - _openmp_mutex=5.1=1_gnu - - bzip2=1.0.8=h7b6447c_0 - - ca-certificates=2023.08.22=h06a4308_0 - - ld_impl_linux-64=2.38=h1181459_1 - - libffi=3.4.4=h6a678d5_0 - - libgcc-ng=11.2.0=h1234567_1 - - libgomp=11.2.0=h1234567_1 - - libstdcxx-ng=11.2.0=h1234567_1 - - libuuid=1.41.5=h5eee18b_0 - - ncurses=6.4=h6a678d5_0 - - openssl=3.0.11=h7f8727e_2 - - pip=23.2.1=py311h06a4308_0 - - python=3.11.5=h955ad1f_0 - - readline=8.2=h5eee18b_0 - - setuptools=68.0.0=py311h06a4308_0 - - sqlite=3.41.2=h5eee18b_0 - - tk=8.6.12=h1ccaba5_0 - - tzdata=2023c=h04d1e81_0 - - wheel=0.41.2=py311h06a4308_0 - - xz=5.4.2=h5eee18b_0 - - zlib=1.2.13=h5eee18b_0 - - pip: - - 
accelerate>=0.11.0 - - certifi==2023.7.22 - - charset-normalizer==3.3.0 - - compel==2.0.2 - - diffusers==0.24.0 - - filelock==3.12.4 - - fsspec==2023.9.2 - - grpcio==1.63.0 - - huggingface-hub>=0.19.4 - - idna==3.4 - - importlib-metadata==6.8.0 - - jinja2==3.1.2 - - markupsafe==2.1.3 - - mpmath==1.3.0 - - networkx==3.1 - - numpy==1.26.0 - - nvidia-cublas-cu12==12.1.3.1 - - nvidia-cuda-cupti-cu12==12.1.105 - - nvidia-cuda-nvrtc-cu12==12.1.105 - - nvidia-cuda-runtime-cu12==12.1.105 - - nvidia-cudnn-cu12==8.9.2.26 - - nvidia-cufft-cu12==11.0.2.54 - - nvidia-curand-cu12==10.3.2.106 - - nvidia-cusolver-cu12==11.4.5.107 - - nvidia-cusparse-cu12==12.1.0.106 - - nvidia-nccl-cu12==2.18.1 - - nvidia-nvjitlink-cu12==12.2.140 - - nvidia-nvtx-cu12==12.1.105 - - omegaconf - - packaging==23.2 - - pillow==10.0.1 - - protobuf==4.24.4 - - psutil==5.9.5 - - pyparsing==3.1.1 - - pyyaml==6.0.1 - - regex==2023.10.3 - - requests==2.31.0 - - safetensors==0.4.0 - - sympy==1.12 - - torch==2.1.0 - - tqdm==4.66.1 - - transformers>=4.25.1 - - triton==2.1.0 - - typing-extensions==4.8.0 - - urllib3==2.0.6 - - zipp==3.17.0 - - opencv-python -prefix: /opt/conda/envs/diffusers diff --git a/backend/python/diffusers/install.sh b/backend/python/diffusers/install.sh index 0b6607dc..311203ca 100755 --- a/backend/python/diffusers/install.sh +++ b/backend/python/diffusers/install.sh @@ -1,50 +1,34 @@ #!/bin/bash set -ex -SKIP_CONDA=${SKIP_CONDA:-0} +BUILD_ISOLATION_FLAG="" -# Check if environment exist -conda_env_exists(){ - ! conda list --name "${@}" >/dev/null 2>/dev/null -} +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -if [ $SKIP_CONDA -eq 1 ]; then - echo "Skipping conda environment installation" -else - export PATH=$PATH:/opt/conda/bin - if conda_env_exists "diffusers" ; then - echo "Creating virtual environment..." - conda env create --name diffusers --file $1 - echo "Virtual environment created." - else - echo "Virtual environment already exists." 
- fi +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt fi if [ -d "/opt/intel" ]; then # Intel GPU: If the directory exists, we assume we are using the Intel image # https://github.com/intel/intel-extension-for-pytorch/issues/538 - pip install torch==2.1.0a0 \ - torchvision==0.16.0a0 \ - torchaudio==2.1.0a0 \ - intel-extension-for-pytorch==2.1.10+xpu \ - --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ - - pip install google-api-python-client \ - grpcio==1.63.0 \ - grpcio-tools==1.63.0 \ - diffusers==0.24.0 \ - transformers>=4.25.1 \ - accelerate \ - compel==2.0.2 \ - Pillow + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi fi if [ "$PIP_CACHE_PURGE" = true ] ; then - if [ $SKIP_CONDA -ne 1 ]; then - # Activate conda environment - source activate diffusers - fi - pip cache purge fi \ No newline at end of file diff --git a/backend/python/diffusers/requirements-intel.txt b/backend/python/diffusers/requirements-intel.txt new file mode 100644 index 00000000..979c5c18 --- /dev/null +++ b/backend/python/diffusers/requirements-intel.txt @@ -0,0 +1,3 @@ +intel-extension-for-pytorch +torchaudio +torchvision \ No newline at end of file diff --git a/backend/python/diffusers/requirements.txt b/backend/python/diffusers/requirements.txt new file mode 100644 index 00000000..84b34302 --- /dev/null +++ b/backend/python/diffusers/requirements.txt @@ -0,0 +1,10 @@ +accelerate +compel +diffusers +grpcio==1.63.0 +opencv-python +pillow +protobuf +torch +transformers +certifi \ No newline at end of file diff --git a/backend/python/diffusers/run.sh b/backend/python/diffusers/run.sh index 69b25d50..03d9c500 100755 --- a/backend/python/diffusers/run.sh +++ b/backend/python/diffusers/run.sh @@ -1,19 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the diffusers server with conda +## A bash script wrapper that runs the GRPC backend -if [ -d "/opt/intel" ]; then - # Assumes we are using the Intel oneAPI container image - # https://github.com/intel/intel-extension-for-pytorch/issues/538 - export XPU=1 -else - export PATH=$PATH:/opt/conda/bin - # Activate conda environment - source activate diffusers -fi +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python $DIR/backend_diffusers.py $@ +python $MY_DIR/backend_diffusers.py $@ \ No newline at end of file diff --git a/backend/python/diffusers/test.sh b/backend/python/diffusers/test.sh old mode 100644 new mode 100755 index 421a1921..4b742b3f --- a/backend/python/diffusers/test.sh +++ b/backend/python/diffusers/test.sh @@ -1,14 +1,16 @@ #!/bin/bash - ## -## A bash script 
wrapper that runs the diffusers server with conda +## A bash script wrapper that runs python unittests -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate diffusers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python -m unittest $DIR/test.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/exllama/.gitignore b/backend/python/exllama/.gitignore new file mode 100644 index 00000000..1d3a0654 --- /dev/null +++ b/backend/python/exllama/.gitignore @@ -0,0 +1 @@ +source \ No newline at end of file diff --git a/backend/python/exllama/Makefile b/backend/python/exllama/Makefile index 15623448..82001861 100644 --- a/backend/python/exllama/Makefile +++ b/backend/python/exllama/Makefile @@ -18,4 +18,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + $(RM) -r venv source \ No newline at end of file diff --git a/backend/python/exllama/install.sh b/backend/python/exllama/install.sh index 320e7f4d..10b7519d 100755 --- a/backend/python/exllama/install.sh +++ b/backend/python/exllama/install.sh @@ -1,31 +1,36 @@ #!/bin/bash set -ex -export PATH=$PATH:/opt/conda/bin +BUILD_ISOLATION_FLAG="" if [ "$BUILD_TYPE" != "cublas" ]; then echo "[exllama] Attention!!! Nvidia GPU is required - skipping installation" exit 0 fi -# Check if environment exist -conda_env_exists(){ - ! conda list --name "${@}" >/dev/null 2>/dev/null -} +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -if conda_env_exists "exllama" ; then - echo "Creating virtual environment..." - conda env create --name exllama --file $1 - echo "Virtual environment created." -else - echo "Virtual environment already exists." 
+uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt fi -source activate exllama +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt -git clone https://github.com/turboderp/exllama $CONDA_PREFIX/exllama && pushd $CONDA_PREFIX/exllama && pip install -r requirements.txt && popd +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi -cp -rfv $CONDA_PREFIX/exllama/* ./ +git clone https://github.com/turboderp/exllama $MY_DIR/source +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/source/requirements.txt + +cp -rfv ./*py $MY_DIR/source/ if [ "$PIP_CACHE_PURGE" = true ] ; then pip cache purge diff --git a/backend/python/exllama/requirements.txt b/backend/python/exllama/requirements.txt new file mode 100644 index 00000000..784e50b0 --- /dev/null +++ b/backend/python/exllama/requirements.txt @@ -0,0 +1,6 @@ +grpcio==1.63.0 +protobuf +torch +transformers +certifi +setuptools \ No newline at end of file diff --git a/backend/python/exllama/run.sh b/backend/python/exllama/run.sh index 95fedb6d..70024770 100755 --- a/backend/python/exllama/run.sh +++ b/backend/python/exllama/run.sh @@ -1,15 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the exllama server with conda -export PATH=$PATH:/opt/conda/bin +## A bash script wrapper that runs the exllama server with uv -# Activate conda environment -source activate exllama +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -cd $DIR - -python $DIR/exllama.py $@ +python $MY_DIR/source/exllama.py $@ \ No newline at end of file diff --git a/backend/python/exllama/test.sh b/backend/python/exllama/test.sh new file mode 100755 index 00000000..4b742b3f --- /dev/null +++ b/backend/python/exllama/test.sh @@ -0,0 +1,16 @@ +#!/bin/bash +## +## A bash script wrapper that runs python unittests + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +source $MY_DIR/venv/bin/activate + +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/exllama2/.gitignore b/backend/python/exllama2/.gitignore new file mode 100644 index 00000000..1d3a0654 --- /dev/null +++ b/backend/python/exllama2/.gitignore @@ -0,0 +1 @@ +source \ No newline at end of file diff --git a/backend/python/exllama2/Makefile b/backend/python/exllama2/Makefile index 6d6776b7..09db200d 100644 --- a/backend/python/exllama2/Makefile +++ b/backend/python/exllama2/Makefile @@ -1,6 +1,5 @@ .PHONY: exllama2 exllama2: protogen - $(MAKE) -C ../common-env/transformers bash install.sh .PHONY: run @@ -17,4 +16,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. 
--python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + $(RM) -r venv source \ No newline at end of file diff --git a/backend/python/exllama2/exllama2.yml b/backend/python/exllama2/exllama2.yml deleted file mode 100644 index 678d36a5..00000000 --- a/backend/python/exllama2/exllama2.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: exllama2 -channels: - - defaults -dependencies: - - _libgcc_mutex=0.1=main - - _openmp_mutex=5.1=1_gnu - - bzip2=1.0.8=h7b6447c_0 - - ca-certificates=2023.08.22=h06a4308_0 - - ld_impl_linux-64=2.38=h1181459_1 - - libffi=3.4.4=h6a678d5_0 - - libgcc-ng=11.2.0=h1234567_1 - - libgomp=11.2.0=h1234567_1 - - libstdcxx-ng=11.2.0=h1234567_1 - - libuuid=1.41.5=h5eee18b_0 - - ncurses=6.4=h6a678d5_0 - - openssl=3.0.11=h7f8727e_2 - - pip=23.2.1=py311h06a4308_0 - - python=3.11.5=h955ad1f_0 - - readline=8.2=h5eee18b_0 - - setuptools=68.0.0=py311h06a4308_0 - - sqlite=3.41.2=h5eee18b_0 - - tk=8.6.12=h1ccaba5_0 - - tzdata=2023c=h04d1e81_0 - - wheel=0.41.2=py311h06a4308_0 - - xz=5.4.2=h5eee18b_0 - - zlib=1.2.13=h5eee18b_0 - - pip: - - filelock==3.12.4 - - fsspec==2023.9.2 - - grpcio==1.63.0 - - markupsafe==2.1.3 - - mpmath==1.3.0 - - networkx==3.1 - - protobuf==4.24.4 - - nvidia-cublas-cu12==12.1.3.1 - - nvidia-cuda-cupti-cu12==12.1.105 - - nvidia-cuda-nvrtc-cu12==12.1.105 - - nvidia-cuda-runtime-cu12==12.1.105 - - nvidia-cudnn-cu12==8.9.2.26 - - nvidia-cufft-cu12==11.0.2.54 - - nvidia-curand-cu12==10.3.2.106 - - nvidia-cusolver-cu12==11.4.5.107 - - nvidia-cusparse-cu12==12.1.0.106 - - nvidia-nccl-cu12==2.18.1 - - nvidia-nvjitlink-cu12==12.2.140 - - nvidia-nvtx-cu12==12.1.105 - - pandas - - numpy - - ninja - - fastparquet - - torch>=2.1.0 - - safetensors>=0.3.2 - - sentencepiece>=0.1.97 - - pygments - - websockets - - regex -prefix: /opt/conda/envs/exllama2 diff --git a/backend/python/exllama2/install.sh b/backend/python/exllama2/install.sh index 858685b0..3bb030fb 100755 --- a/backend/python/exllama2/install.sh +++ b/backend/python/exllama2/install.sh @@ -2,30 +2,42 @@ set -e ## ## A bash script installs the required dependencies of VALL-E-X and prepares the environment -export SHA=c0ddebaaaf8ffd1b3529c2bb654e650bce2f790f +EXLLAMA2_VERSION=c0ddebaaaf8ffd1b3529c2bb654e650bce2f790f + +BUILD_ISOLATION_FLAG="" if [ "$BUILD_TYPE" != "cublas" ]; then - echo "[exllamav2] Attention!!! Nvidia GPU is required - skipping installation" + echo "[exllama] Attention!!! 
Nvidia GPU is required - skipping installation" exit 0 fi -export PATH=$PATH:/opt/conda/bin -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -echo $CONDA_PREFIX +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate -git clone https://github.com/turboderp/exllamav2 $CONDA_PREFIX/exllamav2 +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi -pushd $CONDA_PREFIX/exllamav2 +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt -git checkout -b build $SHA +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi -# TODO: this needs to be pinned within the conda environments -pip install -r requirements.txt +git clone https://github.com/turboderp/exllamav2 $MY_DIR/source +pushd ${MY_DIR}/source && git checkout -b build ${EXLLAMA2_VERSION} && popd -popd +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/source/requirements.txt +# This installs exllamav2 in JIT mode so it will compile the appropriate torch extension at runtime +EXLLAMA_NOCOMPILE= uv pip install ${BUILD_ISOLATION_FLAG} ${MY_DIR}/source/ -cp -rfv $CONDA_PREFIX/exllamav2/* ./ +cp -rfv ./*py $MY_DIR/source/ if [ "$PIP_CACHE_PURGE" = true ] ; then pip cache purge diff --git a/backend/python/exllama2/requirements-install.txt b/backend/python/exllama2/requirements-install.txt new file mode 100644 index 00000000..322799ff --- /dev/null +++ b/backend/python/exllama2/requirements-install.txt @@ -0,0 +1,4 @@ +# This is here to trigger the install script to add --no-build-isolation to the uv pip install commands +# exllama2 does not specify it's build requirements per PEP517, so we need to provide some things ourselves +wheel +setuptools \ No newline at end of file diff --git a/backend/python/exllama2/requirements.txt b/backend/python/exllama2/requirements.txt new file mode 100644 index 00000000..25cd1e65 --- /dev/null +++ b/backend/python/exllama2/requirements.txt @@ -0,0 +1,7 @@ +accelerate +grpcio==1.63.0 +protobuf +certifi +torch +wheel +setuptools \ No newline at end of file diff --git a/backend/python/exllama2/run.sh b/backend/python/exllama2/run.sh index f11b6b9a..cb390a31 100755 --- a/backend/python/exllama2/run.sh +++ b/backend/python/exllama2/run.sh @@ -1,16 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the exllama server with conda +## A bash script wrapper that runs the exllama2 server -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -cd $DIR - -python $DIR/exllama2_backend.py $@ +python $MY_DIR/source/exllama2_backend.py $@ \ No newline at end of file diff --git a/backend/python/exllama2/test.sh b/backend/python/exllama2/test.sh new file mode 100755 index 00000000..4b742b3f --- /dev/null +++ b/backend/python/exllama2/test.sh @@ -0,0 +1,16 @@ +#!/bin/bash +## +## A bash script wrapper that runs python unittests + +MY_DIR="$(dirname -- 
"${BASH_SOURCE[0]}")" + +source $MY_DIR/venv/bin/activate + +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/mamba/Makefile b/backend/python/mamba/Makefile index ca18e609..47c42369 100644 --- a/backend/python/mamba/Makefile +++ b/backend/python/mamba/Makefile @@ -1,7 +1,6 @@ .PHONY: mamba mamba: protogen - $(MAKE) -C ../common-env/transformers - bash install.sh + bash install.sh .PHONY: run run: protogen @@ -23,4 +22,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + $(RM) -r venv \ No newline at end of file diff --git a/backend/python/mamba/install.sh b/backend/python/mamba/install.sh index 4ef26ece..84612960 100755 --- a/backend/python/mamba/install.sh +++ b/backend/python/mamba/install.sh @@ -1,21 +1,38 @@ #!/bin/bash -set -e -## -## A bash script installs the required dependencies of VALL-E-X and prepares the environment +set -ex if [ "$BUILD_TYPE" != "cublas" ]; then echo "[mamba] Attention!!! nvcc is required - skipping installation" exit 0 fi -export PATH=$PATH:/opt/conda/bin +BUILD_ISOLATION_FLAG="" -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -echo $CONDA_PREFIX +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate -pip install causal-conv1d==1.0.0 mamba-ssm==1.0.1 +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi if [ "$PIP_CACHE_PURGE" = true ] ; then pip cache purge diff --git a/backend/python/mamba/requirements-install.txt b/backend/python/mamba/requirements-install.txt new file mode 100644 index 00000000..e937855d --- /dev/null +++ b/backend/python/mamba/requirements-install.txt @@ -0,0 +1,7 @@ +# mabma does not specify it's build dependencies per PEP517, so we need to disable build isolation +# this also means that we need to install the basic build dependencies into the venv ourselves +# https://github.com/Dao-AILab/causal-conv1d/issues/24 +packaging +setuptools +wheel +torch==2.2.0 \ No newline at end of file diff --git a/backend/python/mamba/requirements.txt b/backend/python/mamba/requirements.txt new file mode 100644 index 00000000..f8225c1c --- /dev/null +++ 
b/backend/python/mamba/requirements.txt @@ -0,0 +1,6 @@ +causal-conv1d==1.2.0.post2 +mamba-ssm==1.2.0.post1 +grpcio==1.63.0 +protobuf +certifi +transformers \ No newline at end of file diff --git a/backend/python/mamba/run.sh b/backend/python/mamba/run.sh index 3fee2931..54d19970 100755 --- a/backend/python/mamba/run.sh +++ b/backend/python/mamba/run.sh @@ -1,14 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the diffusers server with conda +## A bash script wrapper that runs the GRPC server -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/backend_mamba.py $@ \ No newline at end of file +python $MY_DIR/backend_mamba.py $@ \ No newline at end of file diff --git a/backend/python/vllm/test_backend_vllm.py b/backend/python/mamba/test.py similarity index 94% rename from backend/python/vllm/test_backend_vllm.py rename to backend/python/mamba/test.py index 7760f816..92dde016 100644 --- a/backend/python/vllm/test_backend_vllm.py +++ b/backend/python/mamba/test.py @@ -20,7 +20,7 @@ class TestBackendServicer(unittest.TestCase): This class contains methods to test the startup and shutdown of the gRPC service. """ def setUp(self): - self.service = subprocess.Popen(["python", "backend_vllm.py", "--addr", "localhost:50051"]) + self.service = subprocess.Popen(["python", "backend_mamba.py", "--addr", "localhost:50051"]) time.sleep(10) def tearDown(self) -> None: diff --git a/backend/python/mamba/test.sh b/backend/python/mamba/test.sh old mode 100644 new mode 100755 index b1ff5591..4b742b3f --- a/backend/python/mamba/test.sh +++ b/backend/python/mamba/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the transformers server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test_backend_mamba.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/parler-tts/Makefile b/backend/python/parler-tts/Makefile index 4497762e..590401f3 100644 --- a/backend/python/parler-tts/Makefile +++ b/backend/python/parler-tts/Makefile @@ -36,4 +36,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + $(RM) -r venv \ No newline at end of file diff --git a/backend/python/parler-tts/install.sh b/backend/python/parler-tts/install.sh index b9965b23..2d60fc35 100755 --- a/backend/python/parler-tts/install.sh +++ b/backend/python/parler-tts/install.sh @@ -1,39 +1,39 @@ #!/bin/bash set -ex -SKIP_CONDA=${SKIP_CONDA:-0} +BUILD_ISOLATION_FLAG="" -# Check if environment exist -conda_env_exists(){ - ! 
conda list --name "${@}" >/dev/null 2>/dev/null -} +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -if [ $SKIP_CONDA -eq 1 ]; then - echo "Skipping conda environment installation" -else - export PATH=$PATH:/opt/conda/bin - if conda_env_exists "parler" ; then - echo "Creating virtual environment..." - conda env create --name parler --file $1 - echo "Virtual environment created." - else - echo "Virtual environment already exists." +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt fi fi -if [ $SKIP_CONDA -ne 1 ]; then - # Activate conda environment - source activate parler - # https://github.com/descriptinc/audiotools/issues/101 - # incompatible protobuf versions. - curl -L https://raw.githubusercontent.com/protocolbuffers/protobuf/main/python/google/protobuf/internal/builder.py -o $CONDA_PREFIX/lib/python3.11/site-packages/google/protobuf/internal/builder.py -fi +# https://github.com/descriptinc/audiotools/issues/101 +# incompatible protobuf versions. 
+PYDIR=$(ls $MY_DIR/venv/lib) +curl -L https://raw.githubusercontent.com/protocolbuffers/protobuf/main/python/google/protobuf/internal/builder.py -o $MY_DIR/venv/lib/$PYDIR/site-packages/google/protobuf/internal/builder.py if [ "$PIP_CACHE_PURGE" = true ] ; then - if [ $SKIP_CONDA -ne 1 ]; then - # Activate conda environment - source activate parler - fi - pip cache purge fi \ No newline at end of file diff --git a/backend/python/parler-tts/requirements.txt b/backend/python/parler-tts/requirements.txt new file mode 100644 index 00000000..31085e3d --- /dev/null +++ b/backend/python/parler-tts/requirements.txt @@ -0,0 +1,7 @@ +accelerate +grpcio==1.63.0 +protobuf +torch +git+https://github.com/huggingface/parler-tts.git@10016fb0300c0dc31a0fb70e26f3affee7b62f16 +certifi +transformers \ No newline at end of file diff --git a/backend/python/parler-tts/run.sh b/backend/python/parler-tts/run.sh old mode 100644 new mode 100755 index 08e42198..76425379 --- a/backend/python/parler-tts/run.sh +++ b/backend/python/parler-tts/run.sh @@ -1,16 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the parler-tts server with conda +## A bash script wrapper that runs the GRPC backend -echo "Launching gRPC server for parler-tts" +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -export PATH=$PATH:/opt/conda/bin +source $MY_DIR/venv/bin/activate -# Activate conda environment -source activate parler - -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/parler_tts_server.py $@ +python $MY_DIR/parler_tts_server.py $@ \ No newline at end of file diff --git a/backend/python/parler-tts/test_parler.py b/backend/python/parler-tts/test.py similarity index 100% rename from backend/python/parler-tts/test_parler.py rename to backend/python/parler-tts/test.py diff --git a/backend/python/parler-tts/test.sh b/backend/python/parler-tts/test.sh old mode 100644 new mode 100755 index 1bd15fd1..4b742b3f --- a/backend/python/parler-tts/test.sh +++ b/backend/python/parler-tts/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the transformers server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate parler +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test_parler.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/petals/Makefile b/backend/python/petals/Makefile index 0ed64a07..af32e845 100644 --- a/backend/python/petals/Makefile +++ b/backend/python/petals/Makefile @@ -24,4 +24,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. 
backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/petals/install.sh b/backend/python/petals/install.sh old mode 100644 new mode 100755 index 97bcbb8a..311203ca --- a/backend/python/petals/install.sh +++ b/backend/python/petals/install.sh @@ -1,5 +1,34 @@ #!/bin/bash +set -ex -export PATH=$PATH:/opt/conda/bin +BUILD_ISOLATION_FLAG="" -conda env create --name petals --file $1 \ No newline at end of file +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/petals/requirements.txt b/backend/python/petals/requirements.txt new file mode 100644 index 00000000..10f5114e --- /dev/null +++ b/backend/python/petals/requirements.txt @@ -0,0 +1,3 @@ +git+https://github.com/bigscience-workshop/petals +certifi +transformers \ No newline at end of file diff --git a/backend/python/petals/run.sh b/backend/python/petals/run.sh index 3713b9bb..87bf1fa9 100755 --- a/backend/python/petals/run.sh +++ b/backend/python/petals/run.sh @@ -1,23 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the exllama server with conda +## A bash script wrapper that runs the GRPC backend -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -CONDA_ENV=petals +source $MY_DIR/venv/bin/activate -# Activate conda environment -# if source is available use it, or use conda -# -if [ -f /opt/conda/bin/activate ]; then - source activate $CONDA_ENV -else - eval "$(conda shell.bash hook)" - conda activate $CONDA_ENV -fi - -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/backend_petals.py $@ +python $MY_DIR/backend_petals.py $@ \ No newline at end of file diff --git a/backend/python/petals/test_petals.py b/backend/python/petals/test.py similarity index 100% rename from backend/python/petals/test_petals.py rename to backend/python/petals/test.py diff --git a/backend/python/petals/test.sh b/backend/python/petals/test.sh old mode 100644 new mode 100755 index ed2aa3b3..4b742b3f --- a/backend/python/petals/test.sh +++ b/backend/python/petals/test.sh @@ -1,20 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the transformers server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -CONDA_ENV=petals -# 
Activate conda environment -# if source is available use it, or use conda -# -if [ -f /opt/conda/bin/activate ]; then - source activate $CONDA_ENV +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +source $MY_DIR/venv/bin/activate + +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd else - eval "$(conda shell.bash hook)" - conda activate $CONDA_ENV -fi - -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python -m unittest $DIR/test_petals.py \ No newline at end of file + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/rerankers/Makefile b/backend/python/rerankers/Makefile index f029c841..0c812047 100644 --- a/backend/python/rerankers/Makefile +++ b/backend/python/rerankers/Makefile @@ -1,7 +1,6 @@ .PHONY: rerankers rerankers: protogen - $(MAKE) -C ../common-env/transformers - + bash install.sh .PHONY: run run: protogen @@ -24,4 +23,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/rerankers/install.sh b/backend/python/rerankers/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/rerankers/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/rerankers/requirements.txt b/backend/python/rerankers/requirements.txt new file mode 100644 index 00000000..c4eac2f4 --- /dev/null +++ b/backend/python/rerankers/requirements.txt @@ -0,0 +1,6 @@ +accelerate +rerankers[transformers] +grpcio==1.63.0 +protobuf +certifi +transformers \ No newline at end of file diff --git a/backend/python/rerankers/run.sh b/backend/python/rerankers/run.sh index 16d8a0bd..b7f6384c 100755 --- a/backend/python/rerankers/run.sh +++ b/backend/python/rerankers/run.sh @@ -1,14 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the reranker server with conda +## A bash script 
wrapper that runs the GRPC backend -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/reranker.py $@ +python $MY_DIR/reranker.py $@ \ No newline at end of file diff --git a/backend/python/rerankers/test_reranker.py b/backend/python/rerankers/test.py similarity index 100% rename from backend/python/rerankers/test_reranker.py rename to backend/python/rerankers/test.py diff --git a/backend/python/rerankers/test.sh b/backend/python/rerankers/test.sh index 75316829..4b742b3f 100755 --- a/backend/python/rerankers/test.sh +++ b/backend/python/rerankers/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the reranker server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test_reranker.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/sentencetransformers/Makefile b/backend/python/sentencetransformers/Makefile index ac442897..5fa6acd7 100644 --- a/backend/python/sentencetransformers/Makefile +++ b/backend/python/sentencetransformers/Makefile @@ -1,6 +1,6 @@ .PHONY: sentencetransformers sentencetransformers: protogen - $(MAKE) -C ../common-env/transformers + bash ./install.sh .PHONY: run @@ -24,4 +24,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. 
backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/sentencetransformers/install.sh b/backend/python/sentencetransformers/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/sentencetransformers/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/sentencetransformers/requirements.txt b/backend/python/sentencetransformers/requirements.txt new file mode 100644 index 00000000..b00dc058 --- /dev/null +++ b/backend/python/sentencetransformers/requirements.txt @@ -0,0 +1,6 @@ +accelerate +sentence-transformers==2.5.1 +transformers +grpcio==1.63.0 +protobuf +certifi \ No newline at end of file diff --git a/backend/python/sentencetransformers/run.sh b/backend/python/sentencetransformers/run.sh index 36af1a56..c2b04d8d 100755 --- a/backend/python/sentencetransformers/run.sh +++ b/backend/python/sentencetransformers/run.sh @@ -1,14 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the sentencetransformers server with conda +## A bash script wrapper that runs the GRPC backend -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/sentencetransformers.py $@ +python $MY_DIR/sentencetransformers.py $@ \ No newline at end of file diff --git a/backend/python/sentencetransformers/test_sentencetransformers.py b/backend/python/sentencetransformers/test.py similarity index 100% rename from backend/python/sentencetransformers/test_sentencetransformers.py rename to backend/python/sentencetransformers/test.py diff --git a/backend/python/sentencetransformers/test.sh b/backend/python/sentencetransformers/test.sh old mode 100644 new mode 100755 index 8dc70c9e..4b742b3f --- a/backend/python/sentencetransformers/test.sh +++ b/backend/python/sentencetransformers/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the sentencetransformers server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source 
activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test_sentencetransformers.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/transformers-musicgen/Makefile b/backend/python/transformers-musicgen/Makefile index e28a356d..d45b4cb4 100644 --- a/backend/python/transformers-musicgen/Makefile +++ b/backend/python/transformers-musicgen/Makefile @@ -1,6 +1,6 @@ .PHONY: transformers-musicgen transformers-musicgen: protogen - $(MAKE) -C ../common-env/transformers + bash install.sh .PHONY: run run: protogen @@ -22,4 +22,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/transformers-musicgen/install.sh b/backend/python/transformers-musicgen/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/transformers-musicgen/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/transformers-musicgen/requirements.txt b/backend/python/transformers-musicgen/requirements.txt new file mode 100644 index 00000000..8f6d7be6 --- /dev/null +++ b/backend/python/transformers-musicgen/requirements.txt @@ -0,0 +1,7 @@ +accelerate +transformers +grpcio==1.63.0 +protobuf +torch +scipy==1.13.0 +certifi \ No newline at end of file diff --git a/backend/python/transformers-musicgen/run.sh b/backend/python/transformers-musicgen/run.sh old mode 100644 new mode 100755 index 3d3ffcfd..2087d6c8 --- a/backend/python/transformers-musicgen/run.sh +++ b/backend/python/transformers-musicgen/run.sh @@ -1,16 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the 
transformers-musicgen server with conda +## A bash script wrapper that runs the GRPC backend -echo "Launching gRPC server for transformers-musicgen" +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -export PATH=$PATH:/opt/conda/bin +source $MY_DIR/venv/bin/activate -# Activate conda environment -source activate transformers - -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/transformers_server.py $@ +python $MY_DIR/transformers_server.py $@ \ No newline at end of file diff --git a/backend/python/transformers-musicgen/test_transformers.py b/backend/python/transformers-musicgen/test.py similarity index 100% rename from backend/python/transformers-musicgen/test_transformers.py rename to backend/python/transformers-musicgen/test.py diff --git a/backend/python/transformers-musicgen/test.sh b/backend/python/transformers-musicgen/test.sh old mode 100644 new mode 100755 index a440f809..4b742b3f --- a/backend/python/transformers-musicgen/test.sh +++ b/backend/python/transformers-musicgen/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the transformers server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test_transformers.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/transformers/Makefile b/backend/python/transformers/Makefile index afe48405..b3593c50 100644 --- a/backend/python/transformers/Makefile +++ b/backend/python/transformers/Makefile @@ -1,6 +1,6 @@ .PHONY: transformers transformers: protogen - $(MAKE) -C ../common-env/transformers + bash install.sh .PHONY: run run: protogen @@ -23,4 +23,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. 
backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/transformers/install.sh b/backend/python/transformers/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/transformers/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/transformers/requirements.txt b/backend/python/transformers/requirements.txt new file mode 100644 index 00000000..57ceb96d --- /dev/null +++ b/backend/python/transformers/requirements.txt @@ -0,0 +1,6 @@ +accelerate +transformers +grpcio==1.63.0 +protobuf +torch +certifi \ No newline at end of file diff --git a/backend/python/transformers/run.sh b/backend/python/transformers/run.sh index d09c1f5c..23899c0f 100755 --- a/backend/python/transformers/run.sh +++ b/backend/python/transformers/run.sh @@ -1,20 +1,17 @@ #!/bin/bash ## -## A bash script wrapper that runs the transformers server with conda +## A bash script wrapper that runs the GRPC backend if [ -d "/opt/intel" ]; then # Assumes we are using the Intel oneAPI container image # https://github.com/intel/intel-extension-for-pytorch/issues/538 export XPU=1 -else - export PATH=$PATH:/opt/conda/bin - # Activate conda environment - source activate transformers fi -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -python $DIR/transformers_server.py $@ +source $MY_DIR/venv/bin/activate + +python $MY_DIR/transformers_server.py $@ \ No newline at end of file diff --git a/backend/python/transformers/test_transformers_server.py b/backend/python/transformers/test.py similarity index 100% rename from backend/python/transformers/test_transformers_server.py rename to backend/python/transformers/test.py diff --git a/backend/python/transformers/test.sh b/backend/python/transformers/test.sh old mode 100644 new mode 100755 index 46e55f01..4b742b3f --- a/backend/python/transformers/test.sh +++ b/backend/python/transformers/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the transformers server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate 
transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test_transformers_server.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/vall-e-x/.gitignore b/backend/python/vall-e-x/.gitignore new file mode 100644 index 00000000..1d3a0654 --- /dev/null +++ b/backend/python/vall-e-x/.gitignore @@ -0,0 +1 @@ +source \ No newline at end of file diff --git a/backend/python/vall-e-x/Makefile b/backend/python/vall-e-x/Makefile index d7a80e55..0f6e377f 100644 --- a/backend/python/vall-e-x/Makefile +++ b/backend/python/vall-e-x/Makefile @@ -4,7 +4,6 @@ endif .PHONY: ttsvalle ttsvalle: protogen - $(MAKE) -C ../common-env/transformers bash install.sh .PHONY: run @@ -27,4 +26,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf source venv \ No newline at end of file diff --git a/backend/python/vall-e-x/install.sh b/backend/python/vall-e-x/install.sh old mode 100644 new mode 100755 index a9c4117e..82170be6 --- a/backend/python/vall-e-x/install.sh +++ b/backend/python/vall-e-x/install.sh @@ -1,21 +1,39 @@ #!/bin/bash +set -ex -## -## A bash script installs the required dependencies of VALL-E-X and prepares the environment -export SHA=3faaf8ccadb154d63b38070caf518ce9309ea0f4 +BUILD_ISOLATION_FLAG="" -SKIP_CONDA=${SKIP_CONDA:-0} +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -if [ $SKIP_CONDA -ne 1 ]; then - source activate transformers -else - export PATH=$PATH:/opt/conda/bin - CONDA_PREFIX=$PWD +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare it's build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt fi -git clone https://github.com/Plachtaa/VALL-E-X.git $CONDA_PREFIX/vall-e-x && pushd $CONDA_PREFIX/vall-e-x && git checkout -b build $SHA && popd +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi -cp -rfv $CONDA_PREFIX/vall-e-x/* ./ +git clone https://github.com/Plachtaa/VALL-E-X.git $MY_DIR/source +pushd $MY_DIR/source && git checkout -b build $VALL_E_X_VERSION && popd +uv pip install ${BUILD_ISOLATION_FLAG} --requirement 
${MY_DIR}/source/requirements.txt + +cp -rfv ./*py $MY_DIR/source/ if [ "$PIP_CACHE_PURGE" = true ] ; then pip cache purge diff --git a/backend/python/vall-e-x/requirements.txt b/backend/python/vall-e-x/requirements.txt new file mode 100644 index 00000000..075c6231 --- /dev/null +++ b/backend/python/vall-e-x/requirements.txt @@ -0,0 +1,4 @@ +accelerate +grpcio==1.63.0 +protobuf +certifi \ No newline at end of file diff --git a/backend/python/vall-e-x/run.sh b/backend/python/vall-e-x/run.sh index 4fde705e..ce316a12 100755 --- a/backend/python/vall-e-x/run.sh +++ b/backend/python/vall-e-x/run.sh @@ -1,15 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the ttsvalle server with conda -export PATH=$PATH:/opt/conda/bin +## A bash script wrapper that runs the GRPC backend -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -cd $DIR - -python $DIR/ttsvalle.py $@ \ No newline at end of file +pushd $MY_DIR/source && python ttsvalle.py $@ \ No newline at end of file diff --git a/backend/python/vall-e-x/test.sh b/backend/python/vall-e-x/test.sh old mode 100644 new mode 100755 index 6184966d..91c6477d --- a/backend/python/vall-e-x/test.sh +++ b/backend/python/vall-e-x/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the ttsvalle server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR}/source + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file diff --git a/backend/python/vllm/Makefile b/backend/python/vllm/Makefile index 3e1fdd77..79bff60e 100644 --- a/backend/python/vllm/Makefile +++ b/backend/python/vllm/Makefile @@ -1,6 +1,6 @@ .PHONY: vllm vllm: protogen - $(MAKE) -C ../common-env/transformers + bash install.sh .PHONY: run run: protogen @@ -22,4 +22,8 @@ protogen-clean: $(RM) backend_pb2_grpc.py backend_pb2.py backend_pb2_grpc.py backend_pb2.py: - python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto \ No newline at end of file + python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. 
backend.proto + +.PHONY: clean +clean: protogen-clean + rm -rf venv \ No newline at end of file diff --git a/backend/python/vllm/install.sh b/backend/python/vllm/install.sh new file mode 100755 index 00000000..311203ca --- /dev/null +++ b/backend/python/vllm/install.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -ex + +BUILD_ISOLATION_FLAG="" + +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" + +uv venv ${MY_DIR}/venv +source ${MY_DIR}/venv/bin/activate + +if [ -f "requirements-install.txt" ]; then + # If we have a requirements-install.txt, it means that a package does not properly declare its build time + # dependencies per PEP-517, so we have to set up the proper build environment ourselves, and then install + # the package without build isolation + BUILD_ISOLATION_FLAG="--no-build-isolation" + uv pip install --requirement ${MY_DIR}/requirements-install.txt +fi +uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements.txt + +if [ -f "requirements-${BUILD_TYPE}.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --requirement ${MY_DIR}/requirements-${BUILD_TYPE}.txt +fi + +if [ -d "/opt/intel" ]; then + # Intel GPU: If the directory exists, we assume we are using the Intel image + # https://github.com/intel/intel-extension-for-pytorch/issues/538 + if [ -f "requirements-intel.txt" ]; then + uv pip install ${BUILD_ISOLATION_FLAG} --index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ --requirement ${MY_DIR}/requirements-intel.txt + fi +fi + +if [ "$PIP_CACHE_PURGE" = true ] ; then + pip cache purge +fi \ No newline at end of file diff --git a/backend/python/vllm/requirements-cublas.txt b/backend/python/vllm/requirements-cublas.txt new file mode 100644 index 00000000..7bfe8efe --- /dev/null +++ b/backend/python/vllm/requirements-cublas.txt @@ -0,0 +1 @@ +flash-attn \ No newline at end of file diff --git a/backend/python/vllm/requirements-install.txt b/backend/python/vllm/requirements-install.txt new file mode 100644 index 00000000..69d263f0 --- /dev/null +++ b/backend/python/vllm/requirements-install.txt @@ -0,0 +1,6 @@ +# mamba does not specify its build dependencies per PEP517, so we need to disable build isolation +# this also means that we need to install the basic build dependencies into the venv ourselves +# https://github.com/Dao-AILab/causal-conv1d/issues/24 +packaging +setuptools +wheel \ No newline at end of file diff --git a/backend/python/vllm/requirements.txt b/backend/python/vllm/requirements.txt new file mode 100644 index 00000000..3de0b4e2 --- /dev/null +++ b/backend/python/vllm/requirements.txt @@ -0,0 +1,7 @@ +accelerate +vllm +grpcio==1.63.0 +protobuf +certifi +transformers +setuptools \ No newline at end of file diff --git a/backend/python/vllm/run.sh b/backend/python/vllm/run.sh index 5d0084a7..34127e89 100755 --- a/backend/python/vllm/run.sh +++ b/backend/python/vllm/run.sh @@ -1,14 +1,10 @@ #!/bin/bash ## -## A bash script wrapper that runs the diffusers server with conda +## A bash script wrapper that runs the GRPC backend -export PATH=$PATH:/opt/conda/bin +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# Activate conda environment -source activate transformers +source $MY_DIR/venv/bin/activate -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" - -python $DIR/backend_vllm.py $@ \ No newline at end of file +python $MY_DIR/backend_vllm.py $@ \ No newline at end of file diff --git a/backend/python/mamba/test_backend_mamba.py b/backend/python/vllm/test.py similarity index 
100% rename from backend/python/mamba/test_backend_mamba.py rename to backend/python/vllm/test.py diff --git a/backend/python/vllm/test.sh b/backend/python/vllm/test.sh old mode 100644 new mode 100755 index 07363ad8..4b742b3f --- a/backend/python/vllm/test.sh +++ b/backend/python/vllm/test.sh @@ -1,11 +1,16 @@ #!/bin/bash ## -## A bash script wrapper that runs the transformers server with conda +## A bash script wrapper that runs python unittests -# Activate conda environment -source activate transformers +MY_DIR="$(dirname -- "${BASH_SOURCE[0]}")" -# get the directory where the bash script is located -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source $MY_DIR/venv/bin/activate -python -m unittest $DIR/test_backend_vllm.py \ No newline at end of file +if [ -f "${MY_DIR}/test.py" ]; then + pushd ${MY_DIR} + python -m unittest test.py + popd +else + echo "ERROR: No tests defined for backend!" + exit 1 +fi \ No newline at end of file