Commit 2553de0187
* feat(vllm): add support for image-to-text (related to https://github.com/mudler/LocalAI/issues/3670)
* feat(vllm): add support for video-to-text (closes https://github.com/mudler/LocalAI/issues/2318)
* feat(vllm): support CPU installations
* feat(vllm): add bnb
* chore: add docs reference
* Apply suggestions from code review

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Signed-off-by: Ettore Di Giacinto <mudler@users.noreply.github.com>
#!/bin/bash
set -e

EXTRA_PIP_INSTALL_FLAGS="--no-build-isolation"

source $(dirname $0)/../common/libbackend.sh
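# libbackend.sh is the shared helper library for the Python backends; the
# functions used below (ensureVenv, installRequirements) come from it.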

# This is here because the Intel pip index is broken: it returns 200 status codes
# for every package name but doesn't return any package links.
# This makes uv think that the package exists in the Intel pip index, and by
# default uv stops looking at other pip indexes once it finds a match.
# We need uv to continue falling through to the default PyPI index so that it can
# find optimum[openvino] there.
# The --upgrade actually allows us to *downgrade* torch to the version provided
# in the Intel pip index.
if [ "x${BUILD_PROFILE}" == "xintel" ]; then
    EXTRA_PIP_INSTALL_FLAGS+=" --upgrade --index-strategy=unsafe-first-match"
fi
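
# Sketch, not part of the original script: assuming installRequirements forwards
# EXTRA_PIP_INSTALL_FLAGS to uv, the Intel-profile install is roughly equivalent to
#   uv pip install --no-build-isolation --upgrade --index-strategy=unsafe-first-match <packages>
# which lets uv keep falling through to the default PyPI index instead of stopping
# at the first (broken) match from the Intel index.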

if [ "x${BUILD_TYPE}" == "x" ]; then
    ensureVenv

    # CPU-only build of vLLM from source; see
    # https://docs.vllm.ai/en/v0.6.1/getting_started/cpu-installation.html
    if [ ! -d vllm ]; then
        git clone https://github.com/vllm-project/vllm
    fi

    pushd vllm
    uv pip install wheel packaging ninja "setuptools>=49.4.0" numpy typing-extensions pillow setuptools-scm grpcio==1.66.2 protobuf bitsandbytes
    uv pip install -v -r requirements-cpu.txt --extra-index-url https://download.pytorch.org/whl/cpu
    VLLM_TARGET_DEVICE=cpu python setup.py install
    popd

    rm -rf vllm
else
    installRequirements
fi
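
# Sketch of a post-install smoke test (an assumption, not part of the upstream
# script): with the backend's virtualenv active, this should succeed after either
# install path:
#   python -c "import vllm; print(vllm.__version__)"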