#!/bin/bash
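#
# Prepares the Python environment:
#   - creates a conda environment named "transformers" from the environment YAML
#     passed as the first argument (skipped when SKIP_CONDA=1)
#   - installs Intel-specific pip packages when /opt/intel exists (intel image)
#   - installs FlashAttention when the requirements file ends in -nvidia.yml
#   - optionally purges the pip cache when PIP_CACHE_PURGE=true
#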
set -ex
SKIP_CONDA=${SKIP_CONDA:-0}
REQUIREMENTS_FILE=$1

# Check if environment exists.
# Note: the check is negated, so the function succeeds when the environment
# does NOT exist yet (the caller then creates it).
conda_env_exists(){
    ! conda list --name "${@}" >/dev/null 2>/dev/null
}

if [ "$SKIP_CONDA" -eq 1 ]; then
    echo "Skipping conda environment installation"
else
    export PATH=$PATH:/opt/conda/bin
    if conda_env_exists "transformers" ; then
        echo "Creating virtual environment..."
        conda env create --name transformers --file "$REQUIREMENTS_FILE"
        echo "Virtual environment created."
    else
        echo "Virtual environment already exists."
    fi
fi
if [ -d "/opt/intel" ]; then
|
|
|
|
# Intel GPU: If the directory exists, we assume we are using the intel image
|
|
|
|
# (no conda env)
|
|
|
|
# https://github.com/intel/intel-extension-for-pytorch/issues/538
|
2024-03-27 16:50:35 +00:00
|
|
|
pip install intel-extension-for-transformers datasets sentencepiece tiktoken neural_speed optimum[openvino]
|
2024-03-07 13:37:45 +00:00
|
|
|
fi

# If we didn't skip conda, activate the environment
# to install FlashAttention
if [ "$SKIP_CONDA" -eq 0 ]; then
    source activate transformers
fi

if [[ $REQUIREMENTS_FILE =~ -nvidia\.yml$ ]]; then
    # TODO: FlashAttention is supported on NVIDIA and ROCm, but the ROCm install can't be done this easily
    pip install flash-attn --no-build-isolation
fi
if [ "$PIP_CACHE_PURGE" = true ] ; then
|
2024-01-07 23:37:02 +00:00
|
|
|
pip cache purge
|
|
|
|
fi