whisper.cpp/models/download-ggml-model.sh
#!/bin/sh
# This script downloads Whisper model files that have already been converted to ggml format.
# This way you don't have to convert them yourself.
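#
# Example usage (a quick sketch: the model names come from the list below,
# and the destination path is an illustrative placeholder):
#   ./download-ggml-model.sh base.en
#   ./download-ggml-model.sh tiny.en-q5_1 /path/to/models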
#src="https://ggml.ggerganov.com"
#pfx="ggml-model-whisper"
src="https://huggingface.co/ggerganov/whisper.cpp"
pfx="resolve/main/ggml"
BOLD="\033[1m"
RESET='\033[0m'
# get the path of this script
get_script_path() {
if [ -x "$(command -v realpath)" ]; then
dirname "$(realpath "$0")"
else
_ret="$(cd -- "$(dirname "$0")" >/dev/null 2>&1 || exit ; pwd -P)"
echo "$_ret"
fi
}
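# models are written to the directory of this script unless an optional
# second argument names a different destination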
models_path="${2:-$(get_script_path)}"
# Whisper models
models="tiny
tiny.en
tiny-q5_1
tiny.en-q5_1
tiny-q8_0
base
base.en
base-q5_1
base.en-q5_1
base-q8_0
small
small.en
small.en-tdrz
small-q5_1
small.en-q5_1
small-q8_0
medium
medium.en
medium-q5_0
medium.en-q5_0
medium-q8_0
large-v1
large-v2
large-v2-q5_0
large-v2-q8_0
large-v3
large-v3-q5_0
large-v3-turbo
large-v3-turbo-q5_0
large-v3-turbo-q8_0"
# list available models
list_models() {
printf "\n"
printf "Available models:"
model_class=""
for model in $models; do
this_model_class="${model%%[.-]*}"
if [ "$this_model_class" != "$model_class" ]; then
printf "\n "
model_class=$this_model_class
fi
printf " %s" "$model"
done
printf "\n\n"
}
if [ "$#" -lt 1 ] || [ "$#" -gt 2 ]; then
printf "Usage: %s <model> [models_path]\n" "$0"
list_models
printf "___________________________________________________________\n"
printf "${BOLD}.en${RESET} = english-only ${BOLD}-q5_[01]${RESET} = quantized ${BOLD}-tdrz${RESET} = tinydiarize\n"
exit 1
fi
model=$1
if ! echo "$models" | grep -q -w "$model"; then
printf "Invalid model: %s\n" "$model"
list_models
exit 1
fi
# check if model contains `tdrz` and update the src and pfx accordingly
if echo "$model" | grep -q "tdrz"; then
src="https://huggingface.co/akashmjn/tinydiarize-whisper.cpp"
pfx="resolve/main/ggml"
fi
echo "$model" | grep -q '^"tdrz"*$'
# download ggml model
printf "Downloading ggml model %s from '%s' ...\n" "$model" "$src"
cd "$models_path" || exit
if [ -f "ggml-$model.bin" ]; then
printf "Model %s already exists. Skipping download.\n" "$model"
exit 0
fi
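# try the available download clients in order of preference: wget2, then wget, then curl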
if [ -x "$(command -v wget2)" ]; then
    wget2 --no-config --progress bar -O ggml-"$model".bin $src/$pfx-"$model".bin
elif [ -x "$(command -v wget)" ]; then
    wget --no-config --quiet --show-progress -O ggml-"$model".bin $src/$pfx-"$model".bin
elif [ -x "$(command -v curl)" ]; then
    curl -L --output ggml-"$model".bin $src/$pfx-"$model".bin
else
    printf "One of wget2, wget or curl is required to download models.\n"
    exit 1
fi
if [ $? -ne 0 ]; then
printf "Failed to download ggml model %s \n" "$model"
printf "Please try again later or download the original Whisper model files and convert them yourself.\n"
exit 1
fi
printf "Done! Model '%s' saved in '%s/ggml-%s.bin'\n" "$model" "$models_path" "$model"
printf "You can now use it like this:\n\n"
printf " $ ./main -m %s/ggml-%s.bin -f samples/jfk.wav\n" "$models_path" "$model"
printf "\n"