#!/bin/sh

# Usage for Whisper models:     ./generate-coreml-model.sh <model-name>
# Usage for HuggingFace models: ./generate-coreml-model.sh -h5 <model-name> <model-path>
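#
# Example (for a standard Whisper model name such as base.en):
#   ./generate-coreml-model.sh base.en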
if [ $# -eq 0 ]; then
    echo "No model name supplied"
    echo "Usage for Whisper models: ./generate-coreml-model.sh <model-name>"
    echo "Usage for HuggingFace models: ./generate-coreml-model.sh -h5 <model-name> <model-path>"
    exit 1
elif [ "$1" = "-h5" ] && [ $# != 3 ]; then
    echo "No model name and model path supplied for a HuggingFace model"
    echo "Usage for HuggingFace models: ./generate-coreml-model.sh -h5 <model-name> <model-path>"
    exit 1
fi

mname="$1"

wd=$(dirname "$0")
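# cd to the parent of the script's directory (the repo root) so the
# relative models/ paths below resolve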
cd "$wd/../" || exit
if [ "$mname" = "-h5" ]; then
    mname="$2"
    mpath="$3"
    echo "$mpath"
    python3 models/convert-h5-to-coreml.py --model-name "$mname" --model-path "$mpath" --encoder-only True
else
    python3 models/convert-whisper-to-coreml.py --model "$mname" --encoder-only True --optimize-ane True
fi
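
# compile the generated Core ML package into an .mlmodelc bundle and rename
# it to ggml-<model>-encoder.mlmodelc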
xcrun coremlc compile models/coreml-encoder-"${mname}".mlpackage models/
rm -rf models/ggml-"${mname}"-encoder.mlmodelc
mv -v models/coreml-encoder-"${mname}".mlmodelc models/ggml-"${mname}"-encoder.mlmodelc

# TODO: decoder (sometime in the future maybe)
#xcrun coremlc compile models/whisper-decoder-${mname}.mlpackage models/
#rm -rf models/ggml-${mname}-decoder.mlmodelc
#mv -v models/coreml_decoder_${mname}.mlmodelc models/ggml-${mname}-decoder.mlmodelc