ifndef UNAME_S
UNAME_S := $(shell uname -s)
endif

ifndef UNAME_P
UNAME_P := $(shell uname -p)
endif

ifndef UNAME_M
UNAME_M := $(shell uname -m)
endif
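# note: because of the "ifndef" guards above, all three variables can be overridden from
# the command line if auto-detection misreports the platform, e.g.: make UNAME_M=arm64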

# Mac OS + Arm can report x86_64
# ref: https://github.com/ggerganov/whisper.cpp/issues/66#issuecomment-1282546789
ifeq ($(UNAME_S),Darwin)
    ifneq ($(UNAME_P),arm)
        SYSCTL_M := $(shell sysctl -n hw.optional.arm64)
        ifeq ($(SYSCTL_M),1)
            # UNAME_P := arm
            # UNAME_M := arm64
            warn := $(warning Your arch is announced as x86_64, but it seems to actually be ARM64. Not fixing that can lead to bad performance. For more info see: https://github.com/ggerganov/whisper.cpp/issues/66\#issuecomment-1282546789)
        endif
    endif
endif
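# note: on Apple Silicon, "uname" can report x86_64 (e.g. when the shell runs under
# Rosetta) while "sysctl -n hw.optional.arm64" still prints 1, which is what the check
# above relies on; uncommenting the two UNAME_* overrides keeps the x86-only flags
# below from being applied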

#
# Compile flags
#

CFLAGS   = -I.              -O3 -std=c11
CXXFLAGS = -I. -I./examples -O3 -std=c++11
LDFLAGS  =
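# note: -std=c11 (CFLAGS) is used for the C/Objective-C objects and -std=c++11
# (CXXFLAGS) for whisper.cpp and the examples; the OS- and architecture-specific
# sections below append to these flags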
# OS specific
# TODO: support Windows
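# note: -pthread is passed at compile time and, because the link commands below reuse
# $(CXXFLAGS), at link time as well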
ifeq ($(UNAME_S),Linux)
    CFLAGS   += -pthread
    CXXFLAGS += -pthread
endif
ifeq ($(UNAME_S),Darwin)
    CFLAGS   += -pthread
    CXXFLAGS += -pthread
endif
ifeq ($(UNAME_S),FreeBSD)
    CFLAGS   += -pthread
    CXXFLAGS += -pthread
endif

# Architecture specific
# TODO: probably these flags need to be tweaked on some architectures
# feel free to update the Makefile for your architecture and send a pull request or issue
ifeq ($(UNAME_M),x86_64)
    CFLAGS += -mavx -mavx2 -mfma -mf16c
endif
ifeq ($(UNAME_M),amd64)
    CFLAGS += -mavx -mavx2 -mfma -mf16c
endif
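# note: -mavx -mavx2 -mfma -mf16c enable the AVX/AVX2/FMA/F16C instruction sets used by
# ggml's SIMD code paths on x86-64; on CPUs without AVX2 support these flags would need
# to be trimmed down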
ifndef WHISPER_NO_ACCELERATE
    # Mac M1 - include Accelerate framework
    ifeq ($(UNAME_S),Darwin)
        CFLAGS  += -DGGML_USE_ACCELERATE -DGGML_PERF
        LDFLAGS += -framework Foundation -framework Accelerate -framework Metal -framework MetalKit -framework MetalPerformanceShaders
    endif
endif
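# to build without the Accelerate/Metal path on macOS, define the guard variable,
# e.g.: make WHISPER_NO_ACCELERATE=1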
ifneq ($(filter aarch64%,$(UNAME_M)),)
endif
ifneq ($(filter armv6%,$(UNAME_M)),)
    # Raspberry Pi 1, 2, 3
    CFLAGS += -mfpu=neon-fp-armv8 -mfp16-format=ieee -mno-unaligned-access
endif
ifneq ($(filter armv7%,$(UNAME_M)),)
    # Raspberry Pi 4
    CFLAGS += -mfpu=neon-fp-armv8 -mfp16-format=ieee -mno-unaligned-access -funsafe-math-optimizations
endif
ifneq ($(filter armv8%,$(UNAME_M)),)
    # Raspberry Pi 4
    CFLAGS += -mfp16-format=ieee -mno-unaligned-access
endif
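# note: the aarch64 branch above adds no extra flags, since NEON is available by default
# on 64-bit ARM; the armv6/armv7/armv8 flags are Raspberry Pi settings and may need
# tweaking for other 32-bit ARM boards
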
#
# Build library + main
#

main: examples/main/main.cpp ggml.o ggml-mtl.o whisper.o
	$(CXX) $(CXXFLAGS) examples/main/main.cpp whisper.o ggml.o ggml-mtl.o -o main $(LDFLAGS)
	./main -h
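# example run after building (a sketch, assuming a model and audio samples have already
# been fetched with the "base.en" and "samples" targets further down):
#   ./main -m models/ggml-base.en.bin -f samples/gb0.wav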

ggml.o: ggml.c ggml.h
	$(CC) $(CFLAGS) -c ggml.c -o ggml.o

ggml-mtl.o: ggml-mtl.m ggml-mtl.h
	$(CC) $(CFLAGS) -c ggml-mtl.m -o ggml-mtl.o

whisper.o: whisper.cpp whisper.h
	$(CXX) $(CXXFLAGS) -c whisper.cpp -o whisper.o

libwhisper.a: ggml.o ggml-mtl.o whisper.o
	$(AR) rcs libwhisper.a ggml.o ggml-mtl.o whisper.o
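# linking another program against the static library (a sketch; "my_app.cpp" is a
# placeholder, whisper.h is the public header, and $(LDFLAGS) carries the macOS
# frameworks when they are enabled):
#   $(CXX) -I. my_app.cpp libwhisper.a -o my_app $(LDFLAGS)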

clean:
	rm -f *.o main stream bench libwhisper.a

#
# Examples
#

CC_SDL=`sdl2-config --cflags --libs`

stream: examples/stream/stream.cpp ggml.o whisper.o
	$(CXX) $(CXXFLAGS) examples/stream/stream.cpp ggml.o whisper.o -o stream $(CC_SDL) $(LDFLAGS)

bench: examples/bench/bench.cpp ggml.o whisper.o
	$(CXX) $(CXXFLAGS) examples/bench/bench.cpp ggml.o whisper.o -o bench $(LDFLAGS)
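# note: the stream example links against SDL2 via sdl2-config, so the SDL2 development
# package needs to be installed for the "stream" target to build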

#
# Audio samples
#

# download a few audio samples into folder "./samples":
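# (this assumes wget and ffmpeg are available; the ffmpeg step re-encodes the downloads
# to 16 kHz mono 16-bit PCM WAV, the input format the main example expects)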
.PHONY: samples
samples:
	@echo "Downloading samples..."
	@mkdir -p samples
	@wget --quiet --show-progress -O samples/gb0.ogg https://upload.wikimedia.org/wikipedia/commons/2/22/George_W._Bush%27s_weekly_radio_address_%28November_1%2C_2008%29.oga
	@wget --quiet --show-progress -O samples/gb1.ogg https://upload.wikimedia.org/wikipedia/commons/1/1f/George_W_Bush_Columbia_FINAL.ogg
	@wget --quiet --show-progress -O samples/hp0.ogg https://upload.wikimedia.org/wikipedia/en/d/d4/En.henryfphillips.ogg
	@wget --quiet --show-progress -O samples/mm1.wav https://cdn.openai.com/whisper/draft-20220913a/micro-machines.wav
	@echo "Converting to 16-bit WAV ..."
	@ffmpeg -loglevel -0 -y -i samples/gb0.ogg -ar 16000 -ac 1 -c:a pcm_s16le samples/gb0.wav
	@ffmpeg -loglevel -0 -y -i samples/gb1.ogg -ar 16000 -ac 1 -c:a pcm_s16le samples/gb1.wav
	@ffmpeg -loglevel -0 -y -i samples/hp0.ogg -ar 16000 -ac 1 -c:a pcm_s16le samples/hp0.wav
	@ffmpeg -loglevel -0 -y -i samples/mm1.wav -ar 16000 -ac 1 -c:a pcm_s16le samples/mm0.wav
	@rm samples/mm1.wav

#
# Models
#

# if not already downloaded, the following targets download the specified model and
# run it on all samples in the folder "./samples":
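# example: "make base.en" downloads models/ggml-base.en.bin if it is missing and then
# transcribes every .wav file in ./samples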

.PHONY: tiny.en
.PHONY: tiny
.PHONY: base.en
.PHONY: base
.PHONY: small.en
.PHONY: small
.PHONY: medium.en
.PHONY: medium
.PHONY: large

tiny.en tiny base.en base small.en small medium.en medium large: main
	bash ./models/download-ggml-model.sh $@
	@echo ""
	@echo "==============================================="
	@echo "Running $@ on all samples in ./samples ..."
	@echo "==============================================="
	@echo ""
	@for f in samples/*.wav; do \
		echo "----------------------------------------------" ; \
		echo "[+] Running $@ on $$f ... (run 'ffplay $$f' to listen)" ; \
		echo "----------------------------------------------" ; \
		echo "" ; \
		./main -m models/ggml-$@.bin -f $$f ; \
		echo "" ; \
	done