Compare commits


5 Commits

Author SHA1 Message Date
6a5d195109 release : v1.4.3 2023-11-07 16:15:48 +02:00
0cbef75422 ggml : fix MIN / MAX macro re-definition 2023-11-07 16:08:46 +02:00
2cdfc4e025 whisper : add support for large v3 (#1444)
* whisper : add support for large v3

* bench : fix build + fix go bindings

* bench : fix n_mels

* models : update readme
2023-11-07 15:30:18 +02:00
973111088b android : decouple example into a library and app module (#1445) 2023-11-07 14:27:33 +02:00
11b503055e whisper : reset ctx->t_start_us when calling whisper_reset_timings() (#1434)
Co-authored-by: Ben Nortier <ben@bjnortier.com>
2023-11-07 11:04:32 +02:00
16 changed files with 85 additions and 39 deletions

View File

@@ -1,6 +1,6 @@
cmake_minimum_required (VERSION 3.5)
-project(whisper.cpp VERSION 1.4.2)
+project(whisper.cpp VERSION 1.4.3)
# Add path to modules
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/")

View File

@@ -6,7 +6,7 @@
[![License: MIT](https://img.shields.io/badge/license-MIT-blue.svg)](https://opensource.org/licenses/MIT)
[![npm](https://img.shields.io/npm/v/whisper.cpp.svg)](https://www.npmjs.com/package/whisper.cpp/)
-Beta: [v1.4.2](https://github.com/ggerganov/whisper.cpp/releases/tag/v1.4.2) / Stable: [v1.2.1](https://github.com/ggerganov/whisper.cpp/releases/tag/v1.2.1) / [Roadmap | F.A.Q.](https://github.com/ggerganov/whisper.cpp/discussions/126)
+Beta: [v1.4.3](https://github.com/ggerganov/whisper.cpp/releases/tag/v1.4.3) / Stable: [v1.2.1](https://github.com/ggerganov/whisper.cpp/releases/tag/v1.2.1) / [Roadmap | F.A.Q.](https://github.com/ggerganov/whisper.cpp/discussions/126)
High-performance inference of [OpenAI's Whisper](https://github.com/openai/whisper) automatic speech recognition (ASR) model:

View File

@@ -1,6 +1,6 @@
{
"name": "whisper.cpp",
-"version": "1.4.2",
+"version": "1.4.3",
"description": "Whisper speech recognition",
"main": "whisper.js",
"scripts": {

View File

@@ -18,9 +18,7 @@ android {
vectorDrawables {
useSupportLibrary true
}
-ndk {
-abiFilters 'arm64-v8a', 'armeabi-v7a', 'x86', 'x86_64'
-}
}
buildTypes {
@@ -43,20 +41,10 @@ android {
composeOptions {
kotlinCompilerExtensionVersion '1.5.0'
}
-ndkVersion "25.2.9519653"
-externalNativeBuild {
-cmake {
-path = file("src/main/jni/whisper/CMakeLists.txt")
-}
-}
-packagingOptions {
-resources {
-excludes += '/META-INF/{AL2.0,LGPL2.1}'
-}
-}
}
dependencies {
+implementation project(':lib')
implementation 'androidx.activity:activity-compose:1.7.2'
implementation 'androidx.compose.material:material-icons-core:1.5.0'
implementation 'androidx.compose.material3:material3:1.1.1'

View File

@@ -15,7 +15,7 @@ import androidx.lifecycle.viewmodel.initializer
import androidx.lifecycle.viewmodel.viewModelFactory
import com.whispercppdemo.media.decodeWaveFile
import com.whispercppdemo.recorder.Recorder
-import com.whispercppdemo.whisper.WhisperContext
+import com.whispercpp.whisper.WhisperContext
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
@@ -35,7 +35,7 @@ class MainScreenViewModel(private val application: Application) : ViewModel() {
private val modelsPath = File(application.filesDir, "models")
private val samplesPath = File(application.filesDir, "samples")
private var recorder: Recorder = Recorder()
-private var whisperContext: WhisperContext? = null
+private var whisperContext: com.whispercpp.whisper.WhisperContext? = null
private var mediaPlayer: MediaPlayer? = null
private var recordedFile: File? = null
@@ -47,7 +47,7 @@ class MainScreenViewModel(private val application: Application) : ViewModel() {
}
private suspend fun printSystemInfo() {
-printMessage(String.format("System Info: %s\n", WhisperContext.getSystemInfo()))
+printMessage(String.format("System Info: %s\n", com.whispercpp.whisper.WhisperContext.getSystemInfo()))
}
private suspend fun loadData() {
@@ -78,7 +78,7 @@ class MainScreenViewModel(private val application: Application) : ViewModel() {
printMessage("Loading model...\n")
val models = application.assets.list("models/")
if (models != null) {
-whisperContext = WhisperContext.createContextFromAsset(application.assets, "models/" + models[0])
+whisperContext = com.whispercpp.whisper.WhisperContext.createContextFromAsset(application.assets, "models/" + models[0])
printMessage("Loaded model ${models[0]}.\n")
}

View File

@@ -0,0 +1 @@
+/build

View File

@@ -0,0 +1,51 @@
plugins {
id 'com.android.library'
id 'org.jetbrains.kotlin.android'
}
android {
namespace 'com.whispercpp'
compileSdk 34
defaultConfig {
minSdk 26
targetSdk 34
versionCode 1
versionName "1.0"
ndk {
abiFilters 'arm64-v8a', 'armeabi-v7a', 'x86', 'x86_64'
}
}
buildTypes {
release {
minifyEnabled false
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
ndkVersion "25.2.9519653"
externalNativeBuild {
cmake {
path = file("src/main/jni/whisper/CMakeLists.txt")
}
}
packagingOptions {
resources {
excludes += '/META-INF/{AL2.0,LGPL2.1}'
}
}
}
dependencies {
implementation 'androidx.core:core-ktx:1.9.0'
implementation 'androidx.appcompat:appcompat:1.6.1'
implementation 'com.google.android.material:material:1.8.0'
}

View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
</manifest>

View File

@@ -1,4 +1,4 @@
-package com.whispercppdemo.whisper
+package com.whispercpp.whisper
import android.content.res.AssetManager
import android.os.Build

View File

@@ -1,4 +1,4 @@
-package com.whispercppdemo.whisper
+package com.whispercpp.whisper
import android.util.Log
import java.io.BufferedReader

View File

@@ -131,7 +131,7 @@ static struct whisper_context *whisper_init_from_asset(
}
JNIEXPORT jlong JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_initContextFromAsset(
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_initContextFromAsset(
JNIEnv *env, jobject thiz, jobject assetManager, jstring asset_path_str) {
UNUSED(thiz);
struct whisper_context *context = NULL;
@@ -142,7 +142,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_initContextFromAsset(
}
JNIEXPORT jlong JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_initContext(
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_initContext(
JNIEnv *env, jobject thiz, jstring model_path_str) {
UNUSED(thiz);
struct whisper_context *context = NULL;
@@ -153,7 +153,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_initContext(
}
JNIEXPORT void JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_freeContext(
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_freeContext(
JNIEnv *env, jobject thiz, jlong context_ptr) {
UNUSED(env);
UNUSED(thiz);
@@ -162,7 +162,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_freeContext(
}
JNIEXPORT void JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_fullTranscribe(
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_fullTranscribe(
JNIEnv *env, jobject thiz, jlong context_ptr, jint num_threads, jfloatArray audio_data) {
UNUSED(thiz);
struct whisper_context *context = (struct whisper_context *) context_ptr;
@@ -194,7 +194,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_fullTranscribe(
}
JNIEXPORT jint JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_getTextSegmentCount(
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_getTextSegmentCount(
JNIEnv *env, jobject thiz, jlong context_ptr) {
UNUSED(env);
UNUSED(thiz);
@@ -203,7 +203,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_getTextSegmentCount(
}
JNIEXPORT jstring JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_getTextSegment(
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_getTextSegment(
JNIEnv *env, jobject thiz, jlong context_ptr, jint index) {
UNUSED(thiz);
struct whisper_context *context = (struct whisper_context *) context_ptr;
@@ -213,7 +213,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_getTextSegment(
}
JNIEXPORT jstring JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_getSystemInfo(
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_getSystemInfo(
JNIEnv *env, jobject thiz
) {
UNUSED(thiz);
@@ -223,7 +223,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_getSystemInfo(
}
JNIEXPORT jstring JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_benchMemcpy(JNIEnv *env, jobject thiz,
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_benchMemcpy(JNIEnv *env, jobject thiz,
jint n_threads) {
UNUSED(thiz);
const char *bench_ggml_memcpy = whisper_bench_memcpy_str(n_threads);
@@ -231,7 +231,7 @@ Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_benchMemcpy(JNIEnv *en
}
JNIEXPORT jstring JNICALL
-Java_com_whispercppdemo_whisper_WhisperLib_00024Companion_benchGgmlMulMat(JNIEnv *env, jobject thiz,
+Java_com_whispercpp_whisper_WhisperLib_00024Companion_benchGgmlMulMat(JNIEnv *env, jobject thiz,
jint n_threads) {
UNUSED(thiz);
const char *bench_ggml_mul_mat = whisper_bench_ggml_mul_mat_str(n_threads);
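Note: the renames above follow mechanically from moving the WhisperLib bindings out of the demo's com.whispercppdemo.whisper package into the new com.whispercpp.whisper library module. JNI locates a native method through a symbol derived from the Java/Kotlin package, class and method name, so every exported function has to be renamed to match. A minimal sketch of the rule for the getSystemInfo binding shown above (the function body is an illustrative guess, not the exact code from jni.c):

#include <jni.h>
#include "whisper.h"

// Kotlin side (assumed): package com.whispercpp.whisper
//                        class WhisperLib { companion object { external fun getSystemInfo(): String } }
// JNI symbol:            Java_<package>_<class>_<method>, with '.' mapped to '_'
//                        and the '$' of the nested Companion class escaped as _00024.
JNIEXPORT jstring JNICALL
Java_com_whispercpp_whisper_WhisperLib_00024Companion_getSystemInfo(
        JNIEnv *env, jobject thiz) {
    (void) thiz;
    // whisper_print_system_info() is the public whisper.h helper that reports
    // which SIMD/BLAS features the library was built with.
    return (*env)->NewStringUTF(env, whisper_print_system_info());
}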

View File

@@ -14,3 +14,4 @@ dependencyResolutionManagement {
}
rootProject.name = "WhisperCppDemo"
include ':app'
+include ':lib'

ggml.c (12 changed lines)
View File

@@ -143,12 +143,6 @@ void ggml_print_backtrace(void) {
}
#endif
-#undef MIN
-#undef MAX
-#define MIN(a, b) ((a) < (b) ? (a) : (b))
-#define MAX(a, b) ((a) > (b) ? (a) : (b))
/*#define GGML_PERF*/
#define GGML_DEBUG 0
#define GGML_GELU_FP16
@@ -277,6 +271,12 @@ inline static void * ggml_aligned_malloc(size_t size) {
// floating point type used to accumulate sums
typedef double ggml_float;
+#undef MIN
+#undef MAX
+#define MIN(a, b) ((a) < (b) ? (a) : (b))
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
//
// global data
//
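Note: the two hunks above move ggml's MIN / MAX definitions so that the #undef / #define runs after the headers and helpers that may already define these macros, which is what was triggering the re-definition warning. A standalone sketch of the pattern, assuming <sys/param.h> as an example of a system header that defines MIN and MAX on some platforms; this is not code from ggml.c:

/* minimal sketch of the problem and the fix */
#include <stdio.h>
#include <sys/param.h>   /* may already define MIN and MAX */

/* a plain #define here could collide with the definitions above, so drop any
   earlier definition first and then install the project's own version */
#undef MIN
#undef MAX
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))

int main(void) {
    printf("%d %d\n", MIN(2, 3), MAX(2, 3));   /* prints "2 3" */
    return 0;
}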

View File

@@ -3714,6 +3714,7 @@ void whisper_print_timings(struct whisper_context * ctx) {
}
void whisper_reset_timings(struct whisper_context * ctx) {
+ctx->t_start_us = ggml_time_us();
if (ctx->state != nullptr) {
ctx->state->t_sample_us = 0;
ctx->state->t_encode_us = 0;
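Note: the final hunk makes whisper_reset_timings() clear the context's start timestamp (ctx->t_start_us) in addition to the per-state counters, so timings reported after a reset cover only the work done since that reset. A hedged usage sketch of the public API involved (the run_twice helper and pcm buffer names are illustrative; the whisper.h calls are real):

/* reset the timers between runs so whisper_print_timings() reports only the
   most recent transcription */
#include "whisper.h"

void run_twice(struct whisper_context * ctx,
               const float * pcm_a, int n_a,
               const float * pcm_b, int n_b) {
    struct whisper_full_params params =
        whisper_full_default_params(WHISPER_SAMPLING_GREEDY);

    whisper_full(ctx, params, pcm_a, n_a);
    whisper_print_timings(ctx);   /* timings of the first run */

    whisper_reset_timings(ctx);   /* with this change, t_start_us is reset too */

    whisper_full(ctx, params, pcm_b, n_b);
    whisper_print_timings(ctx);   /* timings of the second run only */
}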