Mirror of https://github.com/ggerganov/whisper.cpp.git (synced 2025-06-13 04:28:07 +00:00)
whisper.swiftui : add model download list & bench methods (#2546)
* swift : fix resources & exclude build
* whisper : impl whisper_timings struct & api
* whisper.swiftui : model list & bench methods
* whisper : return ptr for whisper_get_timings
* revert unnecessary change
* whisper : avoid designated initializer
* whisper.swiftui : code style changes
* whisper.swiftui : get device name / os from UIDevice
* whisper.swiftui : fix UIDevice usage
* whisper.swiftui : add memcpy and ggml_mul_mat (commented)
@@ -0,0 +1,17 @@
+import Foundation
+
+struct Model: Identifiable {
+    var id = UUID()
+    var name: String
+    var info: String
+    var url: String
+
+    var filename: String
+    var fileURL: URL {
+        FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent(filename)
+    }
+
+    func fileExists() -> Bool {
+        FileManager.default.fileExists(atPath: fileURL.path)
+    }
+}
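For context, a minimal sketch of how the new Model struct might describe one entry of the download list and fetch it into the app's Documents directory. The sample name, URL, and the downloadModel helper are illustrative assumptions and are not part of this commit:

// Hypothetical list entry for the download UI (values are examples only).
let tinyEn = Model(
    name: "tiny.en",
    info: "(F16, 75 MiB)",
    url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en.bin",
    filename: "ggml-tiny.en.bin"
)

// Illustrative download helper: fetches the file only if it is not already on disk,
// then moves it to Model.fileURL inside the Documents directory.
func downloadModel(_ model: Model) async throws {
    guard !model.fileExists(), let remote = URL(string: model.url) else { return }
    let (tempURL, _) = try await URLSession.shared.download(from: remote)
    try FileManager.default.moveItem(at: tempURL, to: model.fileURL)
}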
@@ -14,7 +14,7 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
     private var recordedFile: URL? = nil
     private var audioPlayer: AVAudioPlayer?
 
-    private var modelUrl: URL? {
+    private var builtInModelUrl: URL? {
         Bundle.main.url(forResource: "ggml-base.en", withExtension: "bin", subdirectory: "models")
     }
 
@@ -28,23 +28,59 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
 
     override init() {
         super.init()
+        loadModel()
+    }
+
+    func loadModel(path: URL? = nil, log: Bool = true) {
         do {
-            try loadModel()
+            whisperContext = nil
+            if (log) { messageLog += "Loading model...\n" }
+            let modelUrl = path ?? builtInModelUrl
+            if let modelUrl {
+                whisperContext = try WhisperContext.createContext(path: modelUrl.path())
+                if (log) { messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" }
+            } else {
+                if (log) { messageLog += "Could not locate model\n" }
+            }
             canTranscribe = true
         } catch {
             print(error.localizedDescription)
-            messageLog += "\(error.localizedDescription)\n"
+            if (log) { messageLog += "\(error.localizedDescription)\n" }
         }
     }
 
-    private func loadModel() throws {
-        messageLog += "Loading model...\n"
-        if let modelUrl {
-            whisperContext = try WhisperContext.createContext(path: modelUrl.path())
-            messageLog += "Loaded model \(modelUrl.lastPathComponent)\n"
-        } else {
-            messageLog += "Could not locate model\n"
-        }
-    }
+    func benchCurrentModel() async {
+        if whisperContext == nil {
+            messageLog += "Cannot bench without loaded model\n"
+            return
+        }
+        messageLog += "Running benchmark for loaded model\n"
+        let result = await whisperContext?.benchFull(modelName: "<current>", nThreads: Int32(min(4, cpuCount())))
+        if (result != nil) { messageLog += result! + "\n" }
+    }
+
+    func bench(models: [Model]) async {
+        let nThreads = Int32(min(4, cpuCount()))
+
+//        messageLog += "Running memcpy benchmark\n"
+//        messageLog += await WhisperContext.benchMemcpy(nThreads: nThreads) + "\n"
+//
+//        messageLog += "Running ggml_mul_mat benchmark with \(nThreads) threads\n"
+//        messageLog += await WhisperContext.benchGgmlMulMat(nThreads: nThreads) + "\n"
+
+        messageLog += "Running benchmark for all downloaded models\n"
+        messageLog += "| CPU | OS | Config | Model | Th | FA | Enc. | Dec. | Bch5 | PP | Commit |\n"
+        messageLog += "| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n"
+        for model in models {
+            loadModel(path: model.fileURL, log: false)
+            if whisperContext == nil {
+                messageLog += "Cannot bench without loaded model\n"
+                break
+            }
+            let result = await whisperContext?.benchFull(modelName: model.name, nThreads: nThreads)
+            if (result != nil) { messageLog += result! + "\n" }
+        }
+        messageLog += "Benchmarking completed\n"
+    }
 
     func transcribeSample() async {
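A rough sketch of how a SwiftUI view could drive the new bench methods. The view name, the injected whisperState property, and downloadedModels are assumptions for illustration and do not appear in this diff:

import SwiftUI

struct BenchButtons: View {
    @ObservedObject var whisperState: WhisperState   // assumed to be provided by the parent view
    let downloadedModels: [Model]                    // hypothetical list of downloaded models

    var body: some View {
        HStack {
            Button("Bench current") {
                Task { await whisperState.benchCurrentModel() }
            }
            Button("Bench all") {
                // Only pass models whose files are actually present on disk.
                Task { await whisperState.bench(models: downloadedModels.filter { $0.fileExists() }) }
            }
        }
    }
}

The results accumulate in messageLog as rows of the markdown table whose header is written at the start of bench(models:).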
@@ -160,3 +196,8 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
         isRecording = false
     }
 }
+
+
+fileprivate func cpuCount() -> Int {
+    ProcessInfo.processInfo.processorCount
+}