From a52ee08c1ea55d43e5a80cd63efda6d252d9ed25 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sun, 23 Oct 2022 12:24:03 +0300
Subject: [PATCH] objc : polishing the sample application

---
 .../whisper.objc/Base.lproj/Main.storyboard | 21 ++++++++++---------
 .../whisper.objc/ViewController.m           | 15 +++++++++++--
 2 files changed, 24 insertions(+), 12 deletions(-)

diff --git a/examples/whisper.objc/whisper.objc/Base.lproj/Main.storyboard b/examples/whisper.objc/whisper.objc/Base.lproj/Main.storyboard
index 0079b25..5c92ba8 100644
--- a/examples/whisper.objc/whisper.objc/Base.lproj/Main.storyboard
+++ b/examples/whisper.objc/whisper.objc/Base.lproj/Main.storyboard
@@ -17,7 +17,7 @@
 [storyboard XML lines stripped during extraction; only the hunk headers survive]
@@ -64,6 +64,7 @@
 [storyboard XML lines stripped during extraction; only the hunk headers survive]

diff --git a/examples/whisper.objc/whisper.objc/ViewController.m b/examples/whisper.objc/whisper.objc/ViewController.m
index 6d3219b..07feb55 100644
--- a/examples/whisper.objc/whisper.objc/ViewController.m
+++ b/examples/whisper.objc/whisper.objc/ViewController.m
@@ -84,6 +84,9 @@ void AudioInputCallback(void * inUserData,
 
     _labelStatusInp.text = @"Status: Idle";
 
+    [_buttonToggleCapture setTitle:@"Start capturing" forState:UIControlStateNormal];
+    [_buttonToggleCapture setBackgroundColor:[UIColor grayColor]];
+
     stateInp.isCapturing = false;
 
     AudioQueueStop(stateInp.queue, true);
@@ -98,7 +101,6 @@ void AudioInputCallback(void * inUserData,
     if (stateInp.isCapturing) {
         // stop capturing
         [self stopCapturing];
-        [sender setTitle:@"Start Capturing" forState:UIControlStateNormal];
 
         return;
     }
@@ -127,6 +129,7 @@ void AudioInputCallback(void * inUserData,
     if (status == 0) {
         _labelStatusInp.text = @"Status: Capturing";
         [sender setTitle:@"Stop Capturing" forState:UIControlStateNormal];
+        [_buttonToggleCapture setBackgroundColor:[UIColor redColor]];
     }
 }
 
@@ -141,7 +144,6 @@ void AudioInputCallback(void * inUserData,
     if (stateInp.isCapturing) {
         // stop capturing
         [self stopCapturing];
-        [sender setTitle:@"Start Capturing" forState:UIControlStateNormal];
 
         return;
     }
@@ -168,6 +170,8 @@ void AudioInputCallback(void * inUserData,
     params.n_threads = 4;
     params.offset_ms = 0;
 
+    CFTimeInterval startTime = CACurrentMediaTime();
+
     if (whisper_full(stateInp.ctx, params, stateInp.audioBufferF32, stateInp.n_samples) != 0) {
         NSLog(@"Failed to run the model");
         _textviewResult.text = @"Failed to run the model";
@@ -175,6 +179,8 @@
         return;
     }
 
+    CFTimeInterval endTime = CACurrentMediaTime();
+
     // clear the text in the textview
     _textviewResult.text = @"";
 
@@ -186,7 +192,12 @@ void AudioInputCallback(void * inUserData,
         _textviewResult.text = [_textviewResult.text stringByAppendingString:[NSString stringWithUTF8String:text_cur]];
     }
 
+    // internal model timing
     whisper_print_timings(stateInp.ctx);
+
+    NSLog(@"\nProcessing time: %5.3f", endTime - startTime);
+
+    _textviewResult.text = [_textviewResult.text stringByAppendingString:[NSString stringWithFormat:@"\n\n[processing time: %5.3f s]", endTime - startTime]];
 }
 
 //
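
Note: the ViewController.m hunks bracket the whisper_full() call with CACurrentMediaTime() and append the elapsed time to the result view. A minimal, self-contained sketch of that timing pattern follows; runExpensiveWork is a hypothetical stand-in for whisper_full(), the rest is standard Foundation/QuartzCore API.

#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h> // declares CACurrentMediaTime()

// Hypothetical stand-in for the expensive call being timed (whisper_full in the patch).
static void runExpensiveWork(void) {
    [NSThread sleepForTimeInterval:0.25];
}

int main(void) {
    @autoreleasepool {
        // take timestamps around the work, exactly as the patch does
        CFTimeInterval startTime = CACurrentMediaTime();
        runExpensiveWork();
        CFTimeInterval endTime = CACurrentMediaTime();

        // same formatting as the patch: seconds with three decimal places
        NSLog(@"\n\n[processing time: %5.3f s]", endTime - startTime);
    }
    return 0;
}

CACurrentMediaTime() reads the host's monotonic clock (mach_absolute_time converted to seconds), so the measurement is not affected by wall-clock adjustments while the model runs, which makes it a reasonable choice over NSDate for this kind of benchmark.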