From be8d74ed17ea812ad48d15ea1bfd06458467da9a Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Wed, 26 Jul 2023 13:13:25 -0700 Subject: [PATCH 01/14] API updates --- .gitignore | 2 + Example/Podfile | 5 +- Example/Podfile.lock | 8 +- .../VoiceProcessorBufferTests.swift | 70 ++++ .../VoiceProcessorTests.swift | 141 ++++++++ .../project.pbxproj | 259 +++++++++++++- .../ios-voice-processor-Example.xcscheme | 44 ++- Example/ios-voice-processor/AppDelegate.swift | 2 +- Example/ios-voice-processor/Info.plist | 6 +- .../ios-voice-processor/ViewController.swift | 29 +- VoiceProcessor.swift | 335 +++++++++++++----- VoiceProcessorBuffer.swift | 43 +++ VoiceProcessorErrors.swift | 33 ++ ios-voice-processor.podspec | 21 +- 14 files changed, 854 insertions(+), 144 deletions(-) create mode 100644 Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift create mode 100644 Example/VoiceProcessorTests/VoiceProcessorTests.swift create mode 100644 VoiceProcessorBuffer.swift create mode 100644 VoiceProcessorErrors.swift diff --git a/.gitignore b/.gitignore index 7dbf1d4..36f1afc 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +.idea + # OS X .DS_Store diff --git a/Example/Podfile b/Example/Podfile index e5a8128..be2776c 100644 --- a/Example/Podfile +++ b/Example/Podfile @@ -1,8 +1,11 @@ use_frameworks! -platform :ios, '9.0' +platform :ios, '11.0' target 'ios-voice-processor_Example' do pod 'ios-voice-processor', :path => '../' +end +target 'ios-voice-processor_ExampleUITests' do + pod 'ios-voice-processor', :path => '../' end diff --git a/Example/Podfile.lock b/Example/Podfile.lock index 12afdab..98b38f9 100644 --- a/Example/Podfile.lock +++ b/Example/Podfile.lock @@ -1,5 +1,5 @@ PODS: - - ios-voice-processor (1.0.3) + - ios-voice-processor (1.1.0) DEPENDENCIES: - ios-voice-processor (from `../`) @@ -9,8 +9,8 @@ EXTERNAL SOURCES: :path: "../" SPEC CHECKSUMS: - ios-voice-processor: 65b25a8db69ea25ffba0eeef37bae71a982f34cc + ios-voice-processor: 8e32d7f980a06d392d128ef1cd19cf6ddcaca3c1 -PODFILE CHECKSUM: befabb92940e6fc9f76174d29f6ccad4b1fab725 +PODFILE CHECKSUM: f3172c9d85af56bf5f3c29596f2387f5961c52b6 -COCOAPODS: 1.10.2 +COCOAPODS: 1.11.3 diff --git a/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift b/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift new file mode 100644 index 0000000..451e6ca --- /dev/null +++ b/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift @@ -0,0 +1,70 @@ +import XCTest + +import ios_voice_processor + +class VoiceProcessorBufferTests: XCTestCase { + + let bufferSize = 512 + override func setUpWithError() throws { + continueAfterFailure = false + } + + func testWriteAndRead() { + let vpBuffer = VoiceProcessorBuffer(size: bufferSize) + + let writeSamples: [Int16] = [1, 2, 3, 4, 5] + try? vpBuffer.write(samples: writeSamples) + + let readSamples = vpBuffer.read(count: writeSamples.count) + XCTAssertEqual(readSamples, writeSamples) + } + + func testAvailableSamples() { + let writeSamples: [Int16] = [1, 2, 3, 4, 5] + let vpBuffer = VoiceProcessorBuffer(size: writeSamples.count + 1) + XCTAssertEqual(vpBuffer.availableSamples(), 0) + + try? vpBuffer.write(samples: writeSamples) + XCTAssertEqual(vpBuffer.availableSamples(), writeSamples.count) + + let readSamples = vpBuffer.read(count: 4) + XCTAssertEqual(vpBuffer.availableSamples(), writeSamples.count - readSamples.count) + + let writeSamples2: [Int16] = [6, 7] + try? 
vpBuffer.write(samples: writeSamples2) + XCTAssertEqual(vpBuffer.availableSamples(), writeSamples.count - readSamples.count + writeSamples2.count) + + let _ = vpBuffer.read(count: 3) + XCTAssertEqual(vpBuffer.availableSamples(), 0) + } + + func testOverwrite() { + let samplesToFill: [Int16] = [1, 2, 3, 4, 5] + let vpBuffer = VoiceProcessorBuffer(size: samplesToFill.count + 1) + try? vpBuffer.write(samples: samplesToFill) + + let additionalSamples: [Int16] = [6, 7] + XCTAssertThrowsError(try vpBuffer.write(samples: additionalSamples)) { error in + XCTAssert(error is VoiceProcessorError) + } + + let expectedSamples: [Int16] = [3, 4, 5, 6, 7] + let readSamples = vpBuffer.read(count: expectedSamples.count) + XCTAssertEqual(readSamples, expectedSamples) + } + + func testReadMoreThanAvailable() { + let samplesToFill: [Int16] = [1, 2, 3, 4, 5] + let vpBuffer = VoiceProcessorBuffer(size: samplesToFill.count + 1) + + try? vpBuffer.write(samples: samplesToFill) + let readSamples = vpBuffer.read(count: 10) + XCTAssertEqual(readSamples.count, samplesToFill.count) + } + + func testEmpty() { + let vpBuffer = VoiceProcessorBuffer(size: 5) + let readSamples = vpBuffer.read(count: 3) + XCTAssertTrue(readSamples.isEmpty) + } +} diff --git a/Example/VoiceProcessorTests/VoiceProcessorTests.swift b/Example/VoiceProcessorTests/VoiceProcessorTests.swift new file mode 100644 index 0000000..8195a75 --- /dev/null +++ b/Example/VoiceProcessorTests/VoiceProcessorTests.swift @@ -0,0 +1,141 @@ +// +// Copyright 2023 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
+// + +import AVFoundation +import XCTest + +import ios_voice_processor + +class VoiceProcessorTests: XCTestCase { + + let frameLength: UInt32 = 512 + let sampleRate: UInt32 = 16000 + + var frameCount = 0 + var errorCount = 0 + + override func setUp() { + super.setUp() + continueAfterFailure = false + } + + override func tearDown() { + super.tearDown() + } + + func testGetInstance() { + let vp = VoiceProcessor.instance + if (!vp.hasRecordAudioPermission) { + AVAudioSession.sharedInstance().requestRecordPermission { granted in + if !granted { + XCTFail() + } + } + } + } + + func testBasic() throws { + let vp = VoiceProcessor.instance + + let vpFrameListener = VoiceProcessorFrameListener { frame in + XCTAssert(frame.count == self.frameLength) + self.frameCount += 1 + } + + let vpErrorListener = VoiceProcessorErrorListener { error in + print("\(error.errorDescription!)") + self.errorCount += 1 + } + + XCTAssert(vp.isRecording == false) + vp.addFrameListener(vpFrameListener) + vp.addErrorListener(vpErrorListener) + try vp.start(frameLength: frameLength, sampleRate: sampleRate) + XCTAssert(vp.isRecording == true) + + sleep(1) + + try vp.stop() + + XCTAssert(frameCount > 0) + XCTAssert(errorCount == 0) + XCTAssert(vp.isRecording == false) + + vp.clearErrorListeners() + vp.clearFrameListeners() + frameCount = 0 + errorCount = 0 + } + + func testInvalidSetup() throws { + let vp = VoiceProcessor.instance + + XCTAssertThrowsError(try vp.start(frameLength: 0, sampleRate: 16000)) { error in + XCTAssert(error is VoiceProcessorArgumentError) + } + + XCTAssertThrowsError(try vp.start(frameLength: 512, sampleRate: 0)) { error in + XCTAssert(error is VoiceProcessorArgumentError) + } + + try vp.start(frameLength: frameLength, sampleRate: sampleRate) + + XCTAssertThrowsError(try vp.start(frameLength: 1024, sampleRate: 44100)) { error in + XCTAssert(error is VoiceProcessorArgumentError) + } + + try vp.stop() + } + + func testAddRemoveListeners() { + let vp = VoiceProcessor.instance + + let f1 = VoiceProcessorFrameListener({_ in }) + let f2 = VoiceProcessorFrameListener({_ in }) + + let e1 = VoiceProcessorErrorListener({_ in }) + let e2 = VoiceProcessorErrorListener({_ in }) + + vp.addFrameListener(f1); + XCTAssertEqual(vp.numFrameListeners, 1); + vp.addFrameListener(f2); + XCTAssertEqual(vp.numFrameListeners, 2); + vp.removeFrameListener(f1); + XCTAssertEqual(vp.numFrameListeners, 1); + vp.removeFrameListener(f1); + XCTAssertEqual(vp.numFrameListeners, 1); + vp.removeFrameListener(f2); + XCTAssertEqual(vp.numFrameListeners, 0); + + let fs: [VoiceProcessorFrameListener] = [f1, f2]; + vp.addFrameListeners(fs); + XCTAssertEqual(vp.numFrameListeners, 2); + vp.removeFrameListeners(fs); + XCTAssertEqual(vp.numFrameListeners, 0); + vp.addFrameListeners(fs); + XCTAssertEqual(vp.numFrameListeners, 2); + vp.clearFrameListeners(); + XCTAssertEqual(vp.numFrameListeners, 0); + + vp.addErrorListener(e1); + XCTAssertEqual(vp.numErrorListeners, 1); + vp.addErrorListener(e2); + XCTAssertEqual(vp.numErrorListeners, 2); + vp.removeErrorListener(e1); + XCTAssertEqual(vp.numErrorListeners, 1); + vp.removeErrorListener(e1); + XCTAssertEqual(vp.numErrorListeners, 1); + vp.removeErrorListener(e2); + XCTAssertEqual(vp.numErrorListeners, 0); + vp.addErrorListener(e1); + XCTAssertEqual(vp.numErrorListeners, 1); + vp.clearErrorListeners(); + XCTAssertEqual(vp.numErrorListeners, 0); + } +} diff --git a/Example/ios-voice-processor.xcodeproj/project.pbxproj b/Example/ios-voice-processor.xcodeproj/project.pbxproj index 5eec139..08ccc30 
100644 --- a/Example/ios-voice-processor.xcodeproj/project.pbxproj +++ b/Example/ios-voice-processor.xcodeproj/project.pbxproj @@ -7,16 +7,55 @@ objects = { /* Begin PBXBuildFile section */ + 0247FE7E2A7062B500D368BE /* VoiceProcessorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0247FE7D2A7062B500D368BE /* VoiceProcessorTests.swift */; }; + 0247FE8A2A70660A00D368BE /* Pods_ios_voice_processor_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 31BD2D0D93155E8AFCFC8373 /* Pods_ios_voice_processor_Example.framework */; }; + 0247FE8B2A70660A00D368BE /* Pods_ios_voice_processor_Example.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 31BD2D0D93155E8AFCFC8373 /* Pods_ios_voice_processor_Example.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + 0247FEA72A70962B00D368BE /* VoiceProcessorBufferTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0247FEA62A70962B00D368BE /* VoiceProcessorBufferTests.swift */; }; 1E0EC0D026BC5B3100A5C080 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1E0EC0CD26BC59C800A5C080 /* Main.storyboard */; }; - 1EAD175632730E856A2866A5 /* Pods_ios_voice_processor_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 31BD2D0D93155E8AFCFC8373 /* Pods_ios_voice_processor_Example.framework */; }; 1ED03A0226BC5B9B0078E0E7 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD71AFB9204008FA782 /* ViewController.swift */; }; 607FACD61AFB9204008FA782 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD51AFB9204008FA782 /* AppDelegate.swift */; }; 607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDC1AFB9204008FA782 /* Images.xcassets */; }; + 66A25692DB18E8BE74444FD0 /* Pods_ios_voice_processor_ExampleUITests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C0C433ACC5D32169CB6CAE92 /* Pods_ios_voice_processor_ExampleUITests.framework */; }; + F9E82547CB5426845C7CC97E /* Pods_ios_voice_processor_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0247FE9E2A70689500D368BE /* Pods_ios_voice_processor_Example.framework */; }; /* End PBXBuildFile section */ +/* Begin PBXContainerItemProxy section */ + 0247FE812A7062B500D368BE /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 607FACC81AFB9204008FA782 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 607FACCF1AFB9204008FA782; + remoteInfo = "ios-voice-processor_Example"; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 0247FE8C2A70660A00D368BE /* Embed Frameworks */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + 0247FE8B2A70660A00D368BE /* Pods_ios_voice_processor_Example.framework in Embed Frameworks */, + ); + name = "Embed Frameworks"; + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + /* Begin PBXFileReference section */ + 0247FE7B2A7062B500D368BE /* ios-voice-processor_ExampleUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "ios-voice-processor_ExampleUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; + 0247FE7D2A7062B500D368BE /* VoiceProcessorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceProcessorTests.swift; sourceTree = ""; }; + 0247FE862A7065B000D368BE /* 
Pods_ios_voice_processor_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = Pods_ios_voice_processor_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 0247FE882A7065FD00D368BE /* Pods_ios_voice_processor_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = Pods_ios_voice_processor_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 0247FE8D2A70668400D368BE /* ios_voice_processor.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = ios_voice_processor.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 0247FE9C2A70685500D368BE /* Pods_ios_voice_processor_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = Pods_ios_voice_processor_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 0247FE9E2A70689500D368BE /* Pods_ios_voice_processor_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = Pods_ios_voice_processor_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 0247FEA02A7068C300D368BE /* ios_voice_processor.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = ios_voice_processor.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 0247FEA62A70962B00D368BE /* VoiceProcessorBufferTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceProcessorBufferTests.swift; sourceTree = ""; }; 1E0EC0CE26BC59C800A5C080 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 31BD2D0D93155E8AFCFC8373 /* Pods_ios_voice_processor_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_ios_voice_processor_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 36E649CB7ECFF0FE966B6535 /* Pods-ios-voice-processor_ExampleUITests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ios-voice-processor_ExampleUITests.release.xcconfig"; path = "Target Support Files/Pods-ios-voice-processor_ExampleUITests/Pods-ios-voice-processor_ExampleUITests.release.xcconfig"; sourceTree = ""; }; 5D8BCFFC8E9EA514C2A09937 /* ios-voice-processor.podspec */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = "ios-voice-processor.podspec"; path = "../ios-voice-processor.podspec"; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.ruby; }; 607FACD01AFB9204008FA782 /* ios-voice-processor_Example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "ios-voice-processor_Example.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 607FACD41AFB9204008FA782 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; @@ -25,26 +64,53 @@ 607FACDC1AFB9204008FA782 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = ""; }; 8B9B48BADD35114B8CAEA581 /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 9AB8D9298C33D6D267509D99 /* LICENSE */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = ""; }; + B93F49F857B5BE6CDF82B6D9 /* 
Pods-ios-voice-processor_ExampleUITests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ios-voice-processor_ExampleUITests.debug.xcconfig"; path = "Target Support Files/Pods-ios-voice-processor_ExampleUITests/Pods-ios-voice-processor_ExampleUITests.debug.xcconfig"; sourceTree = ""; }; + C0C433ACC5D32169CB6CAE92 /* Pods_ios_voice_processor_ExampleUITests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_ios_voice_processor_ExampleUITests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; D1EFD1FF4DA5E077FC1E9E1E /* Pods-ios-voice-processor_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ios-voice-processor_Example.release.xcconfig"; path = "Target Support Files/Pods-ios-voice-processor_Example/Pods-ios-voice-processor_Example.release.xcconfig"; sourceTree = ""; }; FF72E2E2D4E514B69EB4A009 /* Pods-ios-voice-processor_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ios-voice-processor_Example.debug.xcconfig"; path = "Target Support Files/Pods-ios-voice-processor_Example/Pods-ios-voice-processor_Example.debug.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ + 0247FE782A7062B500D368BE /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 66A25692DB18E8BE74444FD0 /* Pods_ios_voice_processor_ExampleUITests.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; 607FACCD1AFB9204008FA782 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 1EAD175632730E856A2866A5 /* Pods_ios_voice_processor_Example.framework in Frameworks */, + 0247FE8A2A70660A00D368BE /* Pods_ios_voice_processor_Example.framework in Frameworks */, + F9E82547CB5426845C7CC97E /* Pods_ios_voice_processor_Example.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 0247FE7C2A7062B500D368BE /* VoiceProcessorTests */ = { + isa = PBXGroup; + children = ( + 0247FE7D2A7062B500D368BE /* VoiceProcessorTests.swift */, + 0247FEA62A70962B00D368BE /* VoiceProcessorBufferTests.swift */, + ); + path = VoiceProcessorTests; + sourceTree = ""; + }; 4D9E642CCDE60B7AB3F878D1 /* Frameworks */ = { isa = PBXGroup; children = ( + 0247FEA02A7068C300D368BE /* ios_voice_processor.framework */, + 0247FE9E2A70689500D368BE /* Pods_ios_voice_processor_Example.framework */, + 0247FE9C2A70685500D368BE /* Pods_ios_voice_processor_Example.framework */, + 0247FE8D2A70668400D368BE /* ios_voice_processor.framework */, + 0247FE882A7065FD00D368BE /* Pods_ios_voice_processor_Example.framework */, + 0247FE862A7065B000D368BE /* Pods_ios_voice_processor_Example.framework */, 31BD2D0D93155E8AFCFC8373 /* Pods_ios_voice_processor_Example.framework */, + C0C433ACC5D32169CB6CAE92 /* Pods_ios_voice_processor_ExampleUITests.framework */, ); name = Frameworks; sourceTree = ""; @@ -54,6 +120,7 @@ children = ( 607FACF51AFB993E008FA782 /* Podspec Metadata */, 607FACD21AFB9204008FA782 /* Example for ios-voice-processor */, + 0247FE7C2A7062B500D368BE /* VoiceProcessorTests */, 607FACD11AFB9204008FA782 /* Products */, 77BBDAB99E4D80C9B8F45EA4 /* Pods */, 4D9E642CCDE60B7AB3F878D1 /* Frameworks */, @@ -64,6 +131,7 @@ isa = PBXGroup; children = ( 
607FACD01AFB9204008FA782 /* ios-voice-processor_Example.app */, + 0247FE7B2A7062B500D368BE /* ios-voice-processor_ExampleUITests.xctest */, ); name = Products; sourceTree = ""; @@ -104,6 +172,8 @@ children = ( FF72E2E2D4E514B69EB4A009 /* Pods-ios-voice-processor_Example.debug.xcconfig */, D1EFD1FF4DA5E077FC1E9E1E /* Pods-ios-voice-processor_Example.release.xcconfig */, + B93F49F857B5BE6CDF82B6D9 /* Pods-ios-voice-processor_ExampleUITests.debug.xcconfig */, + 36E649CB7ECFF0FE966B6535 /* Pods-ios-voice-processor_ExampleUITests.release.xcconfig */, ); path = Pods; sourceTree = ""; @@ -111,6 +181,26 @@ /* End PBXGroup section */ /* Begin PBXNativeTarget section */ + 0247FE7A2A7062B500D368BE /* ios-voice-processor_ExampleUITests */ = { + isa = PBXNativeTarget; + buildConfigurationList = 0247FE832A7062B500D368BE /* Build configuration list for PBXNativeTarget "ios-voice-processor_ExampleUITests" */; + buildPhases = ( + 158FABA09B8BB2A13E7F492B /* [CP] Check Pods Manifest.lock */, + 0247FE772A7062B500D368BE /* Sources */, + 0247FE782A7062B500D368BE /* Frameworks */, + 0247FE792A7062B500D368BE /* Resources */, + 0BE716EF4412036A32C69DA6 /* [CP] Embed Pods Frameworks */, + ); + buildRules = ( + ); + dependencies = ( + 0247FE822A7062B500D368BE /* PBXTargetDependency */, + ); + name = "ios-voice-processor_ExampleUITests"; + productName = "ios-voice-processor_ExampleUITests"; + productReference = 0247FE7B2A7062B500D368BE /* ios-voice-processor_ExampleUITests.xctest */; + productType = "com.apple.product-type.bundle.ui-testing"; + }; 607FACCF1AFB9204008FA782 /* ios-voice-processor_Example */ = { isa = PBXNativeTarget; buildConfigurationList = 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "ios-voice-processor_Example" */; @@ -120,6 +210,7 @@ 607FACCD1AFB9204008FA782 /* Frameworks */, 607FACCE1AFB9204008FA782 /* Resources */, 74B1AD03E059B510840AC16D /* [CP] Embed Pods Frameworks */, + 0247FE8C2A70660A00D368BE /* Embed Frameworks */, ); buildRules = ( ); @@ -136,12 +227,19 @@ 607FACC81AFB9204008FA782 /* Project object */ = { isa = PBXProject; attributes = { - LastSwiftUpdateCheck = 0830; + LastSwiftUpdateCheck = 1310; LastUpgradeCheck = 0830; ORGANIZATIONNAME = CocoaPods; TargetAttributes = { + 0247FE7A2A7062B500D368BE = { + CreatedOnToolsVersion = 13.1; + DevelopmentTeam = 65723695GD; + ProvisioningStyle = Automatic; + TestTargetID = 607FACCF1AFB9204008FA782; + }; 607FACCF1AFB9204008FA782 = { CreatedOnToolsVersion = 6.3.1; + DevelopmentTeam = 65723695GD; LastSwiftMigration = 0900; }; }; @@ -161,11 +259,19 @@ projectRoot = ""; targets = ( 607FACCF1AFB9204008FA782 /* ios-voice-processor_Example */, + 0247FE7A2A7062B500D368BE /* ios-voice-processor_ExampleUITests */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ + 0247FE792A7062B500D368BE /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; 607FACCE1AFB9204008FA782 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; @@ -178,6 +284,24 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ + 0BE716EF4412036A32C69DA6 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-ios-voice-processor_ExampleUITests/Pods-ios-voice-processor_ExampleUITests-frameworks.sh", + 
"${BUILT_PRODUCTS_DIR}/ios-voice-processor/ios_voice_processor.framework", + ); + name = "[CP] Embed Pods Frameworks"; + outputPaths = ( + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ios_voice_processor.framework", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-ios-voice-processor_ExampleUITests/Pods-ios-voice-processor_ExampleUITests-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 0DC6C7279B053B4E508938F6 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; @@ -200,6 +324,28 @@ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; + 158FABA09B8BB2A13E7F492B /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-ios-voice-processor_ExampleUITests-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; 74B1AD03E059B510840AC16D /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; @@ -221,6 +367,15 @@ /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ + 0247FE772A7062B500D368BE /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 0247FE7E2A7062B500D368BE /* VoiceProcessorTests.swift in Sources */, + 0247FEA72A70962B00D368BE /* VoiceProcessorBufferTests.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; 607FACCC1AFB9204008FA782 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; @@ -232,6 +387,14 @@ }; /* End PBXSourcesBuildPhase section */ +/* Begin PBXTargetDependency section */ + 0247FE822A7062B500D368BE /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 607FACCF1AFB9204008FA782 /* ios-voice-processor_Example */; + targetProxy = 0247FE812A7062B500D368BE /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + /* Begin PBXVariantGroup section */ 1E0EC0CD26BC59C800A5C080 /* Main.storyboard */ = { isa = PBXVariantGroup; @@ -244,6 +407,71 @@ /* End PBXVariantGroup section */ /* Begin XCBuildConfiguration section */ + 0247FE842A7062B500D368BE /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = B93F49F857B5BE6CDF82B6D9 /* Pods-ios-voice-processor_ExampleUITests.debug.xcconfig */; + buildSettings = { + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++17"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = "$(inherited)"; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = 65723695GD; + GCC_C_LANGUAGE_STANDARD = gnu11; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; + MARKETING_VERSION = 1.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "ai.picovoice.ios-voice-processor-ExampleUITests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = "ios-voice-processor_Example"; + }; + name = Debug; + }; + 0247FE852A7062B500D368BE /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 36E649CB7ECFF0FE966B6535 /* Pods-ios-voice-processor_ExampleUITests.release.xcconfig */; + buildSettings = { + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++17"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = "$(inherited)"; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + 
CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = 65723695GD; + GCC_C_LANGUAGE_STANDARD = gnu11; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; + MARKETING_VERSION = 1.0; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "ai.picovoice.ios-voice-processor-ExampleUITests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = "ios-voice-processor_Example"; + }; + name = Release; + }; 607FACED1AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { @@ -289,7 +517,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 9.3; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -336,7 +564,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 9.3; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; @@ -350,13 +578,15 @@ baseConfigurationReference = FF72E2E2D4E514B69EB4A009 /* Pods-ios-voice-processor_Example.debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + DEVELOPMENT_TEAM = 65723695GD; INFOPLIST_FILE = "ios-voice-processor/Info.plist"; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; MODULE_NAME = ExampleApp; - PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.$(PRODUCT_NAME:rfc1034identifier)"; + PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_SWIFT3_OBJC_INFERENCE = Default; - SWIFT_VERSION = 4.0; + SWIFT_VERSION = 5.0; }; name = Debug; }; @@ -365,19 +595,30 @@ baseConfigurationReference = D1EFD1FF4DA5E077FC1E9E1E /* Pods-ios-voice-processor_Example.release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + DEVELOPMENT_TEAM = 65723695GD; INFOPLIST_FILE = "ios-voice-processor/Info.plist"; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; MODULE_NAME = ExampleApp; - PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.$(PRODUCT_NAME:rfc1034identifier)"; + PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_SWIFT3_OBJC_INFERENCE = Default; - SWIFT_VERSION = 4.0; + SWIFT_VERSION = 5.0; }; name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ + 0247FE832A7062B500D368BE /* Build configuration list for PBXNativeTarget "ios-voice-processor_ExampleUITests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 0247FE842A7062B500D368BE /* Debug */, + 0247FE852A7062B500D368BE /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; 607FACCB1AFB9204008FA782 /* Build configuration list for PBXProject "ios-voice-processor" */ = { isa = XCConfigurationList; buildConfigurations = ( diff --git a/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme b/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme index fa75191..1a29885 100644 --- a/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme +++ 
b/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme @@ -40,8 +40,16 @@ buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" - language = "" shouldUseLaunchSchemeArgsEnv = "YES"> + + + + @@ -53,24 +61,32 @@ ReferencedContainer = "container:ios-voice-processor.xcodeproj"> + + + + + + + + - - - - - - - - Bool { + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { // Override point for customization after application launch. return true } diff --git a/Example/ios-voice-processor/Info.plist b/Example/ios-voice-processor/Info.plist index f1d9d68..444e172 100644 --- a/Example/ios-voice-processor/Info.plist +++ b/Example/ios-voice-processor/Info.plist @@ -22,14 +22,14 @@ 1 LSRequiresIPhoneOS + NSMicrophoneUsageDescription + For audio recording UILaunchStoryboardName Main UIMainStoryboardFile Main UIRequiredDeviceCapabilities - - armv7 - + UISupportedInterfaceOrientations UIInterfaceOrientationPortrait diff --git a/Example/ios-voice-processor/ViewController.swift b/Example/ios-voice-processor/ViewController.swift index f9067e1..66c6448 100644 --- a/Example/ios-voice-processor/ViewController.swift +++ b/Example/ios-voice-processor/ViewController.swift @@ -1,5 +1,5 @@ // -// Copyright 2018-2021 Picovoice Inc. +// Copyright 2021-2023 Picovoice Inc. // You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" // file accompanying this source. // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on @@ -27,6 +27,9 @@ class ViewController: UIViewController { CGPoint(x: (viewSize.width - startButtonSize.width) / 2, y: (viewSize.height - startButtonSize.height) / 2) startButton.layer.cornerRadius = 0.5 * startButton.bounds.size.width startButton.clipsToBounds = true + + let frameListener = VoiceProcessorFrameListener(audioCallback) + VoiceProcessor.instance.addFrameListener(frameListener) } override func didReceiveMemoryWarning() { @@ -38,12 +41,12 @@ class ViewController: UIViewController { if !isRecording { do { - guard try VoiceProcessor.shared.hasPermissions() else { - print("Permissions denied.") + guard VoiceProcessor.instance.hasRecordAudioPermission else { + print("Audio permission is required for audio recording.") return } - - try VoiceProcessor.shared.start(frameLength: 512, sampleRate: 16000, audioCallback: self.audioCallback) + + try VoiceProcessor.instance.start(frameLength: 512, sampleRate: 16000) } catch { let alert = UIAlertController( title: "Alert", @@ -55,11 +58,21 @@ class ViewController: UIViewController { } isRecording = true - startButton.setTitle("STOP", for: UIControlState.normal) + startButton.setTitle("STOP", for: UIControl.State.normal) } else { - VoiceProcessor.shared.stop() + do { + try VoiceProcessor.instance.stop() + } catch { + let alert = UIAlertController( + title: "Alert", + message: "Could not stop voice processor.", + preferredStyle: UIAlertController.Style.alert) + alert.addAction(UIAlertAction(title: "Click", style: UIAlertAction.Style.default, handler: nil)) + self.present(alert, animated: true, completion: nil) + return + } isRecording = false - startButton.setTitle("START", for: UIControlState.normal) + startButton.setTitle("START", for: UIControl.State.normal) } } diff --git 
a/VoiceProcessor.swift b/VoiceProcessor.swift index db0c571..6ce1e6b 100644 --- a/VoiceProcessor.swift +++ b/VoiceProcessor.swift @@ -1,5 +1,5 @@ // -// Copyright 2021 Picovoice Inc. +// Copyright 2021-2023 Picovoice Inc. // You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" // file accompanying this source. // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on @@ -9,148 +9,281 @@ import AVFoundation +public typealias VoiceProcessorFrameCallback = ([Int16]) -> () + +public class VoiceProcessorFrameListener { + private let callback_: VoiceProcessorFrameCallback + + public init(_ callback: @escaping VoiceProcessorFrameCallback) { + callback_ = callback + } + + public var onFrame: VoiceProcessorFrameCallback { + get { + callback_ + } + } +} + +public typealias VoiceProcessorErrorCallback = (VoiceProcessorError) -> () + +public class VoiceProcessorErrorListener { + private let callback_: VoiceProcessorErrorCallback + + public init(_ callback: @escaping VoiceProcessorErrorCallback) { + callback_ = callback + } + + public var onError: VoiceProcessorErrorCallback { + get { + callback_ + } + } +} + public class VoiceProcessor { - public static let shared: VoiceProcessor = VoiceProcessor() - + public static let instance: VoiceProcessor = VoiceProcessor() + + private let lock = NSLock() private let numBuffers = 3 - private var audioQueue: AudioQueueRef? - private var audioCallback: (([Int16]) -> Void)? - private var frameLength: UInt32? - private var bufferRef: AudioQueueBufferRef? + private var audioQueue: AudioQueueRef! + private var bufferList = [AudioQueueBufferRef?](repeating: nil, count: 3) + private var circularBuffer: VoiceProcessorBuffer? - private var started = false + private var frameListeners: [VoiceProcessorFrameListener] = [] + private var errorListeners: [VoiceProcessorErrorListener] = [] + + private var isRecording_: Bool = false + public var isRecording: Bool { + isRecording_ + } + + private var frameLength_: UInt32? = nil + public var frameLength: UInt32? { + frameLength_ + } + + private var sampleRate_: UInt32? = nil + public var sampleRate: UInt32? 
{ + sampleRate_ + } + + public var numFrameListeners: Int { + frameListeners.count + } + public var numErrorListeners: Int { + errorListeners.count + } + + public var hasRecordAudioPermission: Bool { + AVAudioSession.sharedInstance().recordPermission == .granted + } + private init() { NotificationCenter.default.addObserver( - self, - selector: #selector(handleInterruption), - name: AVAudioSession.interruptionNotification, - object: AVAudioSession.sharedInstance()) + self, + selector: #selector(handleInterruption), + name: AVAudioSession.interruptionNotification, + object: AVAudioSession.sharedInstance()) } - - public func hasPermissions() throws -> Bool { - if AVAudioSession.sharedInstance().recordPermission == .denied { - return false + + public func addFrameListener(_ listener: VoiceProcessorFrameListener) { + lock.lock() + frameListeners.append(listener) + lock.unlock() + } + + public func addFrameListeners(_ listeners: [VoiceProcessorFrameListener]) { + lock.lock() + frameListeners.append(contentsOf: listeners) + lock.unlock() + } + + public func removeFrameListener(_ listener: VoiceProcessorFrameListener) { + lock.lock() + frameListeners.removeAll { + $0 === listener } - - return true + lock.unlock() } - - public func start( - frameLength: UInt32, - sampleRate: UInt32, - audioCallback: @escaping (([Int16]) -> Void), - formatID: AudioFormatID = kAudioFormatLinearPCM, - formatFlags: AudioFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked, - bytesPerPacket: UInt32 = 2, - framesPerPacket: UInt32 = 1, - bytesPerFrame: UInt32 = 2, - channelsPerFrame: UInt32 = 1, - bitsPerChannel: UInt32 = 16, - reserved: UInt32 = 0 - ) throws { - if started { - return + + public func removeFrameListeners(_ listeners: [VoiceProcessorFrameListener]) { + lock.lock() + for listener in listeners { + frameListeners.removeAll { + $0 === listener + } } - - try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .defaultToSpeaker, .allowBluetooth]) - try AVAudioSession.sharedInstance().setActive(true, options: .notifyOthersOnDeactivation) - - var format = AudioStreamBasicDescription( - mSampleRate: Float64(sampleRate), - mFormatID: formatID, - mFormatFlags: kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked, - mBytesPerPacket: bytesPerPacket, - mFramesPerPacket: framesPerPacket, - mBytesPerFrame: bytesPerFrame, - mChannelsPerFrame: channelsPerFrame, - mBitsPerChannel: bitsPerChannel, - mReserved: reserved) - - let userData = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()) - AudioQueueNewInput(&format, createAudioQueueCallback(), userData, nil, nil, 0, &audioQueue) - - guard let queue = audioQueue else { - return + lock.unlock() + } + + public func clearFrameListeners() { + lock.lock() + frameListeners.removeAll() + lock.unlock() + } + + public func addErrorListener(_ listener: VoiceProcessorErrorListener) { + lock.lock() + errorListeners.append(listener) + lock.unlock() + } + + public func removeErrorListener(_ listener: VoiceProcessorErrorListener) { + lock.lock() + errorListeners.removeAll { + $0 === listener + } + lock.unlock() + } + + public func clearErrorListeners() { + lock.lock() + errorListeners.removeAll() + lock.unlock() + } + + public func start(frameLength: UInt32, sampleRate: UInt32) throws { + if frameLength == 0 { + throw VoiceProcessorArgumentError("Frame length cannot be zero.") + } + + if sampleRate == 0 { + throw VoiceProcessorArgumentError("Sample Rate cannot be zero.") } - - 
self.frameLength = frameLength; - self.audioCallback = audioCallback - let bufferSize = frameLength * 2 - for _ in 0.. AudioQueueInputCallback { - return { userData, queue, bufferRef, startTimeRef, numPackets, packetDescriptions in - // `self` is passed in as userData in the audio queue callback. - guard let userData = userData else { + { userData, queue, bufferRef, startTimeRef, numPackets, packetDescriptions in + let `self` = Unmanaged.fromOpaque(userData!).takeUnretainedValue() + + guard let frameLength = self.frameLength_ else { + self.onError(VoiceProcessorRuntimeError("Unable to get audio frame: frame length is nil")) return } - let `self` = Unmanaged.fromOpaque(userData).takeUnretainedValue() - - guard let frameLength = self.frameLength else { + guard let circularBuffer = self.circularBuffer else { + self.onError(VoiceProcessorRuntimeError("Unable to get audio frame: circular buffer is nil")) + return + } + + let bufferPtr = bufferRef.pointee.mAudioData.bindMemory(to: Int16.self, capacity: Int(bufferRef.pointee.mAudioDataByteSize) / MemoryLayout.size) + let samples = Array(UnsafeBufferPointer(start: bufferPtr, count: Int(numPackets))) + + do { + try circularBuffer.write(samples: Array(samples)) + } catch let error as VoiceProcessorError { + self.onError(error) + } catch { + print("Unknown error encountered") return } - if frameLength == numPackets { - let ptr = bufferRef.pointee.mAudioData.assumingMemoryBound(to: Int16.self) - let pcm = Array(UnsafeBufferPointer(start: ptr, count: Int(frameLength))) - - if let audioCallback = self.audioCallback { - audioCallback(pcm) + if circularBuffer.availableSamples() >= frameLength { + let frame = circularBuffer.read(count: Int(frameLength)) + if (frame.count != frameLength) { + self.onError(VoiceProcessorReadError("Circular buffer returned a frame of size \(frame.count) (frameLength is \(frameLength))")) } + self.onFrame(frame) } - + AudioQueueEnqueueBuffer(queue, bufferRef, 0, nil) } } - + @objc private func handleInterruption(_ notification: NSNotification) { - guard self.started else { + guard isRecording_ else { return } guard let audioQueue = audioQueue else { + onError(VoiceProcessorRuntimeError("Unable to handle interruption: Audio queue was nil")) return } - + guard let info = notification.userInfo, - let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt, - let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { + let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt, + let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { + onError(VoiceProcessorRuntimeError("Unable to handle interruption: Notification info was nil")) return } - + if type == .ended { guard let optionsValue = - info[AVAudioSessionInterruptionOptionKey] as? UInt else { - return + info[AVAudioSessionInterruptionOptionKey] as? UInt else { + onError(VoiceProcessorRuntimeError("Unable to handle interruption: Options key was nil")) + return } let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue) if options.contains(.shouldResume) { - AudioQueueEnqueueBuffer(audioQueue, self.bufferRef!, 0, nil) + for i in 0.. 0 { + throw VoiceProcessorReadError("Buffer overflow occurred - \(numOverwrite) samples dropped.") + } + } + + public func read(count: Int) -> [Int16] { + var samples: [Int16] = [] + + let numToRead = min(Int(count), availableSamples()) + for _ in 0.. Int { + let diff = writeIndex - readIndex + return diff >= 0 ? 
diff : diff + buffer.count + } +} diff --git a/VoiceProcessorErrors.swift b/VoiceProcessorErrors.swift new file mode 100644 index 0000000..d88857e --- /dev/null +++ b/VoiceProcessorErrors.swift @@ -0,0 +1,33 @@ +// +// Copyright 2023 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. +// + +public class VoiceProcessorError: LocalizedError { + private let message: String + + public init (_ message: String) { + self.message = message + } + + public var errorDescription: String? { + return message + } + + public var name: String { + get { + return String(describing: type(of: self)) + } + } +} + +public class VoiceProcessorArgumentError: VoiceProcessorError {} + +public class VoiceProcessorReadError: VoiceProcessorError {} + +public class VoiceProcessorRuntimeError: VoiceProcessorError {} + diff --git a/ios-voice-processor.podspec b/ios-voice-processor.podspec index 35a6b9a..beb44e8 100644 --- a/ios-voice-processor.podspec +++ b/ios-voice-processor.podspec @@ -1,5 +1,5 @@ # -# Copyright 2021 Picovoice Inc. +# Copyright 2021-2023 Picovoice Inc. # You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" # file accompanying this source. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on @@ -9,21 +9,22 @@ Pod::Spec.new do |s| s.name = 'ios-voice-processor' - s.version = '1.0.3' - s.summary = 'A cocoapod library for real-time voice processing.' + s.module_name = 'ios_voice_processor' + s.version = '1.1.0' + s.summary = 'An asynchronous iOS audio recording library designed for real-time speech audio processing.' s.description = <<-DESC -A voice processing library for ios. Has basic functionality to check record permissions, start recording, stop recording and processor -frames while recording. + The iOS Voice Processor is an asynchronous audio capture library designed for real-time audio processing. + Given some specifications, the library delivers frames of raw audio data to the user via listeners. 
DESC s.homepage = 'https://github.com/Picovoice/ios-voice-processor' - s.license = { :type => 'Apache', :file => 'LICENSE' } - s.author = { 'ksyeo1010' => 'kyeo@picovoice.ai' } + s.license = { :type => 'Apache 2.0' } + s.author = { 'Picovoice' => 'hello@picovoice.ai' } s.source = { :git => 'https://github.com/Picovoice/ios-voice-processor.git', :tag => s.version.to_s } - s.ios.deployment_target = '9.0' + s.ios.deployment_target = '11.0' s.swift_version = '5.0' - s.source_files = 'VoiceProcessor.swift' + s.source_files = 'VoiceProcessor*.swift' s.frameworks = 'AVFoundation' - + end From 4111272ba98071888026a226949409e7640906b8 Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Wed, 26 Jul 2023 15:06:09 -0700 Subject: [PATCH 02/14] add audio dump to demo --- .../ios-voice-processor/ViewController.swift | 140 +++++++++++++----- VoiceProcessor.swift | 19 ++- 2 files changed, 116 insertions(+), 43 deletions(-) diff --git a/Example/ios-voice-processor/ViewController.swift b/Example/ios-voice-processor/ViewController.swift index 66c6448..5ddd940 100644 --- a/Example/ios-voice-processor/ViewController.swift +++ b/Example/ios-voice-processor/ViewController.swift @@ -7,24 +7,30 @@ // specific language governing permissions and limitations under the License. // +import AVFoundation import UIKit + import ios_voice_processor class ViewController: UIViewController { @IBOutlet weak var startButton: UIButton! - - var isRecording: Bool = false + + private var isRecording: Bool = false + private var recordedAudio: [Int16] = [] + + let FRAME_LENGTH: UInt32 = 512 + let SAMPLE_RATE: UInt32 = 16000 + let DUMP_AUDIO: Bool = true override func viewDidLoad() { super.viewDidLoad() - // Do any additional setup after loading the view, typically from a nib. - + let viewSize = view.frame.size let startButtonSize = CGSize(width: 120, height: 120) - + startButton.frame.size = startButtonSize startButton.frame.origin = - CGPoint(x: (viewSize.width - startButtonSize.width) / 2, y: (viewSize.height - startButtonSize.height) / 2) + CGPoint(x: (viewSize.width - startButtonSize.width) / 2, y: (viewSize.height - startButtonSize.height) / 2) startButton.layer.cornerRadius = 0.5 * startButton.bounds.size.width startButton.clipsToBounds = true @@ -36,50 +42,114 @@ class ViewController: UIViewController { super.didReceiveMemoryWarning() // Dispose of any resources that can be recreated. 
} - + @IBAction func toggleStartButton(_ sender: UIButton) { if !isRecording { - - do { - guard VoiceProcessor.instance.hasRecordAudioPermission else { - print("Audio permission is required for audio recording.") - return - } + startRecording() + } else { + stopRecording() + } + } - try VoiceProcessor.instance.start(frameLength: 512, sampleRate: 16000) - } catch { - let alert = UIAlertController( - title: "Alert", - message: "Could not start voice processor.", - preferredStyle: UIAlertController.Style.alert) - alert.addAction(UIAlertAction(title: "Click", style: UIAlertAction.Style.default, handler: nil)) - self.present(alert, animated: true, completion: nil) + private func startRecording() { + do { + guard VoiceProcessor.hasRecordAudioPermission else { + VoiceProcessor.requestRecordAudioPermission(onUserPermissionResponse) return } - - isRecording = true - startButton.setTitle("STOP", for: UIControl.State.normal) - } else { + + if DUMP_AUDIO { + recordedAudio.removeAll() + } + + try VoiceProcessor.instance.start(frameLength: FRAME_LENGTH, sampleRate: SAMPLE_RATE) + } catch { + let alert = UIAlertController( + title: "Alert", + message: "Could not start voice processor.", + preferredStyle: UIAlertController.Style.alert) + alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.default, handler: nil)) + self.present(alert, animated: true, completion: nil) + return + } + isRecording = true + startButton.setTitle("STOP", for: UIControl.State.normal) + } + + private func stopRecording() { + do { + try VoiceProcessor.instance.stop() + } catch { + let alert = UIAlertController( + title: "Alert", + message: "Could not stop voice processor.", + preferredStyle: UIAlertController.Style.alert) + alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.default, handler: nil)) + self.present(alert, animated: true, completion: nil) + return + } + isRecording = false + + if DUMP_AUDIO { do { - try VoiceProcessor.instance.stop() + try dumpAudio(audioData: recordedAudio, audioFileName: "ios_voice_processor.wav") } catch { + print("Failed to dump audio: \(error)") + } + } + startButton.setTitle("START", for: UIControl.State.normal) + } + + private func onUserPermissionResponse(isGranted: Bool) -> Void { + DispatchQueue.main.async { + if isGranted { + self.startRecording() + } else { let alert = UIAlertController( title: "Alert", - message: "Could not stop voice processor.", + message: "Need record audio permission for demo.", preferredStyle: UIAlertController.Style.alert) - alert.addAction(UIAlertAction(title: "Click", style: UIAlertAction.Style.default, handler: nil)) + alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.default, handler: nil)) self.present(alert, animated: true, completion: nil) - return } - isRecording = false - startButton.setTitle("START", for: UIControl.State.normal) } } - - private func audioCallback(pcm: [Int16]) -> Void { - // do something with pcm - print("Recevied pcm with length: ", pcm.count) + + private func audioCallback(frame: [Int16]) -> Void { + if DUMP_AUDIO { + recordedAudio.append(contentsOf: frame) + } } + private func dumpAudio(audioData: [Int16], audioFileName: String) throws { + let outputDir = try FileManager.default.url( + for: .documentDirectory, + in: .userDomainMask, + appropriateFor: nil, + create: false) + print(outputDir) + let fileUrl = outputDir.appendingPathComponent(audioFileName) + + if FileManager.default.fileExists(atPath: fileUrl.path) { + try FileManager.default.removeItem(at: fileUrl) + } + let 
audioFormat = AVAudioFormat( + commonFormat: .pcmFormatInt16, + sampleRate: Double(SAMPLE_RATE), + channels: 1, + interleaved: true)! + + let audioFile = try AVAudioFile( + forWriting: fileUrl, + settings: audioFormat.settings, + commonFormat: .pcmFormatInt16, + interleaved: true) + + let writeBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: AVAudioFrameCount(audioData.count))! + memcpy(writeBuffer.int16ChannelData![0], audioData, audioData.count * 2) + writeBuffer.frameLength = UInt32(audioData.count) + + try audioFile.write(from: writeBuffer) + } } diff --git a/VoiceProcessor.swift b/VoiceProcessor.swift index 6ce1e6b..3ee3ce7 100644 --- a/VoiceProcessor.swift +++ b/VoiceProcessor.swift @@ -49,7 +49,7 @@ public class VoiceProcessor { private var audioQueue: AudioQueueRef! private var bufferList = [AudioQueueBufferRef?](repeating: nil, count: 3) private var circularBuffer: VoiceProcessorBuffer? - + private var frameListeners: [VoiceProcessorFrameListener] = [] private var errorListeners: [VoiceProcessorErrorListener] = [] @@ -75,11 +75,6 @@ public class VoiceProcessor { errorListeners.count } - public var hasRecordAudioPermission: Bool { - AVAudioSession.sharedInstance().recordPermission == .granted - } - - private init() { NotificationCenter.default.addObserver( self, @@ -88,6 +83,14 @@ public class VoiceProcessor { object: AVAudioSession.sharedInstance()) } + public static var hasRecordAudioPermission: Bool { + AVAudioSession.sharedInstance().recordPermission == .granted + } + + public static func requestRecordAudioPermission(_ response: @escaping (Bool) -> Void) { + AVAudioSession.sharedInstance().requestRecordPermission(response) + } + public func addFrameListener(_ listener: VoiceProcessorFrameListener) { lock.lock() frameListeners.append(listener) @@ -226,7 +229,7 @@ public class VoiceProcessor { self.onError(VoiceProcessorRuntimeError("Unable to get audio frame: frame length is nil")) return } - + guard let circularBuffer = self.circularBuffer else { self.onError(VoiceProcessorRuntimeError("Unable to get audio frame: circular buffer is nil")) return @@ -243,7 +246,7 @@ public class VoiceProcessor { print("Unknown error encountered") return } - + if circularBuffer.availableSamples() >= frameLength { let frame = circularBuffer.read(count: Int(frameLength)) if (frame.count != frameLength) { From 9a745f61b8d2e50dd2c04b0e7e510c9851de742e Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Wed, 26 Jul 2023 17:01:41 -0700 Subject: [PATCH 03/14] add vu meter to demo --- .../project.pbxproj | 4 ++ .../Base.lproj/Main.storyboard | 22 ++++++--- Example/ios-voice-processor/VUMeterView.swift | 47 +++++++++++++++++++ .../ios-voice-processor/ViewController.swift | 25 ++++++++-- 4 files changed, 86 insertions(+), 12 deletions(-) create mode 100644 Example/ios-voice-processor/VUMeterView.swift diff --git a/Example/ios-voice-processor.xcodeproj/project.pbxproj b/Example/ios-voice-processor.xcodeproj/project.pbxproj index 08ccc30..0b75b7d 100644 --- a/Example/ios-voice-processor.xcodeproj/project.pbxproj +++ b/Example/ios-voice-processor.xcodeproj/project.pbxproj @@ -16,6 +16,7 @@ 607FACD61AFB9204008FA782 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD51AFB9204008FA782 /* AppDelegate.swift */; }; 607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDC1AFB9204008FA782 /* Images.xcassets */; }; 66A25692DB18E8BE74444FD0 /* Pods_ios_voice_processor_ExampleUITests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef 
= C0C433ACC5D32169CB6CAE92 /* Pods_ios_voice_processor_ExampleUITests.framework */; }; + A52A7DC2A02D4BD6D381CF8C /* VUMeterView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A52A712372EAD5F53885CD53 /* VUMeterView.swift */; }; F9E82547CB5426845C7CC97E /* Pods_ios_voice_processor_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0247FE9E2A70689500D368BE /* Pods_ios_voice_processor_Example.framework */; }; /* End PBXBuildFile section */ @@ -64,6 +65,7 @@ 607FACDC1AFB9204008FA782 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = ""; }; 8B9B48BADD35114B8CAEA581 /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 9AB8D9298C33D6D267509D99 /* LICENSE */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = ""; }; + A52A712372EAD5F53885CD53 /* VUMeterView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VUMeterView.swift; sourceTree = ""; }; B93F49F857B5BE6CDF82B6D9 /* Pods-ios-voice-processor_ExampleUITests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ios-voice-processor_ExampleUITests.debug.xcconfig"; path = "Target Support Files/Pods-ios-voice-processor_ExampleUITests/Pods-ios-voice-processor_ExampleUITests.debug.xcconfig"; sourceTree = ""; }; C0C433ACC5D32169CB6CAE92 /* Pods_ios_voice_processor_ExampleUITests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_ios_voice_processor_ExampleUITests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; D1EFD1FF4DA5E077FC1E9E1E /* Pods-ios-voice-processor_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ios-voice-processor_Example.release.xcconfig"; path = "Target Support Files/Pods-ios-voice-processor_Example/Pods-ios-voice-processor_Example.release.xcconfig"; sourceTree = ""; }; @@ -144,6 +146,7 @@ 607FACD71AFB9204008FA782 /* ViewController.swift */, 607FACDC1AFB9204008FA782 /* Images.xcassets */, 607FACD31AFB9204008FA782 /* Supporting Files */, + A52A712372EAD5F53885CD53 /* VUMeterView.swift */, ); name = "Example for ios-voice-processor"; path = "ios-voice-processor"; @@ -382,6 +385,7 @@ files = ( 1ED03A0226BC5B9B0078E0E7 /* ViewController.swift in Sources */, 607FACD61AFB9204008FA782 /* AppDelegate.swift in Sources */, + A52A7DC2A02D4BD6D381CF8C /* VUMeterView.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; diff --git a/Example/ios-voice-processor/Base.lproj/Main.storyboard b/Example/ios-voice-processor/Base.lproj/Main.storyboard index 29814b9..eaa5cbd 100644 --- a/Example/ios-voice-processor/Base.lproj/Main.storyboard +++ b/Example/ios-voice-processor/Base.lproj/Main.storyboard @@ -1,9 +1,9 @@ - + - + @@ -13,12 +13,12 @@ - + - + + + + + + + + - + + - + diff --git a/Example/ios-voice-processor/VUMeterView.swift b/Example/ios-voice-processor/VUMeterView.swift new file mode 100644 index 0000000..3faf3a3 --- /dev/null +++ b/Example/ios-voice-processor/VUMeterView.swift @@ -0,0 +1,47 @@ +// +// Copyright 2023 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. 
+// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. +// + +import UIKit + +class VUMeterView: UIView { + + private let DBFS_OFFSET = 60.0 + private let VOLUME_HISTORY_CAPACITY = 5 + + private var volumeHistory: [Double] = [] + private var volumeAverage: Double = 0 + + public func addVolumeValue(dbfsValue: Double) { + + var adjustedValue = dbfsValue + DBFS_OFFSET + adjustedValue = (max(0.0, adjustedValue) / DBFS_OFFSET) + adjustedValue = min(1.0, adjustedValue) + + if volumeHistory.count == VOLUME_HISTORY_CAPACITY { + volumeHistory.removeFirst() + } + volumeHistory.append(adjustedValue) + volumeAverage = volumeHistory.reduce(0, +) / Double(volumeHistory.count) + + setNeedsDisplay() + } + + override func draw(_ rect: CGRect) { + let context = UIGraphicsGetCurrentContext() + context?.clear(rect) + + let emptyRect = CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height) + context?.setFillColor(UIColor.gray.cgColor) + context?.fill(emptyRect) + + let meterRect = CGRect(x: 0, y: 0, width: bounds.width * CGFloat(volumeAverage), height: bounds.height) + context?.setFillColor(UIColor(red: 0.216, green: 0.49, blue: 1, alpha: 1).cgColor) + context?.fill(meterRect) + } +} diff --git a/Example/ios-voice-processor/ViewController.swift b/Example/ios-voice-processor/ViewController.swift index 5ddd940..0d314c7 100644 --- a/Example/ios-voice-processor/ViewController.swift +++ b/Example/ios-voice-processor/ViewController.swift @@ -14,14 +14,15 @@ import ios_voice_processor class ViewController: UIViewController { @IBOutlet weak var startButton: UIButton! + @IBOutlet weak var vuMeterView: VUMeterView! 
+ private let FRAME_LENGTH: UInt32 = 512 + private let SAMPLE_RATE: UInt32 = 16000 + private let DUMP_AUDIO: Bool = false + private var isRecording: Bool = false private var recordedAudio: [Int16] = [] - let FRAME_LENGTH: UInt32 = 512 - let SAMPLE_RATE: UInt32 = 16000 - let DUMP_AUDIO: Bool = true - override func viewDidLoad() { super.viewDidLoad() @@ -30,10 +31,15 @@ class ViewController: UIViewController { startButton.frame.size = startButtonSize startButton.frame.origin = - CGPoint(x: (viewSize.width - startButtonSize.width) / 2, y: (viewSize.height - startButtonSize.height) / 2) + CGPoint(x: (viewSize.width - startButtonSize.width) / 2, y: (viewSize.height - startButtonSize.height - 40)) startButton.layer.cornerRadius = 0.5 * startButton.bounds.size.width startButton.clipsToBounds = true + let vuMeterSize = CGSize(width: view.frame.width - 20, height: 80) + vuMeterView.frame.size = vuMeterSize + vuMeterView.frame.origin = CGPoint(x: (viewSize.width - vuMeterSize.width) / 2, y: (viewSize.height - vuMeterSize.height) / 2) + vuMeterView.clipsToBounds = true + let frameListener = VoiceProcessorFrameListener(audioCallback) VoiceProcessor.instance.addFrameListener(frameListener) } @@ -119,6 +125,15 @@ class ViewController: UIViewController { if DUMP_AUDIO { recordedAudio.append(contentsOf: frame) } + + let sum = frame.reduce(0) { $0 + (Double($1) * Double($1)) } + let rms = sqrt(sum / Double(frame.count)) + + let dbfs = 20 * log10(rms / Double(INT16_MAX)) + + DispatchQueue.main.async { + self.vuMeterView.addVolumeValue(dbfsValue: dbfs) + } } private func dumpAudio(audioData: [Int16], audioFileName: String) throws { From 79f9467f73b7e1f3fc5a3602e8df60d690ad20dd Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Wed, 26 Jul 2023 17:33:21 -0700 Subject: [PATCH 04/14] doc comments --- .../VoiceProcessorBufferTests.swift | 9 +++ VoiceProcessor.swift | 61 ++++++++++++++++++- VoiceProcessorBuffer.swift | 26 +++++++- 3 files changed, 92 insertions(+), 4 deletions(-) diff --git a/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift b/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift index 451e6ca..5304090 100644 --- a/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift +++ b/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift @@ -1,3 +1,12 @@ +// +// Copyright 2023 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. +// + import XCTest import ios_voice_processor diff --git a/VoiceProcessor.swift b/VoiceProcessor.swift index 3ee3ce7..8ac80be 100644 --- a/VoiceProcessor.swift +++ b/VoiceProcessor.swift @@ -9,15 +9,21 @@ import AVFoundation +/// Typealias for the callback function that handles frames of audio data. public typealias VoiceProcessorFrameCallback = ([Int16]) -> () +/// Listener class for receiving audio frames from `VoiceProcessor` via the `onFrame` property. public class VoiceProcessorFrameListener { private let callback_: VoiceProcessorFrameCallback + /// Initializes a new `VoiceProcessorFrameListener`. + /// + /// - Parameter callback: The callback function to be called when an audio frame is received. 
public init(_ callback: @escaping VoiceProcessorFrameCallback) {
         callback_ = callback
     }
 
+    /// Function called when a frame of audio is received.
     public var onFrame: VoiceProcessorFrameCallback {
         get {
             callback_
@@ -25,15 +31,21 @@ public class VoiceProcessorFrameListener {
     }
 }
 
+/// Typealias for the callback function that handles errors that are emitted from `VoiceProcessor`.
 public typealias VoiceProcessorErrorCallback = (VoiceProcessorError) -> ()
 
+/// Listener class for receiving errors from `VoiceProcessor` via the `onError` property.
 public class VoiceProcessorErrorListener {
     private let callback_: VoiceProcessorErrorCallback
 
+    /// Initializes a new `VoiceProcessorErrorListener`.
+    ///
+    /// - Parameter callback: The callback function to be called when a `VoiceProcessorError` occurs.
     public init(_ callback: @escaping VoiceProcessorErrorCallback) {
         callback_ = callback
     }
-
+
+    /// Function called when a `VoiceProcessorError` occurs.
     public var onError: VoiceProcessorErrorCallback {
         get {
             callback_
@@ -41,7 +53,10 @@ public class VoiceProcessorErrorListener {
     }
 }
 
+/// Captures audio from the device's microphone and delivers frames of raw audio data to registered listeners.
 public class VoiceProcessor {
+
+    /// The singleton instance of `VoiceProcessor`.
     public static let instance: VoiceProcessor = VoiceProcessor()
 
     private let lock = NSLock()
@@ -54,23 +69,30 @@ public class VoiceProcessor {
     private var errorListeners: [VoiceProcessorErrorListener] = []
 
     private var isRecording_: Bool = false
+    private var frameLength_: UInt32? = nil
+    private var sampleRate_: UInt32? = nil
+
+    /// A boolean value indicating if the `VoiceProcessor` is currently recording audio.
     public var isRecording: Bool {
         isRecording_
     }
 
-    private var frameLength_: UInt32? = nil
+    /// The number of audio samples per frame. Set when calling the `start(frameLength:sampleRate:)` method.
     public var frameLength: UInt32? {
         frameLength_
     }
 
-    private var sampleRate_: UInt32? = nil
+    /// The sample rate for audio recording, set when calling the `start(frameLength:sampleRate:)` method.
     public var sampleRate: UInt32? {
         sampleRate_
     }
 
+    /// The number of registered `VoiceProcessorFrameListeners`.
     public var numFrameListeners: Int {
         frameListeners.count
     }
+
+    /// The number of registered `VoiceProcessorErrorListeners`.
     public var numErrorListeners: Int {
         errorListeners.count
     }
@@ -83,26 +105,39 @@ public class VoiceProcessor {
                 object: AVAudioSession.sharedInstance())
     }
 
+    /// Indicates whether the app has permission to record audio.
     public static var hasRecordAudioPermission: Bool {
         AVAudioSession.sharedInstance().recordPermission == .granted
     }
 
+    /// Requests permission to record audio from the user.
+    ///
+    /// - Parameter response: A closure to handle the user's response to the permission request.
     public static func requestRecordAudioPermission(_ response: @escaping (Bool) -> Void) {
         AVAudioSession.sharedInstance().requestRecordPermission(response)
     }
 
+    /// Adds a listener to receive audio frames.
+    ///
+    /// - Parameter listener: The `VoiceProcessorFrameListener` to be added as a frame listener.
     public func addFrameListener(_ listener: VoiceProcessorFrameListener) {
         lock.lock()
         frameListeners.append(listener)
         lock.unlock()
     }
 
+    /// Adds multiple frame listeners to receive audio frames.
+    ///
+    /// - Parameter listeners: An array of `VoiceProcessorFrameListener` to be added as frame listeners.
     public func addFrameListeners(_ listeners: [VoiceProcessorFrameListener]) {
         lock.lock()
         frameListeners.append(contentsOf: listeners)
         lock.unlock()
     }
 
+    /// Removes a previously added frame listener.
+    ///
+    /// - Parameter listener: The `VoiceProcessorFrameListener` to be removed.
     public func removeFrameListener(_ listener: VoiceProcessorFrameListener) {
         lock.lock()
         frameListeners.removeAll {
@@ -111,6 +146,9 @@ public class VoiceProcessor {
         lock.unlock()
     }
 
+    /// Removes multiple previously added frame listeners.
+    ///
+    /// - Parameter listeners: An array of `VoiceProcessorFrameListener` to be removed.
     public func removeFrameListeners(_ listeners: [VoiceProcessorFrameListener]) {
         lock.lock()
         for listener in listeners {
@@ -121,18 +159,25 @@ public class VoiceProcessor {
         lock.unlock()
     }
 
+    /// Clears all currently registed frame listeners.
     public func clearFrameListeners() {
         lock.lock()
         frameListeners.removeAll()
         lock.unlock()
     }
 
+    // Adds an error listener.
+    ///
+    /// - Parameter listener: The `VoiceProcessorErrorListener` to be added as an error listener.
     public func addErrorListener(_ listener: VoiceProcessorErrorListener) {
         lock.lock()
         errorListeners.append(listener)
         lock.unlock()
     }
 
+    /// Removes a previously added error listener.
+    ///
+    /// - Parameter listener: The `VoiceProcessorErrorListener` to be removed.
     public func removeErrorListener(_ listener: VoiceProcessorErrorListener) {
         lock.lock()
         errorListeners.removeAll {
@@ -141,12 +186,19 @@ public class VoiceProcessor {
         lock.unlock()
     }
 
+    /// Clears all error listeners.
     public func clearErrorListeners() {
         lock.lock()
         errorListeners.removeAll()
         lock.unlock()
     }
 
+    /// Starts audio recording with the specified audio properties.
+    ///
+    /// - Parameters:
+    ///   - frameLength: The length of each audio frame, in number of samples.
+    ///   - sampleRate: The sample rate to record audio at, in Hz.
+    /// - Throws: An error if there is an issue starting the audio recording.
     public func start(frameLength: UInt32, sampleRate: UInt32) throws {
         if frameLength == 0 {
             throw VoiceProcessorArgumentError("Frame length cannot be zero.")
@@ -208,6 +260,9 @@ public class VoiceProcessor {
         isRecording_ = true
     }
 
+    /// Stops audio recording and releases audio resources.
+    ///
+    /// - Throws: An error if there is an issue stopping the audio recording.
     public func stop() throws {
         if !isRecording_ {
             return
diff --git a/VoiceProcessorBuffer.swift b/VoiceProcessorBuffer.swift
index 9712ffc..bc9cc89 100644
--- a/VoiceProcessorBuffer.swift
+++ b/VoiceProcessorBuffer.swift
@@ -1,12 +1,29 @@
+//
+//  Copyright 2023 Picovoice Inc.
+//  You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE"
+//  file accompanying this source.
+//  Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+//  an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+//  specific language governing permissions and limitations under the License.
+//
+
+/// A circular buffer for storing 16-bit integer audio samples.
 public class VoiceProcessorBuffer {
     private var buffer: [Int16]
     private var readIndex: Int = 0
     private var writeIndex: Int = 0
 
+    /// Initializes a new instance of the circular buffer with the specified size, in number of samples.
+    ///
+    /// - Parameter size: The size of the circular buffer, in number of samples.
     public init(size: Int) {
         buffer = [Int16](repeating: 0, count: size)
     }
 
+    /// Writes an array of audio samples to the circular buffer.
+    ///
+    /// - Parameter samples: An array of audio samples to write to the buffer.
+ /// - Throws: A `VoiceProcessorReadError` if the buffer overflows and samples are dropped. public func write(samples: [Int16]) throws { var numOverwrite = 0 for sample in samples { @@ -23,7 +40,11 @@ public class VoiceProcessorBuffer { throw VoiceProcessorReadError("Buffer overflow occurred - \(numOverwrite) samples dropped.") } } - + + /// Reads a specified number of audio samples from the circular buffer. + /// + /// - Parameter count: The number of samples to read from the buffer. + /// - Returns: An array of audio samples read from the buffer. Will either be the requested amount, or however many are available if that is less than `count`. public func read(count: Int) -> [Int16] { var samples: [Int16] = [] @@ -36,6 +57,9 @@ public class VoiceProcessorBuffer { return samples } + /// Returns the number of samples that are available to read from the buffer. + /// + /// - Returns: The number of available samples in the buffer. public func availableSamples() -> Int { let diff = writeIndex - readIndex return diff >= 0 ? diff : diff + buffer.count From 2424670262f276d896334210200a92058af7d850 Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Thu, 27 Jul 2023 11:12:42 -0700 Subject: [PATCH 05/14] small changes --- .../project.pbxproj | 26 ++++++------------- .../ios-voice-processor/ViewController.swift | 1 - VoiceProcessor.swift | 8 ++++-- 3 files changed, 14 insertions(+), 21 deletions(-) diff --git a/Example/ios-voice-processor.xcodeproj/project.pbxproj b/Example/ios-voice-processor.xcodeproj/project.pbxproj index 0b75b7d..9727711 100644 --- a/Example/ios-voice-processor.xcodeproj/project.pbxproj +++ b/Example/ios-voice-processor.xcodeproj/project.pbxproj @@ -9,7 +9,6 @@ /* Begin PBXBuildFile section */ 0247FE7E2A7062B500D368BE /* VoiceProcessorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0247FE7D2A7062B500D368BE /* VoiceProcessorTests.swift */; }; 0247FE8A2A70660A00D368BE /* Pods_ios_voice_processor_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 31BD2D0D93155E8AFCFC8373 /* Pods_ios_voice_processor_Example.framework */; }; - 0247FE8B2A70660A00D368BE /* Pods_ios_voice_processor_Example.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 31BD2D0D93155E8AFCFC8373 /* Pods_ios_voice_processor_Example.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; 0247FEA72A70962B00D368BE /* VoiceProcessorBufferTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0247FEA62A70962B00D368BE /* VoiceProcessorBufferTests.swift */; }; 1E0EC0D026BC5B3100A5C080 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1E0EC0CD26BC59C800A5C080 /* Main.storyboard */; }; 1ED03A0226BC5B9B0078E0E7 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD71AFB9204008FA782 /* ViewController.swift */; }; @@ -30,20 +29,6 @@ }; /* End PBXContainerItemProxy section */ -/* Begin PBXCopyFilesBuildPhase section */ - 0247FE8C2A70660A00D368BE /* Embed Frameworks */ = { - isa = PBXCopyFilesBuildPhase; - buildActionMask = 2147483647; - dstPath = ""; - dstSubfolderSpec = 10; - files = ( - 0247FE8B2A70660A00D368BE /* Pods_ios_voice_processor_Example.framework in Embed Frameworks */, - ); - name = "Embed Frameworks"; - runOnlyForDeploymentPostprocessing = 0; - }; -/* End PBXCopyFilesBuildPhase section */ - /* Begin PBXFileReference section */ 0247FE7B2A7062B500D368BE /* ios-voice-processor_ExampleUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = 
"ios-voice-processor_ExampleUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; 0247FE7D2A7062B500D368BE /* VoiceProcessorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceProcessorTests.swift; sourceTree = ""; }; @@ -213,7 +198,6 @@ 607FACCD1AFB9204008FA782 /* Frameworks */, 607FACCE1AFB9204008FA782 /* Resources */, 74B1AD03E059B510840AC16D /* [CP] Embed Pods Frameworks */, - 0247FE8C2A70660A00D368BE /* Embed Frameworks */, ); buildRules = ( ); @@ -501,9 +485,11 @@ CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + DEVELOPMENT_TEAM = 65723695GD; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; GCC_C_LANGUAGE_STANDARD = gnu99; @@ -524,6 +510,7 @@ IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; + OTHER_CODE_SIGN_FLAGS = ""; SDKROOT = iphoneos; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; @@ -555,9 +542,11 @@ CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + DEVELOPMENT_TEAM = 65723695GD; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = gnu99; @@ -570,6 +559,7 @@ GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = NO; + OTHER_CODE_SIGN_FLAGS = ""; SDKROOT = iphoneos; SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; SWIFT_VERSION = 5.0; @@ -587,7 +577,7 @@ IPHONEOS_DEPLOYMENT_TARGET = 11.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; MODULE_NAME = ExampleApp; - PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice; + PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice.voiceprocessorexample; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_SWIFT3_OBJC_INFERENCE = Default; SWIFT_VERSION = 5.0; @@ -604,7 +594,7 @@ IPHONEOS_DEPLOYMENT_TARGET = 11.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; MODULE_NAME = ExampleApp; - PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice; + PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice.voiceprocessorexample; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_SWIFT3_OBJC_INFERENCE = Default; SWIFT_VERSION = 5.0; diff --git a/Example/ios-voice-processor/ViewController.swift b/Example/ios-voice-processor/ViewController.swift index 0d314c7..a3a124d 100644 --- a/Example/ios-voice-processor/ViewController.swift +++ b/Example/ios-voice-processor/ViewController.swift @@ -142,7 +142,6 @@ class ViewController: UIViewController { in: .userDomainMask, appropriateFor: nil, create: false) - print(outputDir) let fileUrl = outputDir.appendingPathComponent(audioFileName) if FileManager.default.fileExists(atPath: fileUrl.path) { diff --git a/VoiceProcessor.swift b/VoiceProcessor.swift index 8ac80be..cc7b367 100644 --- a/VoiceProcessor.swift +++ b/VoiceProcessor.swift @@ -353,7 +353,9 @@ public class VoiceProcessor { private func onFrame(_ frame: [Int16]) { lock.lock() for listener in frameListeners { - listener.onFrame(frame) + DispatchQueue.main.async { + listener.onFrame(frame) + } } lock.unlock() } @@ -361,7 +363,9 @@ public class VoiceProcessor { private func onError(_ error: VoiceProcessorError) { lock.lock() for listener in errorListeners { - 
listener.onError(error) + DispatchQueue.main.async { + listener.onError(error) + } } lock.unlock() } From 501cd6f8b34b6183cc3073363e5f8bdea96c70e8 Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Thu, 27 Jul 2023 12:46:36 -0700 Subject: [PATCH 06/14] actions --- .github/ISSUE_TEMPLATE/bug_report.md | 20 ++ .github/ISSUE_TEMPLATE/documentation.md | 14 ++ .github/ISSUE_TEMPLATE/feature_request.md | 20 ++ .github/workflows/ios-demo.yml | 43 ++++ .github/workflows/ios-tests.yml | 46 ++++ .github/workflows/link-check.yml | 18 ++ .github/workflows/spell-check.yml | 25 +++ .github/workflows/swift-codestyle.yml | 24 +++ .../VoiceProcessorTests.swift | 23 +- .../project.pbxproj | 4 +- .../ios-voice-processor-Example.xcscheme | 20 -- README.md | 8 +- VoiceProcessor.swift | 16 +- resources/.lint/spell-check/.cspell.json | 32 +++ resources/.lint/spell-check/dict.txt | 203 ++++++++++++++++++ resources/.lint/swift/.swiftlint.yml | 9 + 16 files changed, 476 insertions(+), 49 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/documentation.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/workflows/ios-demo.yml create mode 100644 .github/workflows/ios-tests.yml create mode 100644 .github/workflows/link-check.yml create mode 100644 .github/workflows/spell-check.yml create mode 100644 .github/workflows/swift-codestyle.yml create mode 100644 resources/.lint/spell-check/.cspell.json create mode 100644 resources/.lint/spell-check/dict.txt create mode 100644 resources/.lint/swift/.swiftlint.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..d80f978 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,20 @@ +--- +name: Bug report +about: Bugs in iOS Voice Processor +title: 'iOS Voice Processor Issue: ' +labels: bug +assignees: '' + +--- + +Make sure you have read the documentation, and have put forth a reasonable effort to find an existing answer. + +### Expected behaviour + + +### Actual behaviour + + +### Steps to reproduce the behaviour + +(Include enough details so that the issue can be reproduced independently.) diff --git a/.github/ISSUE_TEMPLATE/documentation.md b/.github/ISSUE_TEMPLATE/documentation.md new file mode 100644 index 0000000..ed1307f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/documentation.md @@ -0,0 +1,14 @@ +--- +name: Documentation +about: Issues around documentation of iOS Voice Processor +title: iOS Voice Processor Documentation Issue +labels: documentation +assignees: '' + +--- + +### What is the URL of the doc? + + + +### What's the nature of the issue? (e.g. steps do not work, typos/grammar/spelling, etc., out of date) diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..8270d1c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: iOS Voice Processor suggestion +title: '' +labels: enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. 
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/workflows/ios-demo.yml b/.github/workflows/ios-demo.yml
new file mode 100644
index 0000000..b3f406d
--- /dev/null
+++ b/.github/workflows/ios-demo.yml
@@ -0,0 +1,43 @@
+name: iOS Demo
+
+on:
+  workflow_dispatch:
+  push:
+    branches: [ main ]
+    paths:
+      - '.github/workflows/ios-demo.yml'
+      - 'example/**'
+      - '!example/README.md'
+  pull_request:
+    branches: [ main, 'v[0-9]+.[0-9]+' ]
+    paths:
+      - '.github/workflows/ios-demo.yml'
+      - 'example/**'
+      - '!example/README.md'
+
+jobs:
+  build-demo:
+    runs-on: macos-latest
+    defaults:
+      run:
+        working-directory: example
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Install Cocoapods
+        run: gem install cocoapods
+
+      - name: Run Cocoapods
+        run: pod install
+
+      - name: Xcode Build
+        run: xcodebuild build
+          -configuration Debug
+          -workspace ios-voice-processor.xcworkspace
+          -sdk iphonesimulator
+          -scheme ios-voice-processor-Example
+          -destination "platform=iOS Simulator,name=iPhone 12"
+          CODE_SIGNING_ALLOWED=NO
+
diff --git a/.github/workflows/ios-tests.yml b/.github/workflows/ios-tests.yml
new file mode 100644
index 0000000..e0929a9
--- /dev/null
+++ b/.github/workflows/ios-tests.yml
@@ -0,0 +1,46 @@
+name: iOS Tests
+
+on:
+  workflow_dispatch:
+  push:
+    branches: [ main ]
+    paths:
+      - '.github/workflows/ios-tests.yml'
+      - "**"
+      - "!README.md"
+      - '!example/README.md'
+  pull_request:
+    branches: [ main, 'v[0-9]+.[0-9]+' ]
+    paths:
+      - '.github/workflows/ios-tests.yml'
+      - "**"
+      - "!README.md"
+      - '!example/README.md'
+
+defaults:
+  run:
+    working-directory: example
+
+jobs:
+  build:
+    name: Run iOS Tests
+    runs-on: macos-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Install Cocoapods
+        run: gem install cocoapods
+
+      - name: Run Cocoapods
+        run: pod install
+
+      - name: Xcode Build
+        run: xcodebuild test
+          -configuration Debug
+          -workspace ios-voice-processor.xcworkspace
+          -sdk iphonesimulator
+          -scheme ios-voice-processor-Example
+          -destination "platform=iOS Simulator,name=iPhone 12"
+          CODE_SIGNING_ALLOWED=NO
diff --git a/.github/workflows/link-check.yml b/.github/workflows/link-check.yml
new file mode 100644
index 0000000..ca92b2a
--- /dev/null
+++ b/.github/workflows/link-check.yml
@@ -0,0 +1,18 @@
+name: Check Markdown links
+
+on:
+  workflow_dispatch:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main, 'v[0-9]+.[0-9]+' ]
+
+jobs:
+  markdown-link-check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: gaurav-nelson/github-action-markdown-link-check@1.0.14
+        with:
+          use-quiet-mode: 'yes'
+          use-verbose-mode: 'yes'
diff --git a/.github/workflows/spell-check.yml b/.github/workflows/spell-check.yml
new file mode 100644
index 0000000..3bea714
--- /dev/null
+++ b/.github/workflows/spell-check.yml
@@ -0,0 +1,25 @@
+name: SpellCheck
+
+on:
+  workflow_dispatch:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master, 'v[0-9]+.[0-9]+' ]
+
+jobs:
+  markdown:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: actions/setup-node@v3
+        with:
+          node-version: 18
+
+      - name: Install CSpell
+        run: npm install -g cspell
+
+      - name: Run CSpell
+        run: cspell --config resources/.lint/spell-check/.cspell.json "**/*"
diff --git a/.github/workflows/swift-codestyle.yml b/.github/workflows/swift-codestyle.yml
new file mode 100644
index 0000000..e875561
--- /dev/null
+++ b/.github/workflows/swift-codestyle.yml
@@ -0,0 +1,24 @@
+name: Swift Codestyle
+
+on:
+  workflow_dispatch:
+  push:
+    branches: [ main ]
+    paths:
+      - '**/*.swift'
+  pull_request:
+    branches: [ main, 'v[0-9]+.[0-9]+' ]
+    paths:
+      - '**/*.swift'
+
+jobs:
+  check-swift-codestyle:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Check swift codestyle
+        uses: norio-nomura/action-swiftlint@3.2.1
+        with:
+          args: lint --config resources/.lint/swift/.swiftlint.yml --strict
diff --git a/Example/VoiceProcessorTests/VoiceProcessorTests.swift b/Example/VoiceProcessorTests/VoiceProcessorTests.swift
index 8195a75..a9698eb 100644
--- a/Example/VoiceProcessorTests/VoiceProcessorTests.swift
+++ b/Example/VoiceProcessorTests/VoiceProcessorTests.swift
@@ -16,10 +16,10 @@ class VoiceProcessorTests: XCTestCase {
 
     let frameLength: UInt32 = 512
     let sampleRate: UInt32 = 16000
-
+
     var frameCount = 0
     var errorCount = 0
-
+
     override func setUp() {
         super.setUp()
         continueAfterFailure = false
@@ -28,18 +28,7 @@ class VoiceProcessorTests: XCTestCase {
     override func tearDown() {
         super.tearDown()
     }
-
-    func testGetInstance() {
-        let vp = VoiceProcessor.instance
-        if (!vp.hasRecordAudioPermission) {
-            AVAudioSession.sharedInstance().requestRecordPermission { granted in
-                if !granted {
-                    XCTFail()
-                }
-            }
-        }
-    }
-
+
     func testBasic() throws {
         let vp = VoiceProcessor.instance
 
@@ -57,9 +46,11 @@ class VoiceProcessorTests: XCTestCase {
         vp.addFrameListener(vpFrameListener)
         vp.addErrorListener(vpErrorListener)
         try vp.start(frameLength: frameLength, sampleRate: sampleRate)
+        XCTAssertEqual(vp.frameLength, frameLength)
+        XCTAssertEqual(vp.sampleRate, sampleRate)
         XCTAssert(vp.isRecording == true)
 
-        sleep(1)
+        sleep(3)
 
         try vp.stop()
 
@@ -72,7 +63,7 @@ class VoiceProcessorTests: XCTestCase {
         frameCount = 0
         errorCount = 0
     }
-
+
     func testInvalidSetup() throws {
         let vp = VoiceProcessor.instance
 
diff --git a/Example/ios-voice-processor.xcodeproj/project.pbxproj b/Example/ios-voice-processor.xcodeproj/project.pbxproj
index 9727711..ebc8f3c 100644
--- a/Example/ios-voice-processor.xcodeproj/project.pbxproj
+++ b/Example/ios-voice-processor.xcodeproj/project.pbxproj
@@ -414,7 +414,7 @@
 			DEVELOPMENT_TEAM = 65723695GD;
 			GCC_C_LANGUAGE_STANDARD = gnu11;
 			GENERATE_INFOPLIST_FILE = YES;
-			IPHONEOS_DEPLOYMENT_TARGET = 15.0;
+			IPHONEOS_DEPLOYMENT_TARGET = 11.0;
 			LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
 			MARKETING_VERSION = 1.0;
 			MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
@@ -447,7 +447,7 @@
 			DEVELOPMENT_TEAM = 65723695GD;
 			GCC_C_LANGUAGE_STANDARD = gnu11;
 			GENERATE_INFOPLIST_FILE = YES;
-			IPHONEOS_DEPLOYMENT_TARGET = 15.0;
+			IPHONEOS_DEPLOYMENT_TARGET = 11.0;
 			LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
 			MARKETING_VERSION = 1.0;
 			MTL_FAST_MATH = YES;
diff --git a/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme b/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme
index 1a29885..59be594 100644
--- a/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme
+++ b/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme
@@ -51,16 +51,6 @@
 - - - - - - - -
Date: Thu, 27 Jul 2023 14:00:48 -0700
Subject: [PATCH 07/14] readme

---
 .github/workflows/spell-check.yml            |   4 +-
 Example/README.md                            |  30 ++++
 .../ios-voice-processor/ViewController.swift |  47 ++++---
 README.md                                    | 129 +++++++++++++++---
 4 files changed, 167 insertions(+), 43 deletions(-)
 create mode 100644 Example/README.md

diff --git a/.github/workflows/spell-check.yml b/.github/workflows/spell-check.yml
index 3bea714..8c5eac7 100644
--- a/.github/workflows/spell-check.yml
+++ b/.github/workflows/spell-check.yml
@@ -3,9 +3,9 @@ name: SpellCheck
 on:
   workflow_dispatch:
   push:
-    branches: [ master ]
+    branches: [ main ]
   pull_request:
-    branches: [ master, 'v[0-9]+.[0-9]+' ]
+    branches: [ main, 'v[0-9]+.[0-9]+' ]
 
 jobs:
   markdown:
diff --git a/Example/README.md b/Example/README.md
new file mode 100644
index 0000000..65734c4
--- /dev/null
+++ b/Example/README.md
@@ -0,0 +1,30 @@
+# iOS Voice Processor Example
+
+This is an example app that demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`.
+
+## Requirements
+
+- [Xcode](https://developer.apple.com/xcode/)
+- [CocoaPods](https://cocoapods.org/)
+
+## Compatibility
+
+- iOS 11.0+
+
+## Building
+
+Install the `ios-voice-processor` pod:
+```console
+cd example
+pod install
+```
+
+Open the generated `ios-voice-processor.xcworkspace` file with Xcode and build the project (`Product > Build` or `Product > Run`).
+
+## Usage
+
+Toggle recording on and off with the button in the center of the screen. While recording, the VU meter on the screen will respond to the volume of incoming audio.
+
+## Running the Unit Tests
+
+Ensure you have an iOS device connected or simulator running. Run tests with Xcode (`Product > Test`).
\ No newline at end of file
diff --git a/Example/ios-voice-processor/ViewController.swift b/Example/ios-voice-processor/ViewController.swift
index a3a124d..c6f1bd8 100644
--- a/Example/ios-voice-processor/ViewController.swift
+++ b/Example/ios-voice-processor/ViewController.swift
@@ -42,13 +42,37 @@ class ViewController: UIViewController {
 
         let frameListener = VoiceProcessorFrameListener(audioCallback)
         VoiceProcessor.instance.addFrameListener(frameListener)
+
+        let errorListener = VoiceProcessorErrorListener(errorCallback)
+        VoiceProcessor.instance.addErrorListener(errorListener)
     }
 
-    override func didReceiveMemoryWarning() {
-        super.didReceiveMemoryWarning()
-        // Dispose of any resources that can be recreated.
-    }
+    private func audioCallback(frame: [Int16]) -> Void {
+        if DUMP_AUDIO {
+            recordedAudio.append(contentsOf: frame)
+        }
 
+        let sum = frame.reduce(0) { $0 + (Double($1) * Double($1)) }
+        let rms = sqrt(sum / Double(frame.count))
+
+        let dbfs = 20 * log10(rms / Double(INT16_MAX))
+
+        DispatchQueue.main.async {
+            self.vuMeterView.addVolumeValue(dbfsValue: dbfs)
+        }
+    }
+
+    private func errorCallback(error: VoiceProcessorError) -> Void {
+        DispatchQueue.main.async {
+            let alert = UIAlertController(
+                    title: "Alert",
+                    message: "Voice processor error: \(error)",
+                    preferredStyle: UIAlertController.Style.alert)
+            alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.default, handler: nil))
+            self.present(alert, animated: true, completion: nil)
+        }
+    }
+
     @IBAction func toggleStartButton(_ sender: UIButton) {
         if !isRecording {
             startRecording()
@@ -119,21 +145,6 @@ class ViewController: UIViewController {
         }
     }
 
-    private func audioCallback(frame: [Int16]) -> Void {
-        if DUMP_AUDIO {
-            recordedAudio.append(contentsOf: frame)
-        }
-
-        let sum = frame.reduce(0) { $0 + (Double($1) * Double($1)) }
-        let rms = sqrt(sum / Double(frame.count))
-
-        let dbfs = 20 * log10(rms / Double(INT16_MAX))
-
-        DispatchQueue.main.async {
-            self.vuMeterView.addVolumeValue(dbfsValue: dbfs)
-        }
-    }
-
     private func dumpAudio(audioData: [Int16], audioFileName: String) throws {
         let outputDir = try FileManager.default.url(
                 for: .documentDirectory,
diff --git a/README.md b/README.md
index e23f1ee..e93860b 100644
--- a/README.md
+++ b/README.md
@@ -1,45 +1,130 @@
-# ios-voice-processor
+# iOS Voice Processor
 
-A Cocoa Pod library for real-time voice processing.
+[![GitHub release](https://img.shields.io/github/release/Picovoice/ios-voice-processor.svg)](https://github.com/Picovoice/ios-voice-processor/releases)
+[![GitHub](https://img.shields.io/github/license/Picovoice/ios-voice-processor)](https://github.com/Picovoice/ios-voice-processor/)
+
+[![Cocoapods](https://img.shields.io/cocoapods/v/ios-voice-processor)](https://github.com/CocoaPods/Specs/tree/master/Specs/8/5/4/ios-voice-processor)
+
+Made in Vancouver, Canada by [Picovoice](https://picovoice.ai)
+
+[![Twitter URL](https://img.shields.io/twitter/url?label=%40AiPicovoice&style=social&url=https%3A%2F%2Ftwitter.com%2FAiPicovoice)](https://twitter.com/AiPicovoice)
+
+[![YouTube Channel Views](https://img.shields.io/youtube/channel/views/UCAdi9sTCXLosG1XeqDwLx7w?label=YouTube&style=social)](https://www.youtube.com/channel/UCAdi9sTCXLosG1XeqDwLx7w)
+
+The iOS Voice Processor is an asynchronous audio capture library designed for real-time audio
+processing. Given some specifications, the library delivers frames of raw audio data to the user via
+listeners.
+
+## Table of Contents
+
+- [iOS Voice Processor](#ios-voice-processor)
+  - [Table of Contents](#table-of-contents)
+  - [Requirements](#requirements)
+  - [Compatibility](#compatibility)
+  - [Installation](#installation)
+  - [Permissions](#permissions)
+  - [Usage](#usage)
+    - [Capturing with Multiple Listeners](#capturing-with-multiple-listeners)
+  - [Example](#example)
+  - [Releases](#releases)
+
+## Requirements
+
+- [Xcode](https://developer.apple.com/xcode/)
+- [CocoaPods](https://cocoapods.org/)
+
+## Compatibility
+
+- iOS 11.0+
+
+## Installation
+
+iOS Voice Processor is available via CocoaPods. To import it into your iOS project, add the following line to your Podfile:
+```ruby
+pod 'ios-voice-processor'
+```
+
+## Permissions
+
+To enable recording with your iOS device's microphone you must add the following to your app's `Info.plist` file:
+```xml
+<key>NSMicrophoneUsageDescription</key>
+<string>[Permission explanation]</string>
+```
+
+See our [example app](example/) or [this guide](https://developer.apple.com/documentation/avfaudio/avaudiosession/1616601-requestrecordpermission) for how to properly request this permission from your users.
 
 ## Usage
 
-### Create callback:
+Access the singleton instance of `VoiceProcessor`:
 
 ```swift
-func audioCallback(pcm: [Int16]) -> Void {
-    // do something with pcm
-    print("Recevied pcm with length: ", pcm.count)
+import ios_voice_processor
+
+let voiceProcessor = VoiceProcessor.instance
+```
+
+Add listeners for audio frames and errors:
+
+```swift
+let frameListener = VoiceProcessorFrameListener { frame in
+    // use audio
 }
+
+let errorListener = VoiceProcessorErrorListener { error in
+    // handle error
+}
+
+voiceProcessor.addFrameListener(frameListener);
+voiceProcessor.addErrorListener(errorListener);
 ```
 
-### Start Audio:
+Start audio capture with the desired frame length and audio sample rate:
 
 ```swift
 do {
-    guard try VoiceProcessor.shared.hasPermissions() else {
-        print("Permissions denied.")
-        return
-    }
-
-    try VoiceProcessor.shared.start(
-        frameLength: 512,
-        sampleRate: 16000,
-        audioCallback: self.audioCallback)
+    try voiceProcessor.start(frameLength: 512, sampleRate: 16000);
 } catch {
-    print("Could not start voice processor.")
-    return
+    // handle start error
 }
 ```
 
-### Stop Audio:
+Stop audio capture:
+```swift
+do {
+    try voiceProcessor.stop();
+} catch {
+
+```
+
+Once audio capture has started successfully, any frame listeners assigned to the `VoiceProcessor`
+will start receiving audio frames with the given `frameLength` and `sampleRate`.
+
+### Capturing with Multiple Listeners
+Any number of listeners can be added to and removed from the `VoiceProcessor` instance. However,
+the instance can only record audio with a single audio configuration (`frameLength` and `sampleRate`),
+which all listeners will receive once a call to `start()` has been made. To add multiple listeners:
 
 ```swift
-VoiceProcessor.shared.stop()
+let listener1 = VoiceProcessorFrameListener({_ in })
+let listener2 = VoiceProcessorFrameListener({_ in })
+let listeners: [VoiceProcessorFrameListener] = [listener1, listener2];
+
+voiceProcessor.addFrameListeners(listeners);
+
+voiceProcessor.removeFrameListeners(listeners);
+// or
+voiceProcessor.clearFrameListeners();
 ```
 
 ## Example
 
-To run the example, go to [example](/example).
+The [iOS Voice Processor app](example/) demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`.
+
+## Releases
+
+### v1.0.0 - August 5, 2021
-
-Run `pod install` and then open the example directory in xcode. Then run it in your device or simulator.
+- Initial public release.
From 6b4fac2f9ca17de9d51eba61a63c026bd9a7b1f1 Mon Sep 17 00:00:00 2001
From: Ian Lavery
Date: Thu, 27 Jul 2023 14:10:56 -0700
Subject: [PATCH 08/14] fixes

---
 .../VoiceProcessorBufferTests.swift  | 12 ++++++------
 README.md                            |  6 +++---
 VoiceProcessor.swift                 | 18 +++++++++++++-----
 resources/.lint/spell-check/dict.txt |  3 +++
 4 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift b/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift
index 5304090..7b5ccaf 100644
--- a/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift
+++ b/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift
@@ -32,21 +32,21 @@ class VoiceProcessorBufferTests: XCTestCase {
         let writeSamples: [Int16] = [1, 2, 3, 4, 5]
         let vpBuffer = VoiceProcessorBuffer(size: writeSamples.count + 1)
         XCTAssertEqual(vpBuffer.availableSamples(), 0)
-
+
         try? vpBuffer.write(samples: writeSamples)
         XCTAssertEqual(vpBuffer.availableSamples(), writeSamples.count)
 
         let readSamples = vpBuffer.read(count: 4)
         XCTAssertEqual(vpBuffer.availableSamples(), writeSamples.count - readSamples.count)
-
+
         let writeSamples2: [Int16] = [6, 7]
         try? vpBuffer.write(samples: writeSamples2)
         XCTAssertEqual(vpBuffer.availableSamples(), writeSamples.count - readSamples.count + writeSamples2.count)
 
-        let _ = vpBuffer.read(count: 3)
+        _ = vpBuffer.read(count: 3)
         XCTAssertEqual(vpBuffer.availableSamples(), 0)
     }
-
+
     func testOverwrite() {
         let samplesToFill: [Int16] = [1, 2, 3, 4, 5]
         let vpBuffer = VoiceProcessorBuffer(size: samplesToFill.count + 1)
@@ -56,7 +56,7 @@ class VoiceProcessorBufferTests: XCTestCase {
         XCTAssertThrowsError(try vpBuffer.write(samples: additionalSamples)) { error in
             XCTAssert(error is VoiceProcessorError)
         }
-
+
         let expectedSamples: [Int16] = [3, 4, 5, 6, 7]
         let readSamples = vpBuffer.read(count: expectedSamples.count)
         XCTAssertEqual(readSamples, expectedSamples)
@@ -65,7 +65,7 @@ class VoiceProcessorBufferTests: XCTestCase {
     func testReadMoreThanAvailable() {
         let samplesToFill: [Int16] = [1, 2, 3, 4, 5]
         let vpBuffer = VoiceProcessorBuffer(size: samplesToFill.count + 1)
-
+
         try? vpBuffer.write(samples: samplesToFill)
         let readSamples = vpBuffer.read(count: 10)
         XCTAssertEqual(readSamples.count, samplesToFill.count)
diff --git a/README.md b/README.md
index e93860b..39002b4 100644
--- a/README.md
+++ b/README.md
@@ -54,7 +54,7 @@ To enable recording with your iOS device's microphone you must add the following
 <string>[Permission explanation]</string>
 ```
 
-See our [example app](example/) or [this guide](https://developer.apple.com/documentation/avfaudio/avaudiosession/1616601-requestrecordpermission) for how to properly request this permission from your users.
+See our [example app](./example) or [this guide](https://developer.apple.com/documentation/avfaudio/avaudiosession/1616601-requestrecordpermission) for how to properly request this permission from your users.
 
 ## Usage
 
@@ -96,7 +96,7 @@ Stop audio capture:
 do {
     try voiceProcessor.stop();
 } catch {
-
+}
 ```
 
 Once audio capture has started successfully, any frame listeners assigned to the `VoiceProcessor`
@@ -121,7 +121,7 @@ voiceProcessor.clearFrameListeners();
 
 ## Example
 
-The [iOS Voice Processor app](example/) demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`.
+The [iOS Voice Processor app](./example) demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`.
 ## Releases
diff --git a/VoiceProcessor.swift b/VoiceProcessor.swift
index 185be21..cf7937b 100644
--- a/VoiceProcessor.swift
+++ b/VoiceProcessor.swift
@@ -160,7 +160,7 @@ public class VoiceProcessor {
         lock.unlock()
     }
 
-    /// Clears all currently registed frame listeners.
+    /// Clears all currently registered frame listeners.
     public func clearFrameListeners() {
         lock.lock()
         frameListeners.removeAll()
@@ -211,7 +211,7 @@ public class VoiceProcessor {
         circularBuffer = VoiceProcessorBuffer(size: Int(frameLength * 10))
 
         if isRecording_ {
-            if (frameLength != frameLength_ || sampleRate != sampleRate_) {
+            if frameLength != frameLength_ || sampleRate != sampleRate_ {
                 throw VoiceProcessorArgumentError("""
                                                   VoiceProcessor start() was called with frame length \(frameLength)
                                                   and sample rate \(sampleRate) while already recording
@@ -292,7 +292,9 @@ public class VoiceProcessor {
                 return
             }
 
-            let bufferPtr = bufferRef.pointee.mAudioData.bindMemory(to: Int16.self, capacity: Int(bufferRef.pointee.mAudioDataByteSize) / MemoryLayout<Int16>.size)
+            let bufferPtr = bufferRef.pointee.mAudioData.bindMemory(
+                    to: Int16.self,
+                    capacity: Int(bufferRef.pointee.mAudioDataByteSize) / MemoryLayout<Int16>.size)
             let samples = Array(UnsafeBufferPointer(start: bufferPtr, count: Int(numPackets)))
 
             do {
@@ -306,8 +308,14 @@ public class VoiceProcessor {
 
             if circularBuffer.availableSamples() >= frameLength {
                 let frame = circularBuffer.read(count: Int(frameLength))
-                if (frame.count != frameLength) {
-                    self.onError(VoiceProcessorReadError("Circular buffer returned a frame of size \(frame.count) (frameLength is \(frameLength))"))
+                if frame.count != frameLength {
+                    self.onError(
+                            VoiceProcessorReadError(
+                                    """
+                                    Circular buffer returned a frame of 
+                                    size \(frame.count) (frameLength is \(frameLength))
+                                    """
+                            ))
                 }
                 self.onFrame(frame)
             }
diff --git a/resources/.lint/spell-check/dict.txt b/resources/.lint/spell-check/dict.txt
index 6f519ba..727db61 100644
--- a/resources/.lint/spell-check/dict.txt
+++ b/resources/.lint/spell-check/dict.txt
@@ -40,6 +40,7 @@ coolterm
 cortexm
 cporcupine
 cstring
+dbfs
 dkgray
 dlfcn
 dlopen
@@ -183,6 +184,8 @@ testcases
 timecode
 tmpdir
 unitypackage
+unmanaged
+Unretained
 uppercased
 vtable
 wakeword
From 7ca4c915cc91f0529676dd9ee1ef5bbaa31d85c1 Mon Sep 17 00:00:00 2001
From: Ian Lavery
Date: Thu, 27 Jul 2023 15:00:36 -0700
Subject: [PATCH 09/14] swift style

---
 .../VoiceProcessorTests.swift                 | 122 +++++++++---------
 Example/ios-voice-processor/AppDelegate.swift |   5 +-
 Example/ios-voice-processor/VUMeterView.swift |  20 +--
 .../ios-voice-processor/ViewController.swift  |  50 +++----
 README.md                                     |   4 +-
 VoiceProcessor.swift                          |  22 ++--
 VoiceProcessorBuffer.swift                    |   7 +-
 VoiceProcessorErrors.swift                    |   7 +-
 8 files changed, 117 insertions(+), 120 deletions(-)

diff --git a/Example/VoiceProcessorTests/VoiceProcessorTests.swift b/Example/VoiceProcessorTests/VoiceProcessorTests.swift
index a9698eb..16d920c 100644
--- a/Example/VoiceProcessorTests/VoiceProcessorTests.swift
+++ b/Example/VoiceProcessorTests/VoiceProcessorTests.swift
@@ -30,7 +30,7 @@ class VoiceProcessorTests: XCTestCase {
     }
 
     func testBasic() throws {
-        let vp = VoiceProcessor.instance
+        let voiceProcessor = VoiceProcessor.instance
 
         let vpFrameListener = VoiceProcessorFrameListener { frame in
             XCTAssert(frame.count == self.frameLength)
@@ -42,91 +42,91 @@ class VoiceProcessorTests: XCTestCase {
             self.errorCount += 1
         }
 
-        XCTAssert(vp.isRecording == false)
-        vp.addFrameListener(vpFrameListener)
-        vp.addErrorListener(vpErrorListener)
-        try vp.start(frameLength: frameLength,
sampleRate: sampleRate) - XCTAssertEqual(vp.frameLength, frameLength) - XCTAssertEqual(vp.sampleRate, sampleRate) - XCTAssert(vp.isRecording == true) + XCTAssert(voiceProcessor.isRecording == false) + voiceProcessor.addFrameListener(vpFrameListener) + voiceProcessor.addErrorListener(vpErrorListener) + try voiceProcessor.start(frameLength: frameLength, sampleRate: sampleRate) + XCTAssertEqual(voiceProcessor.frameLength, frameLength) + XCTAssertEqual(voiceProcessor.sampleRate, sampleRate) + XCTAssert(voiceProcessor.isRecording == true) sleep(3) - try vp.stop() + try voiceProcessor.stop() XCTAssert(frameCount > 0) XCTAssert(errorCount == 0) - XCTAssert(vp.isRecording == false) + XCTAssert(voiceProcessor.isRecording == false) - vp.clearErrorListeners() - vp.clearFrameListeners() + voiceProcessor.clearErrorListeners() + voiceProcessor.clearFrameListeners() frameCount = 0 errorCount = 0 } func testInvalidSetup() throws { - let vp = VoiceProcessor.instance + let voiceProcessor = VoiceProcessor.instance - XCTAssertThrowsError(try vp.start(frameLength: 0, sampleRate: 16000)) { error in + XCTAssertThrowsError(try voiceProcessor.start(frameLength: 0, sampleRate: 16000)) { error in XCTAssert(error is VoiceProcessorArgumentError) } - XCTAssertThrowsError(try vp.start(frameLength: 512, sampleRate: 0)) { error in + XCTAssertThrowsError(try voiceProcessor.start(frameLength: 512, sampleRate: 0)) { error in XCTAssert(error is VoiceProcessorArgumentError) } - try vp.start(frameLength: frameLength, sampleRate: sampleRate) + try voiceProcessor.start(frameLength: frameLength, sampleRate: sampleRate) - XCTAssertThrowsError(try vp.start(frameLength: 1024, sampleRate: 44100)) { error in + XCTAssertThrowsError(try voiceProcessor.start(frameLength: 1024, sampleRate: 44100)) { error in XCTAssert(error is VoiceProcessorArgumentError) } - try vp.stop() + try voiceProcessor.stop() } func testAddRemoveListeners() { - let vp = VoiceProcessor.instance - - let f1 = VoiceProcessorFrameListener({_ in }) - let f2 = VoiceProcessorFrameListener({_ in }) - - let e1 = VoiceProcessorErrorListener({_ in }) - let e2 = VoiceProcessorErrorListener({_ in }) - - vp.addFrameListener(f1); - XCTAssertEqual(vp.numFrameListeners, 1); - vp.addFrameListener(f2); - XCTAssertEqual(vp.numFrameListeners, 2); - vp.removeFrameListener(f1); - XCTAssertEqual(vp.numFrameListeners, 1); - vp.removeFrameListener(f1); - XCTAssertEqual(vp.numFrameListeners, 1); - vp.removeFrameListener(f2); - XCTAssertEqual(vp.numFrameListeners, 0); - - let fs: [VoiceProcessorFrameListener] = [f1, f2]; - vp.addFrameListeners(fs); - XCTAssertEqual(vp.numFrameListeners, 2); - vp.removeFrameListeners(fs); - XCTAssertEqual(vp.numFrameListeners, 0); - vp.addFrameListeners(fs); - XCTAssertEqual(vp.numFrameListeners, 2); - vp.clearFrameListeners(); - XCTAssertEqual(vp.numFrameListeners, 0); - - vp.addErrorListener(e1); - XCTAssertEqual(vp.numErrorListeners, 1); - vp.addErrorListener(e2); - XCTAssertEqual(vp.numErrorListeners, 2); - vp.removeErrorListener(e1); - XCTAssertEqual(vp.numErrorListeners, 1); - vp.removeErrorListener(e1); - XCTAssertEqual(vp.numErrorListeners, 1); - vp.removeErrorListener(e2); - XCTAssertEqual(vp.numErrorListeners, 0); - vp.addErrorListener(e1); - XCTAssertEqual(vp.numErrorListeners, 1); - vp.clearErrorListeners(); - XCTAssertEqual(vp.numErrorListeners, 0); + let voiceProcessor = VoiceProcessor.instance + + let frameListener1 = VoiceProcessorFrameListener({ _ in }) + let frameListener2 = VoiceProcessorFrameListener({ _ in }) + + let errorListener1 = 
VoiceProcessorErrorListener({ _ in }) + let errorListener2 = VoiceProcessorErrorListener({ _ in }) + + voiceProcessor.addFrameListener(frameListener1) + XCTAssertEqual(voiceProcessor.numFrameListeners, 1) + voiceProcessor.addFrameListener(frameListener2) + XCTAssertEqual(voiceProcessor.numFrameListeners, 2) + voiceProcessor.removeFrameListener(frameListener1) + XCTAssertEqual(voiceProcessor.numFrameListeners, 1) + voiceProcessor.removeFrameListener(frameListener1) + XCTAssertEqual(voiceProcessor.numFrameListeners, 1) + voiceProcessor.removeFrameListener(frameListener2) + XCTAssertEqual(voiceProcessor.numFrameListeners, 0) + + let frameListeners: [VoiceProcessorFrameListener] = [frameListener1, frameListener2] + voiceProcessor.addFrameListeners(frameListeners) + XCTAssertEqual(voiceProcessor.numFrameListeners, 2) + voiceProcessor.removeFrameListeners(frameListeners) + XCTAssertEqual(voiceProcessor.numFrameListeners, 0) + voiceProcessor.addFrameListeners(frameListeners) + XCTAssertEqual(voiceProcessor.numFrameListeners, 2) + voiceProcessor.clearFrameListeners() + XCTAssertEqual(voiceProcessor.numFrameListeners, 0) + + voiceProcessor.addErrorListener(errorListener1) + XCTAssertEqual(voiceProcessor.numErrorListeners, 1) + voiceProcessor.addErrorListener(errorListener2) + XCTAssertEqual(voiceProcessor.numErrorListeners, 2) + voiceProcessor.removeErrorListener(errorListener1) + XCTAssertEqual(voiceProcessor.numErrorListeners, 1) + voiceProcessor.removeErrorListener(errorListener1) + XCTAssertEqual(voiceProcessor.numErrorListeners, 1) + voiceProcessor.removeErrorListener(errorListener2) + XCTAssertEqual(voiceProcessor.numErrorListeners, 0) + voiceProcessor.addErrorListener(errorListener1) + XCTAssertEqual(voiceProcessor.numErrorListeners, 1) + voiceProcessor.clearErrorListeners() + XCTAssertEqual(voiceProcessor.numErrorListeners, 0) } } diff --git a/Example/ios-voice-processor/AppDelegate.swift b/Example/ios-voice-processor/AppDelegate.swift index f93e9ca..eca6dc9 100644 --- a/Example/ios-voice-processor/AppDelegate.swift +++ b/Example/ios-voice-processor/AppDelegate.swift @@ -13,8 +13,9 @@ import UIKit class AppDelegate: UIResponder, UIApplicationDelegate { var window: UIWindow? - - func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { + func application( + _ application: UIApplication, + didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { // Override point for customization after application launch. 
return true } diff --git a/Example/ios-voice-processor/VUMeterView.swift b/Example/ios-voice-processor/VUMeterView.swift index 3faf3a3..ac0ad9b 100644 --- a/Example/ios-voice-processor/VUMeterView.swift +++ b/Example/ios-voice-processor/VUMeterView.swift @@ -11,24 +11,24 @@ import UIKit class VUMeterView: UIView { - private let DBFS_OFFSET = 60.0 - private let VOLUME_HISTORY_CAPACITY = 5 - + private let dbfsOffset = 60.0 + private let volumeHistoryCapacity = 5 + private var volumeHistory: [Double] = [] private var volumeAverage: Double = 0 public func addVolumeValue(dbfsValue: Double) { - - var adjustedValue = dbfsValue + DBFS_OFFSET - adjustedValue = (max(0.0, adjustedValue) / DBFS_OFFSET) + + var adjustedValue = dbfsValue + dbfsOffset + adjustedValue = (max(0.0, adjustedValue) / dbfsOffset) adjustedValue = min(1.0, adjustedValue) - - if volumeHistory.count == VOLUME_HISTORY_CAPACITY { + + if volumeHistory.count == volumeHistoryCapacity { volumeHistory.removeFirst() } volumeHistory.append(adjustedValue) volumeAverage = volumeHistory.reduce(0, +) / Double(volumeHistory.count) - + setNeedsDisplay() } @@ -39,7 +39,7 @@ class VUMeterView: UIView { let emptyRect = CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height) context?.setFillColor(UIColor.gray.cgColor) context?.fill(emptyRect) - + let meterRect = CGRect(x: 0, y: 0, width: bounds.width * CGFloat(volumeAverage), height: bounds.height) context?.setFillColor(UIColor(red: 0.216, green: 0.49, blue: 1, alpha: 1).cgColor) context?.fill(meterRect) diff --git a/Example/ios-voice-processor/ViewController.swift b/Example/ios-voice-processor/ViewController.swift index c6f1bd8..2dcd102 100644 --- a/Example/ios-voice-processor/ViewController.swift +++ b/Example/ios-voice-processor/ViewController.swift @@ -16,10 +16,10 @@ class ViewController: UIViewController { @IBOutlet weak var startButton: UIButton! @IBOutlet weak var vuMeterView: VUMeterView! 
- private let FRAME_LENGTH: UInt32 = 512 - private let SAMPLE_RATE: UInt32 = 16000 - private let DUMP_AUDIO: Bool = false - + private let frameLength: UInt32 = 512 + private let sampleRate: UInt32 = 16000 + private let dumpAudio: Bool = false + private var isRecording: Bool = false private var recordedAudio: [Int16] = [] @@ -30,39 +30,42 @@ class ViewController: UIViewController { let startButtonSize = CGSize(width: 120, height: 120) startButton.frame.size = startButtonSize - startButton.frame.origin = - CGPoint(x: (viewSize.width - startButtonSize.width) / 2, y: (viewSize.height - startButtonSize.height - 40)) + startButton.frame.origin = CGPoint( + x: (viewSize.width - startButtonSize.width) / 2, + y: (viewSize.height - startButtonSize.height - 40)) startButton.layer.cornerRadius = 0.5 * startButton.bounds.size.width startButton.clipsToBounds = true let vuMeterSize = CGSize(width: view.frame.width - 20, height: 80) vuMeterView.frame.size = vuMeterSize - vuMeterView.frame.origin = CGPoint(x: (viewSize.width - vuMeterSize.width) / 2, y: (viewSize.height - vuMeterSize.height) / 2) + vuMeterView.frame.origin = CGPoint( + x: (viewSize.width - vuMeterSize.width) / 2, + y: (viewSize.height - vuMeterSize.height) / 2) vuMeterView.clipsToBounds = true - + let frameListener = VoiceProcessorFrameListener(audioCallback) VoiceProcessor.instance.addFrameListener(frameListener) - + let errorListener = VoiceProcessorErrorListener(errorCallback) VoiceProcessor.instance.addErrorListener(errorListener) } - private func audioCallback(frame: [Int16]) -> Void { - if DUMP_AUDIO { + private func audioCallback(frame: [Int16]) { + if dumpAudio { recordedAudio.append(contentsOf: frame) } let sum = frame.reduce(0) { $0 + (Double($1) * Double($1)) } let rms = sqrt(sum / Double(frame.count)) - + let dbfs = 20 * log10(rms / Double(INT16_MAX)) - + DispatchQueue.main.async { self.vuMeterView.addVolumeValue(dbfsValue: dbfs) } } - - private func errorCallback(error: VoiceProcessorError) -> Void { + + private func errorCallback(error: VoiceProcessorError) { DispatchQueue.main.async { let alert = UIAlertController( title: "Alert", @@ -72,7 +75,7 @@ class ViewController: UIViewController { self.present(alert, animated: true, completion: nil) } } - + @IBAction func toggleStartButton(_ sender: UIButton) { if !isRecording { startRecording() @@ -88,18 +91,18 @@ class ViewController: UIViewController { return } - if DUMP_AUDIO { + if dumpAudio { recordedAudio.removeAll() } - try VoiceProcessor.instance.start(frameLength: FRAME_LENGTH, sampleRate: SAMPLE_RATE) + try VoiceProcessor.instance.start(frameLength: frameLength, sampleRate: sampleRate) } catch { let alert = UIAlertController( title: "Alert", message: "Could not start voice processor.", preferredStyle: UIAlertController.Style.alert) alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.default, handler: nil)) - self.present(alert, animated: true, completion: nil) + present(alert, animated: true, completion: nil) return } isRecording = true @@ -115,12 +118,12 @@ class ViewController: UIViewController { message: "Could not stop voice processor.", preferredStyle: UIAlertController.Style.alert) alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.default, handler: nil)) - self.present(alert, animated: true, completion: nil) + present(alert, animated: true, completion: nil) return } isRecording = false - if DUMP_AUDIO { + if dumpAudio { do { try dumpAudio(audioData: recordedAudio, audioFileName: "ios_voice_processor.wav") } catch { @@ -130,7 
+133,7 @@ class ViewController: UIViewController { startButton.setTitle("START", for: UIControl.State.normal) } - private func onUserPermissionResponse(isGranted: Bool) -> Void { + private func onUserPermissionResponse(isGranted: Bool) { DispatchQueue.main.async { if isGranted { self.startRecording() @@ -158,7 +161,7 @@ class ViewController: UIViewController { } let audioFormat = AVAudioFormat( commonFormat: .pcmFormatInt16, - sampleRate: Double(SAMPLE_RATE), + sampleRate: Double(sampleRate), channels: 1, interleaved: true)! @@ -175,4 +178,3 @@ class ViewController: UIViewController { try audioFile.write(from: writeBuffer) } } - diff --git a/README.md b/README.md index 39002b4..2467293 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ To enable recording with your iOS device's microphone you must add the following [Permission explanation] ``` -See our [example app](./example) or [this guide](https://developer.apple.com/documentation/avfaudio/avaudiosession/1616601-requestrecordpermission) for how to properly request this permission from your users. +See our [example app](example) or [this guide](https://developer.apple.com/documentation/avfaudio/avaudiosession/1616601-requestrecordpermission) for how to properly request this permission from your users. ## Usage @@ -121,7 +121,7 @@ voiceProcessor.clearFrameListeners(); ## Example -The [iOS Voice Processor app](./example) demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`. +The [iOS Voice Processor app](example) demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`. ## Releases diff --git a/VoiceProcessor.swift b/VoiceProcessor.swift index cf7937b..073a449 100644 --- a/VoiceProcessor.swift +++ b/VoiceProcessor.swift @@ -10,7 +10,7 @@ import AVFoundation /// Typealias for the callback function that handles frames of audio data. -public typealias VoiceProcessorFrameCallback = ([Int16]) -> () +public typealias VoiceProcessorFrameCallback = ([Int16]) -> Void /// Listener class for receiving audio frames from `VoiceProcessor` via the `onFrame` property. public class VoiceProcessorFrameListener { @@ -25,14 +25,12 @@ public class VoiceProcessorFrameListener { /// Function called when a frame of audio is received. public var onFrame: VoiceProcessorFrameCallback { - get { - callback_ - } + callback_ } } /// Typealias for the callback function that handles errors that are emitted from `VoiceProcessor`. -public typealias VoiceProcessorErrorCallback = (VoiceProcessorError) -> () +public typealias VoiceProcessorErrorCallback = (VoiceProcessorError) -> Void /// Listener class for receiving errors from `VoiceProcessor` via the `onError` property. public class VoiceProcessorErrorListener { @@ -47,9 +45,7 @@ public class VoiceProcessorErrorListener { /// Function called when a `VoiceProcessorError` occurs. public var onError: VoiceProcessorErrorCallback { - get { - callback_ - } + callback_ } } @@ -70,8 +66,8 @@ public class VoiceProcessor { private var errorListeners: [VoiceProcessorErrorListener] = [] private var isRecording_: Bool = false - private var frameLength_: UInt32? = nil - private var sampleRate_: UInt32? = nil + private var frameLength_: UInt32? + private var sampleRate_: UInt32? /// A boolean value indicating if the `VoiceProcessor` is currently recording audio. public var isRecording: Bool { @@ -167,7 +163,7 @@ public class VoiceProcessor { lock.unlock() } - // Adds an error listener. + /// Adds an error listener. 
/// /// - Parameter listener: The `VoiceProcessorErrorListener` to be added as an error listener. public func addErrorListener(_ listener: VoiceProcessorErrorListener) { @@ -279,7 +275,7 @@ public class VoiceProcessor { } private func createAudioQueueCallback() -> AudioQueueInputCallback { - { userData, queue, bufferRef, startTimeRef, numPackets, packetDescriptions in + { userData, queue, bufferRef, _, numPackets, _ in let `self` = Unmanaged.fromOpaque(userData!).takeUnretainedValue() guard let frameLength = self.frameLength_ else { @@ -312,7 +308,7 @@ public class VoiceProcessor { self.onError( VoiceProcessorReadError( """ - Circular buffer returned a frame of + Circular buffer returned a frame of size \(frame.count) (frameLength is \(frameLength)) """ )) diff --git a/VoiceProcessorBuffer.swift b/VoiceProcessorBuffer.swift index bc9cc89..a22f341 100644 --- a/VoiceProcessorBuffer.swift +++ b/VoiceProcessorBuffer.swift @@ -35,16 +35,17 @@ public class VoiceProcessorBuffer { numOverwrite += 1 } } - + if numOverwrite > 0 { throw VoiceProcessorReadError("Buffer overflow occurred - \(numOverwrite) samples dropped.") } } - + /// Reads a specified number of audio samples from the circular buffer. /// /// - Parameter count: The number of samples to read from the buffer. - /// - Returns: An array of audio samples read from the buffer. Will either be the requested amount, or however many are available if that is less than `count`. + /// - Returns: An array of audio samples read from the buffer. + /// Will either be the requested amount, or however many are available if that is less than `count`. public func read(count: Int) -> [Int16] { var samples: [Int16] = [] diff --git a/VoiceProcessorErrors.swift b/VoiceProcessorErrors.swift index d88857e..340c049 100644 --- a/VoiceProcessorErrors.swift +++ b/VoiceProcessorErrors.swift @@ -15,13 +15,11 @@ public class VoiceProcessorError: LocalizedError { } public var errorDescription: String? 
{ - return message + message } public var name: String { - get { - return String(describing: type(of: self)) - } + String(describing: type(of: self)) } } @@ -30,4 +28,3 @@ public class VoiceProcessorArgumentError: VoiceProcessorError {} public class VoiceProcessorReadError: VoiceProcessorError {} public class VoiceProcessorRuntimeError: VoiceProcessorError {} - From f843d9fc2b06602b52e83965ac76b8caf6cbc267 Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Thu, 27 Jul 2023 15:02:54 -0700 Subject: [PATCH 10/14] ios demo action --- .github/workflows/ios-demo.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ios-demo.yml b/.github/workflows/ios-demo.yml index b3f406d..1d7b379 100644 --- a/.github/workflows/ios-demo.yml +++ b/.github/workflows/ios-demo.yml @@ -15,12 +15,14 @@ on: - 'example/**' - '!example/README.md' +defaults: + run: + working-directory: example + jobs: - build-demo: + build: + name: Build runs-on: macos-latest - defaults: - run: - working-directory: example steps: - name: Checkout From dcc2b3dc59fb787808203a9b698b507d0316d59f Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Thu, 27 Jul 2023 15:06:41 -0700 Subject: [PATCH 11/14] rename --- .../project.xcworkspace/contents.xcworkspacedata | 7 ------- README.md | 4 ++-- {Example => demo}/Podfile | 0 {Example => demo}/Podfile.lock | 0 {Example => demo}/README.md | 0 .../VoiceProcessorBufferTests.swift | 0 .../VoiceProcessorTests/VoiceProcessorTests.swift | 0 .../ios-voice-processor.xcodeproj/project.pbxproj | 0 .../xcschemes/ios-voice-processor-Example.xcscheme | 0 .../ios-voice-processor/AppDelegate.swift | 0 .../ios-voice-processor/Base.lproj/Main.storyboard | 0 .../AppIcon.appiconset/Contents.json | 0 .../AppIcon.appiconset/pv_circle_512-1024.png | Bin .../AppIcon.appiconset/pv_circle_512-20.png | Bin .../AppIcon.appiconset/pv_circle_512-20@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-20@3x.png | Bin .../AppIcon.appiconset/pv_circle_512-29.png | Bin .../AppIcon.appiconset/pv_circle_512-29@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-29@3x.png | Bin .../AppIcon.appiconset/pv_circle_512-40.png | Bin .../AppIcon.appiconset/pv_circle_512-40@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-40@3x.png | Bin .../AppIcon.appiconset/pv_circle_512-60@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-60@3x.png | Bin .../AppIcon.appiconset/pv_circle_512-76.png | Bin .../AppIcon.appiconset/pv_circle_512-76@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-83.5@2x.png | Bin .../Images.xcassets/Contents.json | 0 {Example => demo}/ios-voice-processor/Info.plist | 0 .../ios-voice-processor/VUMeterView.swift | 0 .../ios-voice-processor/ViewController.swift | 0 31 files changed, 2 insertions(+), 9 deletions(-) delete mode 100644 Example/ios-voice-processor.xcodeproj/project.xcworkspace/contents.xcworkspacedata rename {Example => demo}/Podfile (100%) rename {Example => demo}/Podfile.lock (100%) rename {Example => demo}/README.md (100%) rename {Example => demo}/VoiceProcessorTests/VoiceProcessorBufferTests.swift (100%) rename {Example => demo}/VoiceProcessorTests/VoiceProcessorTests.swift (100%) rename {Example => demo}/ios-voice-processor.xcodeproj/project.pbxproj (100%) rename {Example => demo}/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme (100%) rename {Example => demo}/ios-voice-processor/AppDelegate.swift (100%) rename {Example => demo}/ios-voice-processor/Base.lproj/Main.storyboard (100%) rename {Example =>
demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png (100%) rename {Example => demo}/ios-voice-processor/Images.xcassets/Contents.json (100%) rename {Example => demo}/ios-voice-processor/Info.plist (100%) rename {Example => demo}/ios-voice-processor/VUMeterView.swift (100%) rename {Example => demo}/ios-voice-processor/ViewController.swift (100%) diff --git a/Example/ios-voice-processor.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/Example/ios-voice-processor.xcodeproj/project.xcworkspace/contents.xcworkspacedata deleted file mode 100644 index f52bf78..0000000 --- a/Example/ios-voice-processor.xcodeproj/project.xcworkspace/contents.xcworkspacedata +++ /dev/null @@ -1,7 +0,0 @@ - - - - - diff --git a/README.md b/README.md index 2467293..39002b4 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ To enable recording with your iOS device's microphone you must add the following [Permission explanation] ``` -See our [example app](example) or [this guide](https://developer.apple.com/documentation/avfaudio/avaudiosession/1616601-requestrecordpermission) for how to properly request this permission from your users. +See our [example app](./example) or [this guide](https://developer.apple.com/documentation/avfaudio/avaudiosession/1616601-requestrecordpermission) for how to properly request this permission from your users. ## Usage @@ -121,7 +121,7 @@ voiceProcessor.clearFrameListeners(); ## Example -The [iOS Voice Processor app](example) demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`. +The [iOS Voice Processor app](./example) demonstrates how to ask for user permissions and capture output from the `VoiceProcessor`. 
## Releases diff --git a/Example/Podfile b/demo/Podfile similarity index 100% rename from Example/Podfile rename to demo/Podfile diff --git a/Example/Podfile.lock b/demo/Podfile.lock similarity index 100% rename from Example/Podfile.lock rename to demo/Podfile.lock diff --git a/Example/README.md b/demo/README.md similarity index 100% rename from Example/README.md rename to demo/README.md diff --git a/Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift b/demo/VoiceProcessorTests/VoiceProcessorBufferTests.swift similarity index 100% rename from Example/VoiceProcessorTests/VoiceProcessorBufferTests.swift rename to demo/VoiceProcessorTests/VoiceProcessorBufferTests.swift diff --git a/Example/VoiceProcessorTests/VoiceProcessorTests.swift b/demo/VoiceProcessorTests/VoiceProcessorTests.swift similarity index 100% rename from Example/VoiceProcessorTests/VoiceProcessorTests.swift rename to demo/VoiceProcessorTests/VoiceProcessorTests.swift diff --git a/Example/ios-voice-processor.xcodeproj/project.pbxproj b/demo/ios-voice-processor.xcodeproj/project.pbxproj similarity index 100% rename from Example/ios-voice-processor.xcodeproj/project.pbxproj rename to demo/ios-voice-processor.xcodeproj/project.pbxproj diff --git a/Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme b/demo/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme similarity index 100% rename from Example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme rename to demo/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme diff --git a/Example/ios-voice-processor/AppDelegate.swift b/demo/ios-voice-processor/AppDelegate.swift similarity index 100% rename from Example/ios-voice-processor/AppDelegate.swift rename to demo/ios-voice-processor/AppDelegate.swift diff --git a/Example/ios-voice-processor/Base.lproj/Main.storyboard b/demo/ios-voice-processor/Base.lproj/Main.storyboard similarity index 100% rename from Example/ios-voice-processor/Base.lproj/Main.storyboard rename to demo/ios-voice-processor/Base.lproj/Main.storyboard diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png similarity index 100% rename from 
Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png 
similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png diff --git a/Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png b/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png rename to demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png diff --git a/Example/ios-voice-processor/Images.xcassets/Contents.json b/demo/ios-voice-processor/Images.xcassets/Contents.json similarity index 100% rename from Example/ios-voice-processor/Images.xcassets/Contents.json rename to demo/ios-voice-processor/Images.xcassets/Contents.json diff --git a/Example/ios-voice-processor/Info.plist b/demo/ios-voice-processor/Info.plist similarity index 100% rename from Example/ios-voice-processor/Info.plist rename to demo/ios-voice-processor/Info.plist diff --git a/Example/ios-voice-processor/VUMeterView.swift b/demo/ios-voice-processor/VUMeterView.swift similarity index 100% rename from Example/ios-voice-processor/VUMeterView.swift rename to demo/ios-voice-processor/VUMeterView.swift diff --git a/Example/ios-voice-processor/ViewController.swift b/demo/ios-voice-processor/ViewController.swift similarity index 100% rename from Example/ios-voice-processor/ViewController.swift rename to demo/ios-voice-processor/ViewController.swift From 8446d6bcd0e8ec733dd54e07b2629f508f819106 Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Thu, 27 Jul 2023 15:07:02 -0700 Subject: [PATCH 12/14] rename --- {demo => example}/Podfile | 0 {demo => example}/Podfile.lock | 0 {demo => example}/README.md | 0 .../VoiceProcessorBufferTests.swift | 0 .../VoiceProcessorTests/VoiceProcessorTests.swift | 0 .../ios-voice-processor.xcodeproj/project.pbxproj | 0 .../xcschemes/ios-voice-processor-Example.xcscheme | 0 .../ios-voice-processor/AppDelegate.swift | 0 .../ios-voice-processor/Base.lproj/Main.storyboard | 0 .../AppIcon.appiconset/Contents.json | 0 .../AppIcon.appiconset/pv_circle_512-1024.png | Bin .../AppIcon.appiconset/pv_circle_512-20.png | Bin .../AppIcon.appiconset/pv_circle_512-20@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-20@3x.png | Bin .../AppIcon.appiconset/pv_circle_512-29.png | Bin .../AppIcon.appiconset/pv_circle_512-29@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-29@3x.png | Bin .../AppIcon.appiconset/pv_circle_512-40.png | Bin .../AppIcon.appiconset/pv_circle_512-40@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-40@3x.png | Bin 
.../AppIcon.appiconset/pv_circle_512-60@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-60@3x.png | Bin .../AppIcon.appiconset/pv_circle_512-76.png | Bin .../AppIcon.appiconset/pv_circle_512-76@2x.png | Bin .../AppIcon.appiconset/pv_circle_512-83.5@2x.png | Bin .../Images.xcassets/Contents.json | 0 {demo => example}/ios-voice-processor/Info.plist | 0 .../ios-voice-processor/VUMeterView.swift | 0 .../ios-voice-processor/ViewController.swift | 0 29 files changed, 0 insertions(+), 0 deletions(-) rename {demo => example}/Podfile (100%) rename {demo => example}/Podfile.lock (100%) rename {demo => example}/README.md (100%) rename {demo => example}/VoiceProcessorTests/VoiceProcessorBufferTests.swift (100%) rename {demo => example}/VoiceProcessorTests/VoiceProcessorTests.swift (100%) rename {demo => example}/ios-voice-processor.xcodeproj/project.pbxproj (100%) rename {demo => example}/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme (100%) rename {demo => example}/ios-voice-processor/AppDelegate.swift (100%) rename {demo => example}/ios-voice-processor/Base.lproj/Main.storyboard (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png (100%) rename {demo => example}/ios-voice-processor/Images.xcassets/Contents.json (100%) rename {demo => example}/ios-voice-processor/Info.plist (100%) rename {demo => example}/ios-voice-processor/VUMeterView.swift (100%) rename {demo => example}/ios-voice-processor/ViewController.swift (100%) diff --git a/demo/Podfile b/example/Podfile similarity index 100% rename from demo/Podfile rename to example/Podfile diff --git a/demo/Podfile.lock b/example/Podfile.lock similarity index 100% rename from demo/Podfile.lock rename to example/Podfile.lock diff --git a/demo/README.md b/example/README.md similarity index 100% rename from demo/README.md rename to 
example/README.md diff --git a/demo/VoiceProcessorTests/VoiceProcessorBufferTests.swift b/example/VoiceProcessorTests/VoiceProcessorBufferTests.swift similarity index 100% rename from demo/VoiceProcessorTests/VoiceProcessorBufferTests.swift rename to example/VoiceProcessorTests/VoiceProcessorBufferTests.swift diff --git a/demo/VoiceProcessorTests/VoiceProcessorTests.swift b/example/VoiceProcessorTests/VoiceProcessorTests.swift similarity index 100% rename from demo/VoiceProcessorTests/VoiceProcessorTests.swift rename to example/VoiceProcessorTests/VoiceProcessorTests.swift diff --git a/demo/ios-voice-processor.xcodeproj/project.pbxproj b/example/ios-voice-processor.xcodeproj/project.pbxproj similarity index 100% rename from demo/ios-voice-processor.xcodeproj/project.pbxproj rename to example/ios-voice-processor.xcodeproj/project.pbxproj diff --git a/demo/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme b/example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme similarity index 100% rename from demo/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme rename to example/ios-voice-processor.xcodeproj/xcshareddata/xcschemes/ios-voice-processor-Example.xcscheme diff --git a/demo/ios-voice-processor/AppDelegate.swift b/example/ios-voice-processor/AppDelegate.swift similarity index 100% rename from demo/ios-voice-processor/AppDelegate.swift rename to example/ios-voice-processor/AppDelegate.swift diff --git a/demo/ios-voice-processor/Base.lproj/Main.storyboard b/example/ios-voice-processor/Base.lproj/Main.storyboard similarity index 100% rename from demo/ios-voice-processor/Base.lproj/Main.storyboard rename to example/ios-voice-processor/Base.lproj/Main.storyboard diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/Contents.json diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-1024.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png similarity index 
100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png 
b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png diff --git a/demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png b/example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png rename to example/ios-voice-processor/Images.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png diff --git a/demo/ios-voice-processor/Images.xcassets/Contents.json b/example/ios-voice-processor/Images.xcassets/Contents.json similarity index 100% rename from demo/ios-voice-processor/Images.xcassets/Contents.json rename to example/ios-voice-processor/Images.xcassets/Contents.json diff --git a/demo/ios-voice-processor/Info.plist b/example/ios-voice-processor/Info.plist similarity index 100% rename from demo/ios-voice-processor/Info.plist rename to example/ios-voice-processor/Info.plist diff --git a/demo/ios-voice-processor/VUMeterView.swift b/example/ios-voice-processor/VUMeterView.swift similarity index 100% rename from demo/ios-voice-processor/VUMeterView.swift rename to example/ios-voice-processor/VUMeterView.swift diff --git a/demo/ios-voice-processor/ViewController.swift b/example/ios-voice-processor/ViewController.swift similarity index 100% rename from demo/ios-voice-processor/ViewController.swift rename to example/ios-voice-processor/ViewController.swift From a236e225fe6db0d2ded4185705f4b52f3bcd16fd Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Thu, 27 Jul 2023 15:09:30 -0700 Subject: [PATCH 13/14] fix --- .github/workflows/ios-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ios-tests.yml b/.github/workflows/ios-tests.yml index e0929a9..5684974 100644 --- a/.github/workflows/ios-tests.yml +++ b/.github/workflows/ios-tests.yml @@ -24,7 +24,7 @@ defaults: jobs: build: name: Run iOS Tests - runs-on: mac-latest + runs-on: macos-latest steps: - name: Checkout From c8f26748b892d79a521a5bffc6fb23cfbc286213 Mon Sep 17 00:00:00 2001 From: Ian Lavery Date: Mon, 31 Jul 2023 11:41:32 -0700 Subject: [PATCH 14/14] Update README.md Co-authored-by: Eric Mikulin --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 39002b4..0a337ef 100644 --- a/README.md +++ b/README.md @@ -94,7 +94,7 @@ do { Stop audio capture: ```swift do { - try voiceProcessor.stop(frameLength: 512, sampleRate: 16000); + try voiceProcessor.stop(); } catch { } ```
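For reference, the following is a minimal usage sketch of the `VoiceProcessor` API as it stands at the end of this patch series. It is assembled from the signatures visible in the diffs above (`VoiceProcessor.instance`, `addFrameListener(_:)`, `addErrorListener(_:)`, `start(frameLength:sampleRate:)`, and the argument-free `stop()`); the listener closures, the print statements, and the 512/16000 values are illustrative assumptions, not code taken from the patches:

```swift
import ios_voice_processor

// Frame listener: receives each captured frame as an [Int16] sample buffer.
let frameListener = VoiceProcessorFrameListener { frame in
    print("Received \(frame.count) samples")
}
VoiceProcessor.instance.addFrameListener(frameListener)

// Error listener: receives any VoiceProcessorError emitted while recording.
let errorListener = VoiceProcessorErrorListener { error in
    print("VoiceProcessor error: \(error.errorDescription ?? "unknown")")
}
VoiceProcessor.instance.addErrorListener(errorListener)

do {
    // Frame length and sample rate match the values used by the example app.
    try VoiceProcessor.instance.start(frameLength: 512, sampleRate: 16000)
    // ... frames are now delivered to frameListener; dispatch to the main
    // queue before touching UI, as the example app's ViewController does ...
    try VoiceProcessor.instance.stop()
} catch {
    print("Failed to toggle audio capture: \(error)")
}
```

Note that after PATCH 14/14 only `start` takes the frame length and sample rate; `stop()` takes no arguments. The singleton-plus-listeners shape lets multiple consumers share a single capture session instead of each opening the microphone separately.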