diff --git a/.github/workflows/swift-codestyle.yml b/.github/workflows/swift-codestyle.yml new file mode 100644 index 0000000..9bc2da2 --- /dev/null +++ b/.github/workflows/swift-codestyle.yml @@ -0,0 +1,24 @@ +name: Swift Codestyle + +on: + workflow_dispatch: + push: + branches: [ main ] + paths: + - '**/*.swift' + pull_request: + branches: [ main, 'v[0-9]+.[0-9]+' ] + paths: + - '**/*.swift' + +jobs: + check-swift-codestyle: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Check swift codestyle + uses: norio-nomura/action-swiftlint@3.2.1 + with: + args: lint --config res/.lint/swift/.swiftlint.yml --strict diff --git a/recipes/llm-voice-assistant/README.md b/recipes/llm-voice-assistant/README.md index 6e9d5d1..c8786ad 100644 --- a/recipes/llm-voice-assistant/README.md +++ b/recipes/llm-voice-assistant/README.md @@ -13,3 +13,4 @@ Hands-free voice assistant powered by a large language model (LLM), all voice re - [Python](python) - [Web](web) +- [iOS](ios) diff --git a/recipes/llm-voice-assistant/ios/.gitignore b/recipes/llm-voice-assistant/ios/.gitignore new file mode 100644 index 0000000..db4f90a --- /dev/null +++ b/recipes/llm-voice-assistant/ios/.gitignore @@ -0,0 +1,22 @@ +# Exclude the build directory +build/* + +# Exclude temp nibs and swap files +*~.nib +*.swp + +# Exclude OS X folder attributes +.DS_Store + +# Exclude user-specific Xcode 3 and 4 files +*.mode1 +*.mode1v3 +*.mode2v3 +*.perspective +*.perspectivev3 +*.pbxuser +*.xcworkspace +project.xcworkspace +contents.xcworkspacedata +xcuserdata +Pods \ No newline at end of file diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo.xcodeproj/project.pbxproj b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo.xcodeproj/project.pbxproj new file mode 100644 index 0000000..bf795f2 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo.xcodeproj/project.pbxproj @@ -0,0 +1,450 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 54; + objects = { + +/* Begin PBXBuildFile section */ + 02A11949268D39A700A2AC99 /* LLMVoiceAssistantDemoApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 02A11948268D39A700A2AC99 /* LLMVoiceAssistantDemoApp.swift */; }; + 02A1194B268D39A700A2AC99 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 02A1194A268D39A700A2AC99 /* ContentView.swift */; }; + 02A1194D268D39AB00A2AC99 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 02A1194C268D39AB00A2AC99 /* Assets.xcassets */; }; + 02A1195F268D3FD600A2AC99 /* ViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 02A1195E268D3FD600A2AC99 /* ViewModel.swift */; }; + A3FB5D9F86DC48A026E79742 /* libPods-LLMVoiceAssistantDemo.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 8116446C19FDC139F2B56F5C /* libPods-LLMVoiceAssistantDemo.a */; }; + C789D24B2BEA8E5D005FDB10 /* LoadModelView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C789D24A2BEA8E5D005FDB10 /* LoadModelView.swift */; }; + C789D24D2BEA8EAE005FDB10 /* ChatView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C789D24C2BEA8EAE005FDB10 /* ChatView.swift */; }; + C789D2512BEAD752005FDB10 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = C789D2502BEAD752005FDB10 /* Constants.swift */; }; + C7E87DC22C07C70A00C32367 /* orca_params_female.pv in Resources */ = {isa = PBXBuildFile; fileRef = C7E87DC12C07C70A00C32367 /* orca_params_female.pv */; }; + C7E87DC42C07C71B00C32367 /* cheetah_params.pv in Resources */ = {isa = PBXBuildFile; fileRef = C7E87DC32C07C71B00C32367 /* cheetah_params.pv */; }; + C7E87DC82C07E83800C32367 /* AudioPlayerStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = C7E87DC72C07E83800C32367 /* AudioPlayerStream.swift */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 02A11945268D39A700A2AC99 /* LLMVoiceAssistantDemo.app */ = {isa = PBXFileReference; explicitFileType = 
wrapper.application; includeInIndex = 0; path = LLMVoiceAssistantDemo.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 02A11948268D39A700A2AC99 /* LLMVoiceAssistantDemoApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LLMVoiceAssistantDemoApp.swift; sourceTree = ""; }; + 02A1194A268D39A700A2AC99 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + 02A1194C268D39AB00A2AC99 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 02A11951268D39AB00A2AC99 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 02A1195E268D3FD600A2AC99 /* ViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewModel.swift; sourceTree = ""; }; + 42D19EE4769392460A93BB89 /* Pods-LLMVoiceAssistantDemo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-LLMVoiceAssistantDemo.release.xcconfig"; path = "Target Support Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo.release.xcconfig"; sourceTree = ""; }; + 505D746A281AD13F6F6D5D11 /* Pods-LLMVoiceAssistantDemo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-LLMVoiceAssistantDemo.debug.xcconfig"; path = "Target Support Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo.debug.xcconfig"; sourceTree = ""; }; + 8116446C19FDC139F2B56F5C /* libPods-LLMVoiceAssistantDemo.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-LLMVoiceAssistantDemo.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + C789D24A2BEA8E5D005FDB10 /* LoadModelView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LoadModelView.swift; sourceTree = ""; }; + 
C789D24C2BEA8EAE005FDB10 /* ChatView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatView.swift; sourceTree = ""; }; + C789D2502BEAD752005FDB10 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; + C7E87DC12C07C70A00C32367 /* orca_params_female.pv */ = {isa = PBXFileReference; lastKnownFileType = file; path = orca_params_female.pv; sourceTree = ""; }; + C7E87DC32C07C71B00C32367 /* cheetah_params.pv */ = {isa = PBXFileReference; lastKnownFileType = file; path = cheetah_params.pv; sourceTree = ""; }; + C7E87DC72C07E83800C32367 /* AudioPlayerStream.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioPlayerStream.swift; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 02A11942268D39A700A2AC99 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + A3FB5D9F86DC48A026E79742 /* libPods-LLMVoiceAssistantDemo.a in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 02A1193C268D39A700A2AC99 = { + isa = PBXGroup; + children = ( + 02A11947268D39A700A2AC99 /* LLMVoiceAssistantDemo */, + 02A11946268D39A700A2AC99 /* Products */, + 8DB92FF3DC81AB04D3FF7242 /* Pods */, + E6585D6045A189966C133BD8 /* Frameworks */, + ); + sourceTree = ""; + }; + 02A11946268D39A700A2AC99 /* Products */ = { + isa = PBXGroup; + children = ( + 02A11945268D39A700A2AC99 /* LLMVoiceAssistantDemo.app */, + ); + name = Products; + sourceTree = ""; + }; + 02A11947268D39A700A2AC99 /* LLMVoiceAssistantDemo */ = { + isa = PBXGroup; + children = ( + C7E87DC02C07C65C00C32367 /* resources */, + 02A11948268D39A700A2AC99 /* LLMVoiceAssistantDemoApp.swift */, + 02A1194A268D39A700A2AC99 /* ContentView.swift */, + 02A1194C268D39AB00A2AC99 /* Assets.xcassets */, + 
02A11951268D39AB00A2AC99 /* Info.plist */, + 02A1195E268D3FD600A2AC99 /* ViewModel.swift */, + C789D24A2BEA8E5D005FDB10 /* LoadModelView.swift */, + C789D24C2BEA8EAE005FDB10 /* ChatView.swift */, + C789D2502BEAD752005FDB10 /* Constants.swift */, + C7E87DC72C07E83800C32367 /* AudioPlayerStream.swift */, + ); + path = LLMVoiceAssistantDemo; + sourceTree = ""; + }; + 8DB92FF3DC81AB04D3FF7242 /* Pods */ = { + isa = PBXGroup; + children = ( + 505D746A281AD13F6F6D5D11 /* Pods-LLMVoiceAssistantDemo.debug.xcconfig */, + 42D19EE4769392460A93BB89 /* Pods-LLMVoiceAssistantDemo.release.xcconfig */, + ); + path = Pods; + sourceTree = ""; + }; + C7E87DC02C07C65C00C32367 /* resources */ = { + isa = PBXGroup; + children = ( + C7E87DC32C07C71B00C32367 /* cheetah_params.pv */, + C7E87DC12C07C70A00C32367 /* orca_params_female.pv */, + ); + path = resources; + sourceTree = ""; + }; + E6585D6045A189966C133BD8 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 8116446C19FDC139F2B56F5C /* libPods-LLMVoiceAssistantDemo.a */, + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 02A11944268D39A700A2AC99 /* LLMVoiceAssistantDemo */ = { + isa = PBXNativeTarget; + buildConfigurationList = 02A11954268D39AB00A2AC99 /* Build configuration list for PBXNativeTarget "LLMVoiceAssistantDemo" */; + buildPhases = ( + C54BB0EBC41CA3C7A3CD997F /* [CP] Check Pods Manifest.lock */, + 02A11941268D39A700A2AC99 /* Sources */, + 02A11942268D39A700A2AC99 /* Frameworks */, + 02A11943268D39A700A2AC99 /* Resources */, + 466DD26F1FA12E383AC5E702 /* [CP] Embed Pods Frameworks */, + ABFE6A8B8F163462392CE190 /* [CP] Copy Pods Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = LLMVoiceAssistantDemo; + productName = LLMVoiceAssistantDemo; + productReference = 02A11945268D39A700A2AC99 /* LLMVoiceAssistantDemo.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin 
PBXProject section */ + 02A1193D268D39A700A2AC99 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = YES; + LastSwiftUpdateCheck = 1250; + LastUpgradeCheck = 1530; + TargetAttributes = { + 02A11944268D39A700A2AC99 = { + CreatedOnToolsVersion = 12.5; + }; + }; + }; + buildConfigurationList = 02A11940268D39A700A2AC99 /* Build configuration list for PBXProject "LLMVoiceAssistantDemo" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 02A1193C268D39A700A2AC99; + productRefGroup = 02A11946268D39A700A2AC99 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 02A11944268D39A700A2AC99 /* LLMVoiceAssistantDemo */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 02A11943268D39A700A2AC99 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + C7E87DC22C07C70A00C32367 /* orca_params_female.pv in Resources */, + C7E87DC42C07C71B00C32367 /* cheetah_params.pv in Resources */, + 02A1194D268D39AB00A2AC99 /* Assets.xcassets in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 466DD26F1FA12E383AC5E702 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support 
Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; + ABFE6A8B8F163462392CE190 /* [CP] Copy Pods Resources */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo-resources-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Copy Pods Resources"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo-resources-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-LLMVoiceAssistantDemo/Pods-LLMVoiceAssistantDemo-resources.sh\"\n"; + showEnvVarsInLog = 0; + }; + C54BB0EBC41CA3C7A3CD997F /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-LLMVoiceAssistantDemo-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 02A11941268D39A700A2AC99 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + C789D24D2BEA8EAE005FDB10 /* ChatView.swift in Sources */, + C789D24B2BEA8E5D005FDB10 /* LoadModelView.swift in Sources */, + 02A1194B268D39A700A2AC99 /* ContentView.swift in Sources */, + C789D2512BEAD752005FDB10 /* Constants.swift in Sources */, + 02A1195F268D3FD600A2AC99 /* ViewModel.swift in Sources */, + C7E87DC82C07E83800C32367 /* AudioPlayerStream.swift in Sources */, + 02A11949268D39A700A2AC99 /* LLMVoiceAssistantDemoApp.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + 02A11952268D39AB00A2AC99 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + 
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = NO; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 02A11953268D39AB00A2AC99 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + 
CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = NO; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 02A11955268D39AB00A2AC99 /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 505D746A281AD13F6F6D5D11 /* Pods-LLMVoiceAssistantDemo.debug.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_ASSET_PATHS = ""; + DEVELOPMENT_TEAM = EU9HUJJU2X; + ENABLE_PREVIEWS = YES; + ENABLE_USER_SCRIPT_SANDBOXING = NO; + INFOPLIST_FILE = LLMVoiceAssistantDemo/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice.LLMVoiceAssistantDemo; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION 
= 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 02A11956268D39AB00A2AC99 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 42D19EE4769392460A93BB89 /* Pods-LLMVoiceAssistantDemo.release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_ASSET_PATHS = ""; + DEVELOPMENT_TEAM = EU9HUJJU2X; + ENABLE_PREVIEWS = YES; + ENABLE_USER_SCRIPT_SANDBOXING = NO; + INFOPLIST_FILE = LLMVoiceAssistantDemo/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = ai.picovoice.LLMVoiceAssistantDemo; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 02A11940268D39A700A2AC99 /* Build configuration list for PBXProject "LLMVoiceAssistantDemo" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 02A11952268D39AB00A2AC99 /* Debug */, + 02A11953268D39AB00A2AC99 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 02A11954268D39AB00A2AC99 /* Build configuration list for PBXNativeTarget "LLMVoiceAssistantDemo" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 02A11955268D39AB00A2AC99 /* Debug */, + 02A11956268D39AB00A2AC99 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 02A1193D268D39A700A2AC99 /* Project object */; +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo.xcodeproj/xcshareddata/xcschemes/LLMVoiceAssistantDemo.xcscheme b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo.xcodeproj/xcshareddata/xcschemes/LLMVoiceAssistantDemo.xcscheme new 
file mode 100644 index 0000000..1c06755 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo.xcodeproj/xcshareddata/xcschemes/LLMVoiceAssistantDemo.xcscheme @@ -0,0 +1,95 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/Contents.json b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..c400984 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,116 @@ +{ + "images" : [ + { + "size" : "20x20", + "idiom": "iphone", + "filename" : "pv_circle_512-20@2x.png", + "scale": "2x" + }, + { + "size" : "20x20", + "idiom": "iphone", + "filename" : "pv_circle_512-20@3x.png", + "scale": "3x" + }, + { + "size" : "20x20", + "idiom": "ipad", + "filename" : "pv_circle_512-20.png", + "scale": "1x" + }, + { + "size" : "20x20", + "idiom": "ipad", + "filename" : "pv_circle_512-20@2x.png", + "scale": "2x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "pv_circle_512-29@2x.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "pv_circle_512-29@3x.png", + "scale" : "3x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "pv_circle_512-40@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "pv_circle_512-40@3x.png", + "scale" : "3x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "pv_circle_512-60@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "pv_circle_512-60@3x.png", + "scale" : "3x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "pv_circle_512-29.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "pv_circle_512-29@2x.png", + "scale" : "2x" + }, + { 
+ "size" : "40x40", + "idiom" : "ipad", + "filename" : "pv_circle_512-40.png", + "scale" : "1x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "pv_circle_512-40@2x.png", + "scale" : "2x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "pv_circle_512-76.png", + "scale" : "1x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "pv_circle_512-76@2x.png", + "scale" : "2x" + }, + { + "size" : "83.5x83.5", + "idiom" : "ipad", + "filename" : "pv_circle_512-83.5@2x.png", + "scale" : "2x" + }, + { + "size" : "1024x1024", + "idiom" : "ios-marketing", + "filename" : "pv_circle_512-1024.png", + "scale" : "1x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-1024.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-1024.png new file mode 100644 index 0000000..16b7507 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-1024.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20.png new file mode 100644 index 0000000..3e40b9d Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png new file mode 100644 index 0000000..59a7653 Binary files /dev/null and 
b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20@2x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png new file mode 100644 index 0000000..2700745 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-20@3x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29.png new file mode 100644 index 0000000..7d5d3b0 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png new file mode 100644 index 0000000..e1a0c5b Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29@2x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png new file mode 100644 index 0000000..f58bfe5 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-29@3x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40.png 
b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40.png new file mode 100644 index 0000000..59a7653 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png new file mode 100644 index 0000000..5f0b3df Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40@2x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png new file mode 100644 index 0000000..7573c8a Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-40@3x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png new file mode 100644 index 0000000..7573c8a Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-60@2x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png new file mode 100644 index 0000000..fc40879 Binary files /dev/null and 
b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-60@3x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-76.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-76.png new file mode 100644 index 0000000..d450791 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-76.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png new file mode 100644 index 0000000..bac6a6f Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-76@2x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png new file mode 100644 index 0000000..cda4029 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Assets.xcassets/AppIcon.appiconset/pv_circle_512-83.5@2x.png differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/AudioPlayerStream.swift b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/AudioPlayerStream.swift new file mode 100644 index 0000000..413dc24 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/AudioPlayerStream.swift @@ -0,0 +1,87 @@ +// +// Copyright 2024 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. 
+// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. +// + +import Foundation +import AVFoundation + +class AudioPlayerStream { + private let engine = AVAudioEngine() + private let playerNode = AVAudioPlayerNode() + private let mixerNode = AVAudioMixerNode() + + private var pcmBuffers = [AVAudioPCMBuffer]() + public var isPlaying = false + + init(sampleRate: Double) throws { + let audioSession = AVAudioSession.sharedInstance() + try audioSession.setCategory(.playback, mode: .default) + try audioSession.setActive(true) + + let format = AVAudioFormat( + commonFormat: .pcmFormatFloat32, + sampleRate: sampleRate, + channels: AVAudioChannelCount(1), + interleaved: false) + + engine.attach(mixerNode) + engine.connect(mixerNode, to: engine.outputNode, format: format) + + engine.attach(playerNode) + engine.connect(playerNode, to: mixerNode, format: format) + + try engine.start() + } + + func playStreamPCM(_ pcmData: [Int16], completion: @escaping (Bool) -> Void) { + let audioBuffer = AVAudioPCMBuffer( + pcmFormat: playerNode.outputFormat(forBus: 0), frameCapacity: AVAudioFrameCount(pcmData.count))! 
+ + audioBuffer.frameLength = audioBuffer.frameCapacity + let buf = audioBuffer.floatChannelData![0] + for (index, sample) in pcmData.enumerated() { + var convertedSample = Float32(sample) / Float32(Int16.max) + if convertedSample > 1 { + convertedSample = 1 + } + if convertedSample < -1 { + convertedSample = -1 + } + buf[index] = convertedSample + } + + pcmBuffers.append(audioBuffer) + if !isPlaying { + playNextPCMBuffer(completion: completion) + } else { + completion(true) + } + } + + private func playNextPCMBuffer(completion: @escaping (Bool) -> Void) { + guard let pcmData = pcmBuffers.first else { + isPlaying = false + completion(false) + return + } + pcmBuffers.removeFirst() + + playerNode.scheduleBuffer(pcmData) { [weak self] in + self?.playNextPCMBuffer(completion: completion) + } + + playerNode.play() + isPlaying = true + completion(true) + } + + func stopStreamPCM() { + playerNode.stop() + engine.stop() + } +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ChatView.swift b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ChatView.swift new file mode 100644 index 0000000..6c88818 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ChatView.swift @@ -0,0 +1,115 @@ +// +// Copyright 2024 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
+// + +import SwiftUI + +struct ChatView: View { + @ObservedObject var viewModel: ViewModel + + var body: some View { + let isError = viewModel.errorMessage.count > 0 + + ZStack { + VStack(alignment: .center) { + Text("Voice Assistant Demo") + + Spacer() + + resultsBox + + Spacer() + + HStack(alignment: .center) { + Spacer() + if !viewModel.enableGenerateButton { + ProgressView(value: 0).progressViewStyle(CircularProgressViewStyle()) + } + Text(viewModel.statusText).padding(.horizontal, 12) + Spacer() + } + .padding(.horizontal, 24) + + if isError { + Text(viewModel.errorMessage) + .padding() + .foregroundColor(Color.white) + .frame(maxWidth: .infinity) + .background(Constants.dangerRed) + .font(.body) + .opacity(viewModel.errorMessage.isEmpty ? 0 : 1) + .cornerRadius(10) + } + } + .padding(.bottom, 32) + .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity).background(Color.white) + } + } + + var resultsBox: some View { + VStack { + ScrollViewReader { proxy in + ScrollView { + LazyVStack(alignment: .leading) { + ForEach(0.. 0 || + !viewModel.enableGenerateButton || + viewModel.chatText.isEmpty) + } + .padding(.bottom, 12) + } + .frame( + maxWidth: .infinity, + maxHeight: .infinity, + alignment: .topLeading + ) + .background(Constants.backgroundGrey) + .cornerRadius(3.0) + .padding(24) + } +} + +#Preview { + ChatView(viewModel: ViewModel()) +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Constants.swift b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Constants.swift new file mode 100644 index 0000000..4aad4bd --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Constants.swift @@ -0,0 +1,20 @@ +// +// Copyright 2024 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. 
+// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. +// + +import SwiftUI + +class Constants { + public static let activeBlue = Color(red: 55/255, green: 125/255, blue: 1, opacity: 1) + public static let dangerRed = Color(red: 1, green: 14/255, blue: 14/255, opacity: 1) + public static let secondaryGrey = Color(red: 118/255, green: 131/255, blue: 142/255, opacity: 1) + public static let backgroundGrey = Color(red: 118/255, green: 131/255, blue: 142/255, opacity: 0.1) + + public static let btnColor = {(enabled: Bool) in (!enabled) ? secondaryGrey : activeBlue} + public static let errorMsgColor = {(enabled: Bool) in (!enabled) ? dangerRed : Color.white} +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ContentView.swift b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ContentView.swift new file mode 100644 index 0000000..c9f19f5 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ContentView.swift @@ -0,0 +1,28 @@ +// +// Copyright 2024 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
+// + +import SwiftUI + +struct ContentView: View { + @StateObject var viewModel = ViewModel() + + @State var showSidebar = false + + var body: some View { + if !viewModel.enginesLoaded { + LoadModelView(viewModel: viewModel) + } else { + ChatView(viewModel: viewModel) + } + } +} + +#Preview { + ContentView() +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Info.plist b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Info.plist new file mode 100644 index 0000000..90b26c4 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/Info.plist @@ -0,0 +1,51 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + $(PRODUCT_BUNDLE_PACKAGE_TYPE) + CFBundleShortVersionString + 1.0 + CFBundleVersion + 1 + LSRequiresIPhoneOS + + NSMicrophoneUsageDescription + For voice assistant + UIApplicationSceneManifest + + UIApplicationSupportsMultipleScenes + + + UIApplicationSupportsIndirectInputEvents + + UILaunchScreen + + UIRequiredDeviceCapabilities + + armv7 + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + + diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/LLMVoiceAssistantDemoApp.swift b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/LLMVoiceAssistantDemoApp.swift new file mode 100644 index 0000000..3494f16 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/LLMVoiceAssistantDemoApp.swift @@ -0,0 +1,19 @@ +// +// Copyright 2024 Picovoice Inc. +// You may not use this file except in compliance with the license. 
A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. +// + +import SwiftUI + +@main +struct LLMVoiceAssistantDemoApp: App { + var body: some Scene { + WindowGroup { + ContentView() + } + } +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/LoadModelView.swift b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/LoadModelView.swift new file mode 100644 index 0000000..dafe5c5 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/LoadModelView.swift @@ -0,0 +1,82 @@ +// +// Copyright 2024 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. 
+// + +import SwiftUI + +struct LoadModelView: View { + @ObservedObject var viewModel: ViewModel + + @State var showSidebar = false + + var body: some View { + let isError = !viewModel.errorMessage.isEmpty + + VStack(alignment: .center) { + Spacer() + + if !isError { + Text(viewModel.modelLoadStatusText) + .frame( + minWidth: 0, + maxWidth: UIScreen.main.bounds.width - 50, + minHeight: UIScreen.main.bounds.height / 2) + .padding(.vertical, 10) + .padding(.horizontal, 10) + .font(.body) + .cornerRadius(.infinity) + if !viewModel.enableLoadModelButton { + ProgressView(value: 0).progressViewStyle(CircularProgressViewStyle()) + } + } else { + Text(viewModel.errorMessage) + .padding() + .foregroundColor(Color.white) + .frame(maxWidth: .infinity) + .background(Constants.dangerRed) + .font(.body) + .opacity(viewModel.errorMessage.isEmpty ? 0 : 1) + .cornerRadius(10) + } + + Spacer() + + Button(action: viewModel.extractModelFile) { + Text("Load Model") + .background(Constants.btnColor(viewModel.enableLoadModelButton)) + .foregroundColor(.white) + .padding(.horizontal, 35.0) + .padding(.vertical, 20.0) + }.background( + Capsule().fill(Constants.btnColor(viewModel.enableLoadModelButton)) + ) + .padding(12) + .disabled(!viewModel.enableLoadModelButton) + + .fileImporter( + isPresented: $viewModel.showFileImporter, + allowedContentTypes: [.data], + allowsMultipleSelection: false + ) { result in + switch result { + case .success(let files): + viewModel.selectedModelUrl = files[0] + viewModel.loadEngines() + case .failure: + break + } + } + + Spacer() + }.frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity).background(Color.white) + } +} + +#Preview { + LoadModelView(viewModel: ViewModel()) +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ViewModel.swift b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ViewModel.swift new file mode 100644 index 0000000..1229fa9 --- /dev/null +++ 
b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/ViewModel.swift @@ -0,0 +1,360 @@ +// +// Copyright 2024 Picovoice Inc. +// You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE" +// file accompanying this source. +// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +// specific language governing permissions and limitations under the License. +// + +import Porcupine +import Cheetah +import PicoLLM +import Orca +import ios_voice_processor + +import Combine + +enum ChatState { + case WAKEWORD + case STT + case GENERATE + case ERROR +} + +class ViewModel: ObservableObject { + + private let ACCESS_KEY = "${YOUR_ACCESS_KEY_HERE}" + + private var porcupine: Porcupine? + private var cheetah: Cheetah? + private var orca: Orca? + + private var picollm: PicoLLM? + private var dialog: PicoLLMDialog? + + private var chatState: ChatState = .WAKEWORD + + private var audioStream: AudioPlayerStream? + + static let modelLoadStatusTextDefault = """ +Start by loading a `.pllm` model file. + +You can download directly to your device or airdrop from a Mac. +""" + @Published var modelLoadStatusText = modelLoadStatusTextDefault + @Published var enableLoadModelButton = true + @Published var showFileImporter = false + @Published var selectedModelUrl: URL? + + @Published var enginesLoaded = false + + static let statusTextDefault = "Say `Picovoice`!" 
+ @Published var statusText = statusTextDefault + + @Published var promptText = "" + @Published var enableGenerateButton = true + + @Published var chatText: [Message] = [] + + @Published var errorMessage = "" + + deinit { + if picollm != nil { + picollm!.delete() + } + } + + public func extractModelFile() { + showFileImporter = true + } + + public func loadEngines() { + errorMessage = "" + modelLoadStatusText = "" + enableLoadModelButton = false + + let modelAccess = selectedModelUrl!.startAccessingSecurityScopedResource() + if !modelAccess { + errorMessage = "Can't get permissions to access model file" + enableLoadModelButton = true + return + } + + DispatchQueue.global(qos: .userInitiated).async { [self] in + let setStatusText = {(_ msg: String) in + DispatchQueue.main.async { [self] in + modelLoadStatusText = msg + } + } + do { + setStatusText("Loading Porcupine...") + porcupine = try Porcupine(accessKey: ACCESS_KEY, keyword: .picovoice) + + setStatusText("Loading Cheetah...") + let cheetahModelPath = Bundle(for: type(of: self)).path(forResource: "cheetah_params", ofType: "pv")! + cheetah = try Cheetah(accessKey: ACCESS_KEY, modelPath: cheetahModelPath) + + setStatusText("Loading picoLLM...") + picollm = try PicoLLM(accessKey: ACCESS_KEY, modelPath: selectedModelUrl!.path) + dialog = try picollm!.getDialog() + + setStatusText("Loading Orca...") + let orcaModelPath = Bundle(for: type(of: self)).path(forResource: "orca_params_female", ofType: "pv")! 
+ orca = try Orca(accessKey: ACCESS_KEY, modelPath: orcaModelPath) + + setStatusText("Loading Audio Player...") + audioStream = try AudioPlayerStream(sampleRate: Double(self.orca!.sampleRate!)) + + setStatusText("Loading Voice Processor...") + VoiceProcessor.instance.addFrameListener(VoiceProcessorFrameListener(audioCallback)) + VoiceProcessor.instance.addErrorListener(VoiceProcessorErrorListener(errorCallback)) + startAudioRecording() + + DispatchQueue.main.async { [self] in + enginesLoaded = true + } + } catch { + DispatchQueue.main.async { [self] in + unloadEngines() + errorMessage = "\(error.localizedDescription)" + } + } + + DispatchQueue.main.async { [self] in + selectedModelUrl!.stopAccessingSecurityScopedResource() + + modelLoadStatusText = ViewModel.modelLoadStatusTextDefault + enableLoadModelButton = true + } + } + } + + public func unloadEngines() { + stopAudioRecording() + VoiceProcessor.instance.clearFrameListeners() + VoiceProcessor.instance.clearErrorListeners() + + if porcupine != nil { + porcupine!.delete() + } + if cheetah != nil { + cheetah!.delete() + } + if picollm != nil { + picollm!.delete() + } + if orca != nil { + orca!.delete() + } + porcupine = nil + cheetah = nil + picollm = nil + orca = nil + + errorMessage = "" + promptText = "" + chatText.removeAll() + enginesLoaded = false + } + + private func startAudioRecording() { + DispatchQueue.main.sync { + do { + try VoiceProcessor.instance.start( + frameLength: Porcupine.frameLength, + sampleRate: Porcupine.sampleRate) + } catch { + errorMessage = "\(error.localizedDescription)" + } + } + } + + private func stopAudioRecording() { + do { + try VoiceProcessor.instance.stop() + } catch { + DispatchQueue.main.async { [self] in + errorMessage = "\(error.localizedDescription)" + } + } + } + + private var completionQueue = DispatchQueue(label: "text-stream-queue") + private var completionArray: [String] = [] + + private let stopPhrases = [ + "", // Llama-2, Mistral, and Mixtral + "", // Gemma + 
"<|endoftext|>", // Phi-2 + "<|eot_id|>" // Llama-3 + ] + + private func streamCallback(completion: String) { + DispatchQueue.main.async { [self] in + if self.stopPhrases.contains(completion) { + return + } + + completionQueue.async { + self.completionArray.append(completion) + } + chatText[chatText.count - 1].append(text: completion) + } + } + + public func generate() { + errorMessage = "" + + enableGenerateButton = false + + DispatchQueue.global(qos: .userInitiated).async { [self] in + do { + try dialog!.addHumanRequest(content: chatText[chatText.count - 1].msg) + + DispatchQueue.main.async { [self] in + chatText.append(Message(speaker: "picoLLM:", msg: "")) + } + + let result = try picollm!.generate( + prompt: dialog!.prompt(), + completionTokenLimit: 128, + streamCallback: streamCallback) + + try dialog!.addLLMResponse(content: result.completion) + } catch { + DispatchQueue.main.async { [self] in + errorMessage = "\(error.localizedDescription)" + } + } + + DispatchQueue.main.async { [self] in + statusText = ViewModel.statusTextDefault + chatState = .WAKEWORD + + promptText = "" + enableGenerateButton = true + } + } + + DispatchQueue.global(qos: .userInitiated).async { [self] in + do { + let orcaStream = try self.orca!.streamOpen() + + var warmup = true + var warmupBuffer: [Int16] = [] + + var itemsRemaining = true + while chatState == .GENERATE || itemsRemaining { + completionQueue.sync { + itemsRemaining = !self.completionArray.isEmpty + } + + if itemsRemaining { + var token = "" + completionQueue.sync { + token = completionArray[0] + completionArray.removeFirst() + } + + let pcm = try orcaStream.synthesize(text: token) + if pcm != nil { + if warmup { + warmupBuffer.append(contentsOf: pcm!) + if warmupBuffer.count >= (1 * orca!.sampleRate!) 
{ + audioStream!.playStreamPCM(warmupBuffer, completion: { isPlaying in + if !isPlaying { + self.startAudioRecording() + } + }) + warmupBuffer.removeAll() + warmup = false + } + } else { + audioStream!.playStreamPCM(pcm!, completion: {_ in }) + } + } + } + } + + if !warmupBuffer.isEmpty { + audioStream!.playStreamPCM(warmupBuffer, completion: { isPlaying in + if !isPlaying { + self.startAudioRecording() + } + }) + } + + let pcm = try orcaStream.flush() + if pcm != nil { + audioStream!.playStreamPCM(pcm!, completion: {_ in}) + } + orcaStream.close() + } catch { + DispatchQueue.main.async { [self] in + errorMessage = "\(error.localizedDescription)" + } + } + } + } + + public func clearText() { + promptText = "" + chatText.removeAll() + } + + private func audioCallback(frame: [Int16]) { + do { + if audioStream?.isPlaying ?? false { + return + } + if chatState == .WAKEWORD { + let keyword = try self.porcupine!.process(pcm: frame) + if keyword != -1 { + DispatchQueue.main.async { [self] in + statusText = "Listening..." + chatText.append(Message(speaker: "You:", msg: "")) + chatState = .STT + } + } + } else if chatState == .STT { + var (transcription, endpoint) = try self.cheetah!.process(frame) + if endpoint { + transcription += "\(try self.cheetah!.flush())" + } + if !transcription.isEmpty { + DispatchQueue.main.async { [self] in + chatText[chatText.count - 1].append(text: transcription) + } + } + if endpoint { + DispatchQueue.main.async { [self] in + statusText = "Generating..." 
+ chatState = .GENERATE + stopAudioRecording() + self.generate() + } + } + } + } catch { + DispatchQueue.main.async { [self] in + errorMessage = "\(error.localizedDescription)" + } + } + } + + private func errorCallback(error: VoiceProcessorError) { + DispatchQueue.main.async { [self] in + errorMessage = "\(error.localizedDescription)" + } + } +} + +struct Message: Equatable { + var speaker: String + var msg: String + + mutating func append(text: String) { + self.msg.append(text) + } +} diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/resources/cheetah_params.pv b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/resources/cheetah_params.pv new file mode 100644 index 0000000..15e2bd9 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/resources/cheetah_params.pv differ diff --git a/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/resources/orca_params_female.pv b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/resources/orca_params_female.pv new file mode 100644 index 0000000..674f9f5 Binary files /dev/null and b/recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/resources/orca_params_female.pv differ diff --git a/recipes/llm-voice-assistant/ios/Podfile b/recipes/llm-voice-assistant/ios/Podfile new file mode 100644 index 0000000..fce9590 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/Podfile @@ -0,0 +1,10 @@ +source 'https://cdn.cocoapods.org/' +platform :ios, '16.0' + +target 'LLMVoiceAssistantDemo' do + pod 'Porcupine-iOS', '~> 3.0.1' + pod 'Cheetah-iOS', '~> 2.0.0' + pod 'picoLLM-iOS', '~> 1.0.0' + pod 'Orca-iOS', '~> 0.2.1' + pod 'ios-voice-processor', '~> 1.1.0' +end diff --git a/recipes/llm-voice-assistant/ios/Podfile.lock b/recipes/llm-voice-assistant/ios/Podfile.lock new file mode 100644 index 0000000..d5aa58b --- /dev/null +++ b/recipes/llm-voice-assistant/ios/Podfile.lock @@ -0,0 +1,33 @@ +PODS: + - Cheetah-iOS (2.0.0) + - ios-voice-processor (1.1.0) + - Orca-iOS (0.2.1) + - 
picoLLM-iOS (1.0.0) + - Porcupine-iOS (3.0.1): + - ios-voice-processor (~> 1.1.0) + +DEPENDENCIES: + - Cheetah-iOS (~> 2.0.0) + - ios-voice-processor (~> 1.1.0) + - Orca-iOS (~> 0.2.1) + - picoLLM-iOS (~> 1.0.0) + - Porcupine-iOS (~> 3.0.1) + +SPEC REPOS: + trunk: + - Cheetah-iOS + - ios-voice-processor + - Orca-iOS + - picoLLM-iOS + - Porcupine-iOS + +SPEC CHECKSUMS: + Cheetah-iOS: d98a5edcbf3b74dda6027aeac6a8c0f5997a47a2 + ios-voice-processor: 8e32d7f980a06d392d128ef1cd19cf6ddcaca3c1 + Orca-iOS: f6b6124d78189e26c8c0457022a5948217ebe2d3 + picoLLM-iOS: 02cdb501b4beb74a9c1dea29d5cf461d65ea4a6c + Porcupine-iOS: 6d69509fa587f3ac0be1adfefb48e0c6ce029fff + +PODFILE CHECKSUM: a79add61281fdc5db76ecb9065dfc592704216b4 + +COCOAPODS: 1.15.2 diff --git a/recipes/llm-voice-assistant/ios/README.md b/recipes/llm-voice-assistant/ios/README.md new file mode 100644 index 0000000..3e77972 --- /dev/null +++ b/recipes/llm-voice-assistant/ios/README.md @@ -0,0 +1,51 @@ +## See It In Action! + +[![LLM VA in Action](https://img.youtube.com/vi/VNTzzePFhPk/0.jpg)](https://www.youtube.com/watch?v=VNTzzePFhPk) + +## Compatibility + +- iOS 16.0+ + +## AccessKey + +AccessKey is your authentication and authorization token for deploying Picovoice SDKs, including picoLLM. Anyone who is +using Picovoice needs to have a valid AccessKey. You must keep your AccessKey secret. You would need internet +connectivity to validate your AccessKey with Picovoice license servers even though the LLM inference is running 100% +offline and completely free for open-weight models. Everyone who signs up for +[Picovoice Console](https://console.picovoice.ai/) receives a unique AccessKey. + +## picoLLM Model + +picoLLM Inference Engine supports many open-weight models. The models are on +[Picovoice Console](https://console.picovoice.ai/). + +Download your desired model file from the [Picovoice Console](https://console.picovoice.ai/). 
+If you do not download the file directly from your iOS device, +you will need to upload it to the device to use it with the demos. +To upload the model, use AirDrop or connect your iOS device to your computer via USB or launch a simulator. +Copy your model file to the device. + +## Usage + +1. Install the dependencies using `CocoaPods`: + +```console +pod install +``` + +2. Open the `LLMVoiceAssistantDemo.xcworkspace` in Xcode + +3. Replace `let ACCESS_KEY = "${YOUR_ACCESS_KEY_HERE}"` in the file [ViewModel.swift](./LLMVoiceAssistantDemo/ViewModel.swift) with your AccessKey obtained from [Picovoice Console](https://console.picovoice.ai/). + +4. Build and run the project on your device. + +5. Press the `Load Model` button and load the model file from your device's storage. + +6. Say "Picovoice", then speak to the voice assistant! + +## Custom Wake Word + +The demo's default wake phrase is `Picovoice`. +You can generate your custom (branded) wake word using Picovoice Console by following the [Porcupine Wake Word documentation](https://picovoice.ai/docs/porcupine/). 
+Once you have the model trained, add it to the [resources](./LLMVoiceAssistantDemo/resources) directory +and include it as an argument to the `Porcupine` initialization in [ViewModel.swift](./LLMVoiceAssistantDemo/ViewModel.swift) diff --git a/res/.lint/spell-check/.cspell.json b/res/.lint/spell-check/.cspell.json index 60f6420..d06a426 100644 --- a/res/.lint/spell-check/.cspell.json +++ b/res/.lint/spell-check/.cspell.json @@ -13,6 +13,18 @@ "ignorePaths": [ "**/__pycache__/**", + // ios + "../../../recipes/llm-voice-assistant/ios/LLMVoiceAssistantDemo/resources/**/*", + "**/*.xcconfig", + "**/*.podspec", + "**/Podfile", + "**/*.plist", + "**/contents.xcworkspacedata", + "**/*.storyboard", + "**/*.pbxproj", + "**/*.xcscheme", + "**/*.m", + // javascript "**/package.json", "**/packages-lock.json", @@ -22,4 +34,4 @@ // common "**/*.pv" ] -} \ No newline at end of file +} diff --git a/res/.lint/spell-check/dict.txt b/res/.lint/spell-check/dict.txt index 3e37e48..2b839d6 100644 --- a/res/.lint/spell-check/dict.txt +++ b/res/.lint/spell-check/dict.txt @@ -14,4 +14,6 @@ pvporcupine pvrecorder samplerate sounddevice -tock \ No newline at end of file +tock +WAKEWORD +xcworkspace diff --git a/res/.lint/swift/.swiftlint.yml b/res/.lint/swift/.swiftlint.yml new file mode 100644 index 0000000..0e60bdd --- /dev/null +++ b/res/.lint/swift/.swiftlint.yml @@ -0,0 +1,11 @@ +disabled_rules: + - identifier_name + - function_body_length + - force_cast + - implicit_getter + - cyclomatic_complexity + - function_parameter_count + - type_body_length +excluded: + - ${PWD}/**/Pods + - ${PWD}/**/node_modules