diff --git a/progress/sergey-karasev.md b/progress/sergey-karasev.md index 7898efa2d..7b7abbe9b 100644 --- a/progress/sergey-karasev.md +++ b/progress/sergey-karasev.md @@ -1,4 +1,4 @@ -2023-11-10 Fri: Working on [truncating the accumulating context to a maximum of 512 characters](https://github.com/harmony-one/x/pull/159) and adding unit tests for the "limiter", [added the same logic to harmony one bot](https://github.com/harmony-one/HarmonyOneBot/pull/343) +2023-11-10 Fri: Working on [truncating the accumulating context to a maximum of 512 characters](https://github.com/harmony-one/x/pull/159) and adding unit tests for the "limiter" 2023-11-09 Thu: I have added [a window to display a link to the application](https://github.com/harmony-one/x/pull/142) (share feature) after the user taps "new session" button for the seventh time, also [added a throttler](https://github.com/harmony-one/x/pull/144) to the reset session function (in order not to interrupt the greeting). Clarified the code regarding tap-to-speak and [fixed the play-pause button state](https://github.com/harmony-one/x/pull/149) diff --git a/progress/theo-fandrich.md b/progress/theo-fandrich.md index c0fc6c4ea..592dd66cf 100644 --- a/progress/theo-fandrich.md +++ b/progress/theo-fandrich.md @@ -1,10 +1,4 @@ -2023-11-10 Fri: Made a 9am push that integrated some new support for in App purchases coming soon. Working on few small bugs still in place such has long press triggering tap functionality, overlap of "Tap to Speak" & "Press & Hold" functionality, and will improve logic for free credits. - -2023-11-09 Thu: Made a huge push for 3 initial releases to the app store today. Download voice AI on the App Store [here](x.country/app). - -2023-11-08 Wed: Worked on engineers to fix bugs regarding UI inconsistencies and conducted product testing for rate limiting. - -2023-11-07 Tue: Product tested, listed out bugs, delegated fixes to engineers, submitted to App Store connect. 
+2023-11-07 Tue: Product tested, listed out bugs, delegated fixes to engineers, submitted to App Store Connect. 2023-11-06 Mon: Worked on preparing app store submission. Calculated pricing for Voice AI and documented how the backend and iOS app should communicate to support the bahavior. The updated UI will be implemented by Tuesday morning. diff --git a/progress/yuriy-menkov.md b/progress/yuriy-menkov.md index f0390e2b7..d38638e38 100644 --- a/progress/yuriy-menkov.md +++ b/progress/yuriy-menkov.md @@ -1,4 +1,4 @@ -2023-11-10 Fri: [Resolved](https://github.com/harmony-one/x/pull/161) issue with long press actions (Ensure long press actions do not trigger tap actions vice versa). Working on tracking active using app time and showing suggestions to share with friends and share on Twitter. +2023-11-10 Fri: Resolved issue with long press actions (Ensure long press actions do not trigger tap actions or vice versa). Working on tracking active app usage time and showing suggestions to share with friends and share on Twitter. 2023-11-9 Thu: [Added](https://github.com/harmony-one/x/pull/148/files) the ability to repeat the current session to resolve repeat bug (When hitting "Repeat" during the first stream, it says "Hey" while the stream is going. It should just start again from the beginning instead.)
diff --git a/voice/voice-ai/Voice AI.xcodeproj/project.pbxproj b/voice/voice-ai/Voice AI.xcodeproj/project.pbxproj index badf9bc92..f7eaaea0d 100644 --- a/voice/voice-ai/Voice AI.xcodeproj/project.pbxproj +++ b/voice/voice-ai/Voice AI.xcodeproj/project.pbxproj @@ -91,8 +91,7 @@ B36367562AFC69F2000409FC /* RandomFactTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = B31CDCE62AFC644D00AB39EE /* RandomFactTests.swift */; }; B36367572AFC6A04000409FC /* MockGenerator.swift in Sources */ = {isa = PBXBuildFile; fileRef = F7F1150D2AF825F700BC191C /* MockGenerator.swift */; }; B38ADB9D2AFE0A0F006BDC93 /* AppConfig.plist in Resources */ = {isa = PBXBuildFile; fileRef = F72708412AFD672E000DE81D /* AppConfig.plist */; }; - B3C083B32AF1BADB0069232C /* OpenAITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3C083B22AF1BADB0069232C /* OpenAITests.swift */; }; - B3C0FE4B2AFEC68800B712E7 /* IAPTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3C0FE4A2AFEC68800B712E7 /* IAPTests.swift */; }; + B3C083B32AF1BADB0069232C /* MessageContextTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3C083B22AF1BADB0069232C /* MessageContextTests.swift */; }; B3D0A3442AF29B1B00E8B0DA /* MockNetworkService.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D0A3432AF29B1B00E8B0DA /* MockNetworkService.swift */; }; B91681282AFBA3A80006E463 /* ReviewRequester.swift in Sources */ = {isa = PBXBuildFile; fileRef = B9B331A02AFB803500F6A9C9 /* ReviewRequester.swift */; }; B919B7BF2AF3C3F7006335D1 /* AudioEngineAndSessionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = B919B7BE2AF3C3F7006335D1 /* AudioEngineAndSessionTests.swift */; }; @@ -157,6 +156,8 @@ F61049122AF02DF50087F745 /* SwiftyJSON in Frameworks */ = {isa = PBXBuildFile; productRef = F61049112AF02DF50087F745 /* SwiftyJSON */; }; F67E43322AFAC166001B72CD /* SentrySwiftUI in Frameworks */ = {isa = PBXBuildFile; productRef = F67E43312AFAC166001B72CD /* SentrySwiftUI */; }; F67E43342AFAC16C001B72CD 
/* Sentry in Frameworks */ = {isa = PBXBuildFile; productRef = F67E43332AFAC16C001B72CD /* Sentry */; }; + F723CC1F2AFECE2000B2A23A /* TextToSpeechConverterTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = F723CC1E2AFECE2000B2A23A /* TextToSpeechConverterTests.swift */; }; + F723CC212AFEFFA400B2A23A /* MockAVSpeechSynthesizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = F723CC202AFEFFA400B2A23A /* MockAVSpeechSynthesizer.swift */; }; F72708422AFD672E000DE81D /* AppConfig.plist in Resources */ = {isa = PBXBuildFile; fileRef = F72708412AFD672E000DE81D /* AppConfig.plist */; }; F7C16FE82AFC576000D11529 /* ThemeManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B346DF772AF562020023FC87 /* ThemeManager.swift */; }; F7F115052AF8173300BC191C /* SpeechRecognitionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = F7F114F52AF8172800BC191C /* SpeechRecognitionTests.swift */; }; @@ -233,8 +234,7 @@ B346DF772AF562020023FC87 /* ThemeManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThemeManager.swift; sourceTree = ""; }; B3B3BA792AFB40A300D8F8C6 /* Theme.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Theme.swift; sourceTree = ""; }; B3C083B12AF1948D0069232C /* OpenAIService.xctestplan */ = {isa = PBXFileReference; lastKnownFileType = text; name = OpenAIService.xctestplan; path = x/OpenAIService.xctestplan; sourceTree = SOURCE_ROOT; }; - B3C083B22AF1BADB0069232C /* OpenAITests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OpenAITests.swift; sourceTree = ""; }; - B3C0FE4A2AFEC68800B712E7 /* IAPTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IAPTests.swift; sourceTree = ""; }; + B3C083B22AF1BADB0069232C /* MessageContextTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MessageContextTests.swift; sourceTree = ""; }; 
B3D0A3432AF29B1B00E8B0DA /* MockNetworkService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockNetworkService.swift; sourceTree = ""; }; B919B7BE2AF3C3F7006335D1 /* AudioEngineAndSessionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioEngineAndSessionTests.swift; sourceTree = ""; }; B930AADC2ADE2DE5009F9F8C /* Color.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Color.swift; sourceTree = ""; }; @@ -279,6 +279,8 @@ CD0D13682ADA74D100031EDD /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS17.0.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; }; CD0D136A2ADB28CE00031EDD /* logo.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = logo.png; sourceTree = SOURCE_ROOT; }; F61049092AF02D820087F745 /* OpenAIStreamService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OpenAIStreamService.swift; sourceTree = ""; }; + F723CC1E2AFECE2000B2A23A /* TextToSpeechConverterTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TextToSpeechConverterTests.swift; sourceTree = ""; }; + F723CC202AFEFFA400B2A23A /* MockAVSpeechSynthesizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAVSpeechSynthesizer.swift; sourceTree = ""; }; F72708412AFD672E000DE81D /* AppConfig.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = AppConfig.plist; sourceTree = ""; }; F7F114F52AF8172800BC191C /* SpeechRecognitionTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SpeechRecognitionTests.swift; sourceTree = ""; }; F7F114F62AF8172800BC191C /* PermissionTests.swift */ = {isa = PBXFileReference; 
fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PermissionTests.swift; sourceTree = ""; }; @@ -603,7 +605,8 @@ F7F1150D2AF825F700BC191C /* MockGenerator.swift */, B31CDCE62AFC644D00AB39EE /* RandomFactTests.swift */, 6E35CB4A2AFE86130004D2D2 /* OpenAIUtilsTests.swift */, - B3C0FE4A2AFEC68800B712E7 /* IAPTests.swift */, + F723CC1E2AFECE2000B2A23A /* TextToSpeechConverterTests.swift */, + F723CC202AFEFFA400B2A23A /* MockAVSpeechSynthesizer.swift */, ); path = xTests; sourceTree = ""; @@ -987,6 +990,7 @@ F7F1150C2AF8173300BC191C /* DashboardViewTests.swift in Sources */, F7F115062AF8173300BC191C /* StringTests.swift in Sources */, B31CDCE42AFC565400AB39EE /* ThemeManager.swift in Sources */, + F723CC212AFEFFA400B2A23A /* MockAVSpeechSynthesizer.swift in Sources */, B9C4A81F2AEE594900327529 /* MockSpeechRecognition.swift in Sources */, F610490B2AF02D820087F745 /* OpenAIStreamService.swift in Sources */, B36367562AFC69F2000409FC /* RandomFactTests.swift in Sources */, @@ -997,6 +1001,7 @@ B9C4A8322AEE96B600327529 /* Usage.swift in Sources */, F7F1150A2AF8173300BC191C /* VibrationManagerTests.swift in Sources */, B36367572AFC6A04000409FC /* MockGenerator.swift in Sources */, + F723CC1F2AFECE2000B2A23A /* TextToSpeechConverterTests.swift in Sources */, A40CFCE42AF98BB700E02128 /* RandomFact.swift in Sources */, B9C4A8312AEE96B300327529 /* OpenAIResponse.swift in Sources */, F7F115082AF8173300BC191C /* AudioPlayerTests.swift in Sources */, @@ -1011,7 +1016,7 @@ F7F115072AF8173300BC191C /* PermissionTests.swift in Sources */, 6E53AF4C2AF012760022A8F2 /* GridButton.swift in Sources */, B9C4A82F2AEE96AE00327529 /* Choices.swift in Sources */, - B3C083B32AF1BADB0069232C /* OpenAITests.swift in Sources */, + B3C083B32AF1BADB0069232C /* MessageContextTests.swift in Sources */, B9C4A82C2AEE967200327529 /* AudioPlayer.swift in Sources */, 6E53AF512AF012A40022A8F2 /* Color.swift in Sources */, ); diff --git a/voice/voice-ai/x/Actions/GridButton.swift 
b/voice/voice-ai/x/Actions/GridButton.swift index 1f840df5a..8fc362517 100644 --- a/voice/voice-ai/x/Actions/GridButton.swift +++ b/voice/voice-ai/x/Actions/GridButton.swift @@ -7,10 +7,6 @@ struct GridButton: View { var foregroundColor: Color var active: Bool = false var isPressed: Bool = false - - @State private var timeAtPress = Date() - @State private var isDragActive = false - var image: String? = nil var colorExternalManage: Bool = false var action: () -> Void @@ -18,63 +14,10 @@ struct GridButton: View { let imageTextSpacing: CGFloat = 40 @Environment(\.verticalSizeClass) var verticalSizeClass @Environment(\.horizontalSizeClass) var horizontalSizeClass - - func onDragEnded() { - self.isDragActive = false - } - - func onDragStart() { - if(!self.isDragActive) { - self.isDragActive = true - - self.timeAtPress = Date() - } - } - - var body: some View { - let drag = DragGesture(minimumDistance: 0) - .onChanged({ drag in - self.onDragStart() - }) - .onEnded({ drag in - self.onDragEnded() - }) - - let hackyPinch = MagnificationGesture(minimumScaleDelta: 0.0) - .onChanged({ delta in - self.onDragEnded() - }) - .onEnded({ delta in - self.onDragEnded() - }) - - let hackyRotation = RotationGesture(minimumAngleDelta: Angle(degrees: 0.0)) - .onChanged({ delta in - self.onDragEnded() - }) - .onEnded({ delta in - self.onDragEnded() - }) - - let hackyPress = LongPressGesture(minimumDuration: 0.0, maximumDistance: 0.0) - .onChanged({ _ in - self.onDragEnded() - }) - .onEnded({ delta in - self.onDragEnded() - }) - - let combinedGesture = drag - .simultaneously(with: hackyPinch) - .simultaneously(with: hackyRotation) - .exclusively(before: hackyPress) - - return Button(action: { - let elapsed = Date().timeIntervalSince(self.timeAtPress) - if(elapsed < 3) { - action() - } + var body: some View { + Button(action: { + action() }) { VStack(spacing: imageTextSpacing) { Image(pressEffectButtonImage()) // button.image) @@ -91,7 +34,6 @@ struct GridButton: View { 
.alignmentGuide(.bottom) { _ in 0.5 } } .buttonStyle(PressEffectButtonStyle(theme: currentTheme, active: active, invertColors: button.action == .speak && button.pressedLabel == nil)) - .simultaneousGesture(combinedGesture) } private func pressEffectButtonImage() -> String { diff --git a/voice/voice-ai/x/SpeechRecognition/SpeechRecognition.swift b/voice/voice-ai/x/SpeechRecognition/SpeechRecognition.swift index 56a5b3c3d..74617a6b1 100644 --- a/voice/voice-ai/x/SpeechRecognition/SpeechRecognition.swift +++ b/voice/voice-ai/x/SpeechRecognition/SpeechRecognition.swift @@ -80,7 +80,7 @@ class SpeechRecognition: NSObject, ObservableObject, SpeechRecognitionProtocol { } @Published private var _isPlaying = false - var isPlaingPublisher: Published.Publisher { + var isPlayingPublisher: Published.Publisher { $_isPlaying } diff --git a/voice/voice-ai/xTests/IAPTests.swift b/voice/voice-ai/xTests/IAPTests.swift deleted file mode 100644 index 85485ba4f..000000000 --- a/voice/voice-ai/xTests/IAPTests.swift +++ /dev/null @@ -1,21 +0,0 @@ -import XCTest - -class PersistenceTests: XCTestCase { - - func testIncreaseConsumablesCount() { - // Given - let initialCreditsCount = UserDefaults.standard.integer(forKey: Persistence.creditsCountKey) - let creditsAmount = 5 - - // When - Persistence.increaseConsumablesCount(creditsAmount: creditsAmount) - - // Then - let updatedCreditsCount = UserDefaults.standard.integer(forKey: Persistence.creditsCountKey) - XCTAssertEqual(updatedCreditsCount, initialCreditsCount + creditsAmount) - } - - // Add more test cases as needed - -} - diff --git a/voice/voice-ai/xTests/OpenAITests.swift b/voice/voice-ai/xTests/MessageContextTests.swift similarity index 57% rename from voice/voice-ai/xTests/OpenAITests.swift rename to voice/voice-ai/xTests/MessageContextTests.swift index 4ad41abaf..cdfa9e4cd 100644 --- a/voice/voice-ai/xTests/OpenAITests.swift +++ b/voice/voice-ai/xTests/MessageContextTests.swift @@ -24,59 +24,6 @@ class OpenAIServiceTests: 
XCTestCase { } } -class OpenAIResponseTests: XCTestCase { - - func testInit() throws { - // Given - let json = """ - { - "id": "123", - "object": "response", - "created": 1635790771, - "model": "gpt-3.5-turbo", - "choices": [ - { - "message": { - "role": "user", - "content": "Hi" - }, - "finish_reason": "OK", - "index": 1 - - }, - ], - "usage": { - "prompt_tokens": 10, - "completion_tokens": 50, - "total_tokens": 60 - } - } - """ - - // When - let jsonData = Data(json.utf8) - let response = try JSONDecoder().decode(OpenAIResponse.self, from: jsonData) - - // Then - XCTAssertEqual(response.id, "123") - XCTAssertEqual(response.object, "response") - XCTAssertEqual(response.created, 1635790771) - XCTAssertEqual(response.model, "gpt-3.5-turbo") - - XCTAssertEqual(response.choices?.count, 1) - XCTAssertEqual(response.choices?[0].message?.role, "user") - XCTAssertEqual(response.choices?[0].message?.content, "Hi") - - XCTAssertNotNil(response.usage) - XCTAssertEqual(response.usage?.prompt_tokens, 10) - XCTAssertEqual(response.usage?.completion_tokens, 50) - XCTAssertEqual(response.usage?.total_tokens, 60) - } - - // Add more test cases as needed - -} - class MessageTests: XCTestCase { func testInitialization() { // Test initializing a Message instance diff --git a/voice/voice-ai/xTests/MockSpeechRecognition.swift b/voice/voice-ai/xTests/MockSpeechRecognition.swift index 0994c797d..6a850914d 100644 --- a/voice/voice-ai/xTests/MockSpeechRecognition.swift +++ b/voice/voice-ai/xTests/MockSpeechRecognition.swift @@ -16,13 +16,19 @@ class MockSpeechRecognition: SpeechRecognitionProtocol { var repeateCalled: Bool = false private var _isPlaying = false + private var _isPausing = false private var isPlayingSubject = PassthroughSubject() + private var isPausingSubject = PassthroughSubject() // Define a custom publisher and use isPlayingSubject to emit values - var isPlaingPublisher: AnyPublisher { + var isPlayingPublisher: AnyPublisher { return isPlayingSubject.eraseToAnyPublisher() 
} + + var isPausingPublisher: AnyPublisher { + return isPausingSubject.eraseToAnyPublisher() + } // Implement a method to update the value and notify subscribers func setIsPlaying(_ isPlaying: Bool) { @@ -30,6 +36,11 @@ class MockSpeechRecognition: SpeechRecognitionProtocol { isPlayingSubject.send(isPlaying) } + func setIsPausing(_ isPausing: Bool) { + _isPausing = isPausing + isPausingSubject.send(isPausing) + } + func pause(feedback: Bool?) {} func surprise() { diff --git a/voice/voice-ai/xTests/OpenAIUtilsTests.swift b/voice/voice-ai/xTests/OpenAIUtilsTests.swift index 11d9e1339..72bd5b3d4 100644 --- a/voice/voice-ai/xTests/OpenAIUtilsTests.swift +++ b/voice/voice-ai/xTests/OpenAIUtilsTests.swift @@ -33,11 +33,10 @@ final class OpenAIUtilsTests: XCTestCase { Voice_AI.Message(role: "assistant", content: "Please adhere to the community guidelines."), ]; - let limitedConversation = Voice_AI.OpenAIUtils.limitConversationContext(conversation, charactersCount: 52) + let limitedConversation = Voice_AI.OpenAIUtils.limitConversationContext(conversation, charactersCount: 43) XCTAssertEqual(limitedConversation.count, 2, "conversation should contain 2 messages") - XCTAssertEqual(limitedConversation[0].content, "confirmed.") - XCTAssertEqual(limitedConversation[1].content, "Please adhere to the community guidelines.") + } func testShouldReturnAllMessages() throws { @@ -50,9 +49,7 @@ final class OpenAIUtilsTests: XCTestCase { let limitedConversation = Voice_AI.OpenAIUtils.limitConversationContext(conversation, charactersCount: 100) XCTAssertEqual(limitedConversation.count, 3, "conversation should contain all messages") - XCTAssertEqual(limitedConversation[0].content, "Welcome to the platform!") - XCTAssertEqual(limitedConversation[1].content, "Your order has been confirmed.") - XCTAssertEqual(limitedConversation[2].content, "Please adhere to the community guidelines.") + } func testShouldFilterEmptyConversation() throws { @@ -60,35 +57,25 @@ final class OpenAIUtilsTests: 
XCTestCase { let limitedEmpty = Voice_AI.OpenAIUtils.limitConversationContext(emptyConversation, charactersCount: 100) + XCTAssertEqual(limitedEmpty.count, 0, "conversation should contain all messages") } func testShouldFilterEmptyMessages() throws { - let conversation: [Voice_AI.Message] = [ + let emptyConversation: [Voice_AI.Message] = [ Voice_AI.Message(role: "assistant", content: ""), Voice_AI.Message(role: "assistant", content: "Please adhere to the community guidelines."), Voice_AI.Message(role: "assistant", content: ""), Voice_AI.Message(role: "assistant", content: nil), ]; - let cleanConversation = Voice_AI.OpenAIUtils.limitConversationContext(conversation, charactersCount: 100) - - XCTAssertEqual(cleanConversation.count, 1) - XCTAssertEqual(cleanConversation[0].content, "Please adhere to the community guidelines.") - } - - func shouldPreserveOrderOfMessages() throws { - let conversation: [Voice_AI.Message] = [ - Voice_AI.Message(role: "assistant", content: "one"), - Voice_AI.Message(role: "assistant", content: "two"), - Voice_AI.Message(role: "assistant", content: "three"), - ]; + let limitedEmpty = Voice_AI.OpenAIUtils.limitConversationContext(emptyConversation, charactersCount: 100) - let limitedc = Voice_AI.OpenAIUtils.limitConversationContext(conversation, charactersCount: 100) - XCTAssertEqual(limitedc[0].content, "one") - XCTAssertEqual(limitedc[1].content, "two") - XCTAssertEqual(limitedc[3].content, "three") + XCTAssertEqual(limitedEmpty.count, 1, "conversation should contain all messages") } + + + } diff --git a/voice/voice-ai/xTests/SpeechRecognitionTests.swift b/voice/voice-ai/xTests/SpeechRecognitionTests.swift index 23efaedca..2999c50dc 100644 --- a/voice/voice-ai/xTests/SpeechRecognitionTests.swift +++ b/voice/voice-ai/xTests/SpeechRecognitionTests.swift @@ -1,5 +1,7 @@ @testable import Voice_AI import XCTest +import StoreKit +import SwiftUI // struct SpeechRecognitionProtocolTest: SpeechRecognitionProtocol {} // @@ -9,13 +11,21 @@ import 
XCTest // } class SpeechRecognitionTests: XCTestCase { + + var speechRecognition: MockSpeechRecognition! + + override func setUp() { + super.setUp() + speechRecognition = MockSpeechRecognition() + } + // Test the `isPaused()` function func testIsPaused() { // Create a mock SpeechRecognition object let mockSpeechRecognition = MockSpeechRecognition() // Call the `isPaused()` function - let paused = mockSpeechRecognition.isPaused() + _ = mockSpeechRecognition.isPaused() // Assert that the `isPausedCalled` property is set to `true` XCTAssertTrue(mockSpeechRecognition.isPausedCalled) @@ -81,10 +91,59 @@ class SpeechRecognitionTests: XCTestCase { XCTAssertTrue(mockSpeechRecognition.pauseCalled) } - func testIsPlaningPublisherGetter() { +// func testIsPlayingPublisher() { +// // Given +// var receivedIsPlayingValues: [Bool] = [] +// let expectation = XCTestExpectation(description: "Received values from isPlayingPublisher") +// +// // When +// let publisher = speechRecognition.isPlayingPublisher +// let cancellable = publisher.sink { value in +// receivedIsPlayingValues.append(value) +// expectation.fulfill() +// } +// +// // Simulate changes in the isPlaying state +// speechRecognition._isPlaying = true +// speechRecognition._isPlaying = false +// speechRecognition._isPlaying = true +// +// // Then +// wait(for: [expectation], timeout: 1.0) +// XCTAssertEqual(receivedIsPlayingValues, [false, true, false, true]) +// +// cancellable.cancel() +// } + + func testIsPausedPublisher() { + // Given + var receivedIsPausedValues: [Bool] = [] + let expectation = XCTestExpectation(description: "Received values from isPausedPublisher") +// var speechRecognition: MockSpeechRecognition = MockSpeechRecognition() + + // When + let publisher = speechRecognition.isPausingPublisher + let cancellable = publisher.sink { value in + receivedIsPausedValues.append(value) + expectation.fulfill() + } + + // Simulate changes in the isPaused state using the mock + speechRecognition.setIsPausing(true) 
+ speechRecognition.setIsPausing(false) + speechRecognition.setIsPausing(true) + + // Then + wait(for: [expectation], timeout: 1.0) + XCTAssertEqual(receivedIsPausedValues, [true, false, true]) + + cancellable.cancel() + } + + func testIsPlayingPublisherGetter() { let mockSpeechRecognition = MockSpeechRecognition() - let isPlayingPublisher = mockSpeechRecognition.isPlaingPublisher + let isPlayingPublisher = mockSpeechRecognition.isPlayingPublisher // Assert: Verify the result var isPlaying = false