
Commit

add auto speak question
xqsadness committed Sep 15, 2023
1 parent 505b2be commit 42f6399
Showing 10 changed files with 103 additions and 5 deletions.
Binary file not shown.
@@ -3,4 +3,22 @@
uuid = "40680EE2-D096-4625-B9E1-B90CF7A46FFB"
type = "0"
version = "2.0">
<Breakpoints>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.FileBreakpoint">
<BreakpointContent
uuid = "EE60291D-9C31-412C-AE2A-E96E3A83A2C6"
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
filePath = "DefaultProject/ViewModel/SpeechRecognizer.swift"
startingColumnNumber = "9223372036854775807"
endingColumnNumber = "9223372036854775807"
startingLineNumber = "101"
endingLineNumber = "101"
landmarkName = "transcribe()"
landmarkType = "7">
</BreakpointContent>
</BreakpointProxy>
</Breakpoints>
</Bucket>
3 changes: 2 additions & 1 deletion DefaultProject/View/ChooseLanguageView.swift
@@ -8,7 +8,7 @@
import SwiftUI
import CrowdinSDK

struct ChooseLanguageView: SwiftUI.View {
struct ChooseLanguageView: SwiftUI.View {
@AppStorage("Language") var language: String = "en"
@EnvironmentObject var coordinator: Coordinator
@State var languagesList: [Language] = [.english(.us), .vietnamese, .french, .russian]
@@ -51,6 +51,7 @@ struct ChooseLanguageView: SwiftUI.View {
.background(ele.code == language ? Color(hex: "554BD8") : Color.text)
.cornerRadius(10)
.contentShape(Rectangle())
.animation(.easeInOut, value: language)
.onTapGesture {
withAnimation {
language = ele.code
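Note on the ChooseLanguageView change: it pairs the existing withAnimation tap handler with a value-based .animation(.easeInOut, value: language) modifier, so the row styling animates whenever the stored language code changes, no matter where the change comes from. A minimal sketch of that pattern follows; LanguageRow and its hard-coded styling are illustrative assumptions, not code from this repository.

import SwiftUI

// Sketch only: highlights the selected code and animates any change of `selection`.
struct LanguageRow: View {
    let code: String
    @Binding var selection: String

    var body: some View {
        Text(code)
            .padding()
            .background(code == selection ? Color.blue : Color.gray.opacity(0.2))
            .cornerRadius(10)
            .contentShape(Rectangle())
            // Value-based animation: fires whenever `selection` changes.
            .animation(.easeInOut, value: selection)
            .onTapGesture {
                // Mirrors the diff, which also wraps the assignment in withAnimation.
                withAnimation { selection = code }
            }
    }
}
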
21 changes: 21 additions & 0 deletions DefaultProject/View/History/HistoryView.swift
@@ -51,6 +51,13 @@ struct HistoryView: View {
.contentShape(Rectangle()).gesture(DragGesture())
.onAppear{
answerCorrect = CONSTANT.SHARED.DATA_HISTORY[index].answer
DispatchQueue.main.asyncAfter(deadline: .now() + 0.75 ,execute: {
if !synthesizer.isSpeaking{
speakText(textToSpeak: CONSTANT.SHARED.DATA_HISTORY[index].question.cw_localized)
}else{
synthesizer.stopSpeaking(at: .immediate)
}
})
}
}
}
@@ -71,5 +78,19 @@ struct HistoryView: View {
PopupScoreView(isShowPopup: $isShowPopup, countCorrect: $countCorrect, countWrong: $countWrong, totalQuestion: CONSTANT.SHARED.DATA_HISTORY.count)
}
}

func speakText(textToSpeak: String) {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, mode: .default, options: .defaultToSpeaker)
try AVAudioSession.sharedInstance().setActive(true, options: .notifyOthersOnDeactivation)
} catch {
print("audioSession properties weren't set because of an error.")
}

let utterance = AVSpeechUtterance(string: textToSpeak)
utterance.voice = AVSpeechSynthesisVoice(language: language)
utterance.rate = 0.3
synthesizer.speak(utterance)
}
}
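
The same speakText(textToSpeak:) helper and the delayed auto-speak block in .onAppear are repeated below in ListeningView, ListenAndRepeatView, and WritingView. A hedged sketch of how that duplicated logic could be pulled into one shared object follows; AutoSpeaker is a hypothetical name and is not part of this commit.

import AVFoundation

// Hypothetical consolidation: owns the synthesizer, configures the audio session,
// and reproduces the delayed auto-speak branch the views currently copy.
final class AutoSpeaker {
    private let synthesizer = AVSpeechSynthesizer()

    // Same session setup as the diff: play through the speaker while recording stays available.
    private func configureSession() {
        do {
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: .defaultToSpeaker)
            try AVAudioSession.sharedInstance().setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            print("audio session setup failed: \(error)")
        }
    }

    // Speak `text` after `delay` seconds unless speech is already running, in which case stop it immediately.
    func autoSpeak(_ text: String, language: String, delay: TimeInterval = 0.75) {
        DispatchQueue.main.asyncAfter(deadline: .now() + delay) { [weak self] in
            guard let self else { return }
            if self.synthesizer.isSpeaking {
                self.synthesizer.stopSpeaking(at: .immediate)
                return
            }
            self.configureSession()
            let utterance = AVSpeechUtterance(string: text)
            utterance.voice = AVSpeechSynthesisVoice(language: language)
            utterance.rate = 0.3
            self.synthesizer.speak(utterance)
        }
    }
}

Each view's .onAppear could then call a single speaker.autoSpeak(question, language: language) instead of keeping its own copy of speakText.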

2 changes: 0 additions & 2 deletions DefaultProject/View/Home/HomeView.swift
@@ -54,8 +54,6 @@ struct HomeView: View {
.padding(.horizontal)
.background(Color(red: 0.89, green: 0.79, blue: 0.98))
.onAppear{
print(CONSTANT.SHARED.DATA_COLOR)

switch language{
case "en":
selectedLanguage = .english(.us)
21 changes: 21 additions & 0 deletions DefaultProject/View/Listen/ListeningView.swift
@@ -50,6 +50,13 @@ struct ListeningView: View {
.contentShape(Rectangle()).gesture(DragGesture())
.onAppear{
answerCorrect = CONSTANT.SHARED.DATA_LISTEN[index].answer
DispatchQueue.main.asyncAfter(deadline: .now() + 0.75 ,execute: {
if !synthesizer.isSpeaking{
speakText(textToSpeak: CONSTANT.SHARED.DATA_LISTEN[index].answer.cw_localized)
}else{
synthesizer.stopSpeaking(at: .immediate)
}
})
}
}
}
@@ -70,5 +77,19 @@ struct ListeningView: View {
PopupScoreView(isShowPopup: $isShowPopup, countCorrect: $countCorrect, countWrong: $countWrong, totalQuestion: CONSTANT.SHARED.DATA_LISTEN.count)
}
}

func speakText(textToSpeak: String) {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, mode: .default, options: .defaultToSpeaker)
try AVAudioSession.sharedInstance().setActive(true, options: .notifyOthersOnDeactivation)
} catch {
print("audioSession properties weren't set because of an error.")
}

let utterance = AVSpeechUtterance(string: textToSpeak)
utterance.voice = AVSpeechSynthesisVoice(language: language)
utterance.rate = 0.3
synthesizer.speak(utterance)
}
}

14 changes: 14 additions & 0 deletions DefaultProject/View/ListenEndRepeat/ListenAndRepeatView.swift
@@ -58,6 +58,13 @@ struct ListenAndRepeatView: View {
.contentShape(Rectangle()).gesture(DragGesture())
.onAppear{
answerCorrect = CONSTANT.SHARED.DATA_LISTEN_AND_REPEAT[index].answer
DispatchQueue.main.asyncAfter(deadline: .now() + 0.8 ,execute: {
if !synthesizer.isSpeaking{
speakText(textToSpeak: CONSTANT.SHARED.DATA_LISTEN_AND_REPEAT[index].answer.cw_localized)
}else{
synthesizer.stopSpeaking(at: .immediate)
}
})
}
}
}
@@ -202,6 +209,13 @@ struct ListenAndRepeatView: View {
speechRecognizer.transcript = ""
}

func speakText(textToSpeak: String) {
let utterance = AVSpeechUtterance(string: textToSpeak)
utterance.voice = AVSpeechSynthesisVoice(language: language)
utterance.rate = 0.4
synthesizer.speak(utterance)
}

func resetSpeak(){
speechRecognizer.transcript = ""
speechRecognizer.reset()
@@ -173,7 +173,7 @@ struct SurroundingObjectView: View {
.onAppear{
listText.removeAll()
listImg.removeAll()

print(CONSTANT.SHARED.DATA_SURROUNDING)
if listText.isEmpty{
listText = CONSTANT.SHARED.DATA_SURROUNDING
listText = listText.shuffled()
23 changes: 23 additions & 0 deletions DefaultProject/View/Writing/WritingView.swift
@@ -52,6 +52,15 @@ struct WritingView: View {
TabView(selection: $selectedTab) {
ForEach(CONSTANT.SHARED.DATA_WRITING.indices, id: \.self) { index in
QuizWritingContentView(audioPlayer: audioPlayer ,synthesizer: synthesizer, textWriting: $textWriting, answer: $answer, countCorrect: $countCorrect, countWrong: $countWrong, index: index, selectedTab: $selectedTab, isCorrect: $isCorrect, isShowPopup: $isShowPopup, isShowPopupCheck: $isShowPopupCheck)
.onAppear{
DispatchQueue.main.asyncAfter(deadline: .now() + 0.8 ,execute: {
if !synthesizer.isSpeaking{
speakText(textToSpeak: CONSTANT.SHARED.DATA_WRITING[index].answer.cw_localized)
}else{
synthesizer.stopSpeaking(at: .immediate)
}
})
}
}
}
.tabViewStyle(.page(indexDisplayMode: .never))
@@ -121,6 +130,20 @@ struct WritingView: View {
}
}

func speakText(textToSpeak: String) {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, mode: .default, options: .defaultToSpeaker)
try AVAudioSession.sharedInstance().setActive(true, options: .notifyOthersOnDeactivation)
} catch {
print("audioSession properties weren't set because of an error.")
}

let utterance = AVSpeechUtterance(string: textToSpeak)
utterance.voice = AVSpeechSynthesisVoice(language: language)
utterance.rate = 0.3
synthesizer.speak(utterance)
}

func loadAudio(nameSound: String) {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, mode: .default, options: .defaultToSpeaker)
4 changes: 3 additions & 1 deletion DefaultProject/ViewModel/SpeechRecognizer.swift
@@ -12,7 +12,7 @@ import SwiftUI

// A helper for transcribing speech to text using SFSpeechRecognizer and AVAudioEngine.
class SpeechRecognizer: ObservableObject {
@AppStorage("Language") var language: String = "en"
@AppStorage("Language") var language: String = "en"
enum RecognizerError: Error {
case nilRecognizer
case notAuthorizedToRecognize
@@ -97,6 +97,8 @@ class SpeechRecognizer: ObservableObject {

if let result = result {
self.speak(result.bestTranscription.formattedString)

print(result.bestTranscription.segments)
}
}
} catch {
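The added print(result.bestTranscription.segments) dumps an array of SFTranscriptionSegment values. Each segment carries the recognized substring along with its timestamp, duration, and confidence, so a more readable debug log could look like the sketch below; logSegments is a hypothetical helper, not part of the commit.

import Speech

// Sketch only: print one line per recognized segment instead of the raw array.
func logSegments(of result: SFSpeechRecognitionResult) {
    for segment in result.bestTranscription.segments {
        print("\"\(segment.substring)\" at \(segment.timestamp)s for \(segment.duration)s, confidence \(segment.confidence)")
    }
}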
