diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift
index d1f95d0bc99..d5a47c92f4d 100644
--- a/submodules/Camera/Sources/Camera.swift
+++ b/submodules/Camera/Sources/Camera.swift
@@ -171,6 +171,7 @@ private final class CameraContext {
         self.positionValue = configuration.position
         self._positionPromise = ValuePromise(configuration.position)
+        self.audioEnabled = configuration.audio
         
         self.setDualCameraEnabled(configuration.isDualEnabled, change: false)
         
@@ -274,7 +275,7 @@ private final class CameraContext {
         let preferWide = self.initialConfiguration.preferWide || isRoundVideo
         let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || isRoundVideo
         
-        mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate, switchAudio: !isRoundVideo)
+        mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate, switchAudio: !isRoundVideo)
         if isRoundVideo {
             mainDeviceContext.output.markPositionChange(position: targetPosition)
         }
@@ -297,7 +298,7 @@ private final class CameraContext {
             let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && self.initialConfiguration.isRoundVideo)
             let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
             
-            self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
+            self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
             
             self.queue.after(0.5) {
                 self.modeChange = .none
@@ -305,6 +306,17 @@ private final class CameraContext {
         }
     }
     
+    private var audioEnabled: Bool = false
+    public func attachAudio() {
+        self.configure {
+            self.mainDeviceContext?.invalidate(switchAudio: true)
+            let preferWide = self.initialConfiguration.preferWide || self.initialConfiguration.isRoundVideo
+            let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
+            
+            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: true, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
+        }
+    }
+    
     private var micLevelPeak: Int16 = 0
     private var micLevelPeakCount = 0
     
@@ -323,7 +335,7 @@ private final class CameraContext {
             self.configure {
                 self.mainDeviceContext?.invalidate()
                 self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
-                self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
+                self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
                 
                 self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
                 self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
@@ -368,7 +380,7 @@ private final class CameraContext {
            let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
            
            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
-            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
+            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
        }
        self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
            guard let self, let mainDeviceContext = self.mainDeviceContext else {
@@ -910,6 +922,14 @@ public final class Camera {
         }
     }
     
+    public func attachAudio() {
+        self.queue.async {
+            if let context = self.contextRef?.takeUnretainedValue() {
+                context.attachAudio()
+            }
+        }
+    }
+    
     public func setTorchActive(_ active: Bool) {
         self.queue.async {
             if let context = self.contextRef?.takeUnretainedValue() {
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
index 3051825902e..40239b34e2c 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
@@ -229,7 +229,8 @@ private final class CameraScreenComponent: CombinedComponent {
         private let getController: () -> CameraScreen?
         
         private var resultDisposable = MetaDisposable()
-        
+        private var audioCaptureDisposable = MetaDisposable()
+        
         private var mediaAssetsContext: MediaAssetsContext?
         fileprivate var lastGalleryAsset: PHAsset?
         private var lastGalleryAssetsDisposable: Disposable?
@@ -283,6 +284,10 @@ private final class CameraScreenComponent: CombinedComponent {
         deinit {
             self.lastGalleryAssetsDisposable?.dispose()
             self.resultDisposable.dispose()
+            self.audioCaptureDisposable.dispose()
+            if #available(iOS 13.0, *) {
+                try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
+            }
         }
         
         func setupRecentAssetSubscription() {
@@ -663,9 +668,22 @@ private final class CameraScreenComponent: CombinedComponent {
                }))
            }
            
+            let startCapturingSound = {
+                self.audioCaptureDisposable.set(self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .record(speaker: false, video: true, withOthers: true), activate: { _ in
+                    if #available(iOS 13.0, *) {
+                        try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
+                    }
+                    camera.attachAudio()
+                    startRecording()
+                }, deactivate: { _ in
+                    return .single(Void())
+                })
+                )
+            }
+            
            controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(0.0) }, transition: .spring(duration: 0.4))
            
-            startRecording()
+            startCapturingSound()
        }
        
        func stopVideoRecording() {
@@ -696,6 +714,10 @@ private final class CameraScreenComponent: CombinedComponent {
                self.isTransitioning = false
                self.updated(transition: .immediate)
            })
+            self.audioCaptureDisposable.dispose()
+            if #available(iOS 13.0, *) {
+                try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
+            }
            
            controller.updateCameraState({ $0.updatedRecording(.none).updatedDuration(0.0) }, transition: .spring(duration: 0.4))
            
@@ -1770,7 +1792,7 @@ public class CameraScreen: ViewController {
                    preset: .hd1920x1080,
                    position: self.cameraState.position,
                    isDualEnabled: self.cameraState.isDualCameraEnabled,
-                    audio: true,
+                    audio: false,
                    photo: true,
                    metadata: false
                ),
@@ -2789,8 +2811,6 @@ public class CameraScreen: ViewController {
     public var transitionedIn: () -> Void = {}
     public var transitionedOut: () -> Void = {}
     
-    private var audioSessionDisposable: Disposable?
-    
     private let postingAvailabilityPromise = Promise()
     private var postingAvailabilityDisposable: Disposable?
     
@@ -2835,8 +2855,6 @@ public class CameraScreen: ViewController {
         
         self.navigationPresentation = .flatModal
         
-        self.requestAudioSession()
-        
         if case .story = mode {
             self.postingAvailabilityPromise.set(self.context.engine.messages.checkStoriesUploadAvailability(target: .myStories))
         }
@@ -2847,11 +2865,7 @@ public class CameraScreen: ViewController {
     }
     
     deinit {
-        self.audioSessionDisposable?.dispose()
         self.postingAvailabilityDisposable?.dispose()
-        if #available(iOS 13.0, *) {
-            try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
-        }
     }
     
     override public func loadDisplayNode() {
@@ -2917,16 +2931,6 @@ public class CameraScreen: ViewController {
         }
     }
     
-    private func requestAudioSession() {
-        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .record(speaker: false, video: true, withOthers: true), activate: { _ in
-            if #available(iOS 13.0, *) {
-                try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
-            }
-        }, deactivate: { _ in
-            return .single(Void())
-        })
-    }
-    
     private var galleryController: ViewController?
     public func returnFromEditor() {
         self.node.animateInFromEditor(toGallery: self.galleryController?.displayNode.supernode != nil)
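
Net effect: the camera session is now created with `audio: false`, and the microphone is attached only for the duration of a video recording. The screen pushes a `.record` audio session when recording starts, calls the new `Camera.attachAudio()` once the session activates, and disposes of it (restoring the default haptics behavior) when recording stops or the component deinits. Below is a condensed sketch of the resulting call order, assuming `camera`, `audioSession`, `audioCaptureDisposable`, and `startRecording` are bound as in the diff; it is illustrative only and not part of the patch:

// Recording start: acquire the mic lazily, then begin writing.
audioCaptureDisposable.set(audioSession.push(
    audioSessionType: .record(speaker: false, video: true, withOthers: true),
    activate: { _ in
        if #available(iOS 13.0, *) {
            // Keep shutter sounds and haptics alive while the mic is captured.
            try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
        }
        camera.attachAudio()  // reconfigures the device context with audio: true
        startRecording()      // writing begins only after audio input is attached
    },
    deactivate: { _ in .single(Void()) }
))

// Recording stop (and deinit): release the mic and restore default haptics.
audioCaptureDisposable.dispose()
if #available(iOS 13.0, *) {
    try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
}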