Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Take playAndRecord permission only when recording (fixes #1209) #1401

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 24 additions & 4 deletions submodules/Camera/Sources/Camera.swift
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,7 @@ private final class CameraContext {

self.positionValue = configuration.position
self._positionPromise = ValuePromise<Camera.Position>(configuration.position)
self.audioEnabled = configuration.audio

self.setDualCameraEnabled(configuration.isDualEnabled, change: false)

Expand Down Expand Up @@ -274,7 +275,7 @@ private final class CameraContext {
let preferWide = self.initialConfiguration.preferWide || isRoundVideo
let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || isRoundVideo

mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate, switchAudio: !isRoundVideo)
mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate, switchAudio: !isRoundVideo)
if isRoundVideo {
mainDeviceContext.output.markPositionChange(position: targetPosition)
}
Expand All @@ -297,14 +298,25 @@ private final class CameraContext {
let preferWide = self.initialConfiguration.preferWide || (self.positionValue == .front && self.initialConfiguration.isRoundVideo)
let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo

self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)

self.queue.after(0.5) {
self.modeChange = .none
}
}
}

private var audioEnabled: Bool = false
/// Attaches the audio input to the running capture session on demand.
///
/// The camera is initially configured without audio (so the record-capable
/// audio session is only taken while actually recording); this tears down
/// the current device context's audio wiring and reconfigures it with
/// `audio: true`.
public func attachAudio() {
    self.configure {
        // Persist the new audio state. Without this, any subsequent
        // reconfiguration (camera position change, dual-camera toggle)
        // reads `self.audioEnabled == false` and silently detaches the
        // audio input again mid-recording.
        self.audioEnabled = true

        self.mainDeviceContext?.invalidate(switchAudio: true)

        // Mirror the wide/low-framerate preferences used by the other
        // configure call sites so reattaching audio does not alter video.
        let preferWide = self.initialConfiguration.preferWide || self.initialConfiguration.isRoundVideo
        let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo

        self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: true, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
    }
}

private var micLevelPeak: Int16 = 0
private var micLevelPeakCount = 0

Expand All @@ -323,7 +335,7 @@ private final class CameraContext {
self.configure {
self.mainDeviceContext?.invalidate()
self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)

self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
Expand Down Expand Up @@ -368,7 +380,7 @@ private final class CameraContext {
let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo

self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.audioEnabled, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
}
self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
guard let self, let mainDeviceContext = self.mainDeviceContext else {
Expand Down Expand Up @@ -910,6 +922,14 @@ public final class Camera {
}
}

/// Forwards an audio-attach request to the camera context on its queue.
public func attachAudio() {
    self.queue.async {
        guard let context = self.contextRef?.takeUnretainedValue() else {
            return
        }
        context.attachAudio()
    }
}

public func setTorchActive(_ active: Bool) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,8 @@ private final class CameraScreenComponent: CombinedComponent {
private let getController: () -> CameraScreen?

private var resultDisposable = MetaDisposable()

private var audioCaptureDisposable = MetaDisposable()

private var mediaAssetsContext: MediaAssetsContext?
fileprivate var lastGalleryAsset: PHAsset?
private var lastGalleryAssetsDisposable: Disposable?
Expand Down Expand Up @@ -283,6 +284,10 @@ private final class CameraScreenComponent: CombinedComponent {
deinit {
// Tear down all live subscriptions held by this state object.
self.lastGalleryAssetsDisposable?.dispose()
self.resultDisposable.dispose()
// Releases the record-capable audio session taken for video capture.
self.audioCaptureDisposable.dispose()
if #available(iOS 13.0, *) {
// Restore the system default: haptics/system sounds were only
// allowed during recording while this screen was capturing.
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
}
}

func setupRecentAssetSubscription() {
Expand Down Expand Up @@ -663,9 +668,22 @@ private final class CameraScreenComponent: CombinedComponent {
}))
}

let startCapturingSound = {
self.audioCaptureDisposable.set(self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .record(speaker: false, video: true, withOthers: true), activate: { _ in
if #available(iOS 13.0, *) {
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
}
camera.attachAudio()
startRecording()
}, deactivate: { _ in
return .single(Void())
})
)
}

controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(0.0) }, transition: .spring(duration: 0.4))

startRecording()
startCapturingSound()
}

func stopVideoRecording() {
Expand Down Expand Up @@ -696,6 +714,10 @@ private final class CameraScreenComponent: CombinedComponent {
self.isTransitioning = false
self.updated(transition: .immediate)
})
self.audioCaptureDisposable.dispose()
if #available(iOS 13.0, *) {
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
}

controller.updateCameraState({ $0.updatedRecording(.none).updatedDuration(0.0) }, transition: .spring(duration: 0.4))

Expand Down Expand Up @@ -1770,7 +1792,7 @@ public class CameraScreen: ViewController {
preset: .hd1920x1080,
position: self.cameraState.position,
isDualEnabled: self.cameraState.isDualCameraEnabled,
audio: true,
audio: false,
photo: true,
metadata: false
),
Expand Down Expand Up @@ -2789,8 +2811,6 @@ public class CameraScreen: ViewController {
public var transitionedIn: () -> Void = {}
public var transitionedOut: () -> Void = {}

private var audioSessionDisposable: Disposable?

private let postingAvailabilityPromise = Promise<StoriesUploadAvailability>()
private var postingAvailabilityDisposable: Disposable?

Expand Down Expand Up @@ -2835,8 +2855,6 @@ public class CameraScreen: ViewController {

self.navigationPresentation = .flatModal

self.requestAudioSession()

if case .story = mode {
self.postingAvailabilityPromise.set(self.context.engine.messages.checkStoriesUploadAvailability(target: .myStories))
}
Expand All @@ -2847,11 +2865,7 @@ public class CameraScreen: ViewController {
}

deinit {
// NOTE(review): this span is rendered diff context — some of these lines
// are removed by the PR; verify against the merged file before relying on it.
self.audioSessionDisposable?.dispose()
self.postingAvailabilityDisposable?.dispose()
if #available(iOS 13.0, *) {
// Reset the haptics-during-recording override on teardown.
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
}
}

override public func loadDisplayNode() {
Expand Down Expand Up @@ -2917,16 +2931,6 @@ public class CameraScreen: ViewController {
}
}

/// Acquires the shared media-manager audio session in record mode
/// (non-speaker, video, mixable with others) for the lifetime of the screen.
/// NOTE(review): this is the eager acquisition the PR removes in favor of
/// taking the session only while recording (see `attachAudio`).
private func requestAudioSession() {
self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .record(speaker: false, video: true, withOthers: true), activate: { _ in
if #available(iOS 13.0, *) {
// Once the session is active, keep haptics and system sounds
// audible while recording (iOS 13+ API).
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
}
}, deactivate: { _ in
// No async teardown work needed; complete immediately.
return .single(Void())
})
}

private var galleryController: ViewController?
public func returnFromEditor() {
self.node.animateInFromEditor(toGallery: self.galleryController?.displayNode.supernode != nil)
Expand Down