diff --git a/Examples/iOS/Screencast/SampleHandler.swift b/Examples/iOS/Screencast/SampleHandler.swift index 26013f5e3..371cdb511 100644 --- a/Examples/iOS/Screencast/SampleHandler.swift +++ b/Examples/iOS/Screencast/SampleHandler.swift @@ -24,8 +24,9 @@ open class SampleHandler: RPBroadcastSampleHandler { }() private lazy var rtmpStream: RTMPStream = { - FeatureUtil.setEnabled(for: .multiTrackAudioMixing, isEnabled: true) - return RTMPStream(connection: rtmpConnection) + let stream = RTMPStream(connection: rtmpConnection) + stream.isMultiTrackAudioMixingEnabled = true + return stream }() private var needVideoConfiguration = true diff --git a/Examples/macOS/AppDelegate.swift b/Examples/macOS/AppDelegate.swift index 60090a500..aab82513e 100644 --- a/Examples/macOS/AppDelegate.swift +++ b/Examples/macOS/AppDelegate.swift @@ -10,6 +10,5 @@ class AppDelegate: NSObject, NSApplicationDelegate { func applicationDidFinishLaunching(_ aNotification: Notification) { LBLogger.with(HaishinKitIdentifier).level = .info - FeatureUtil.setEnabled(for: .multiTrackAudioMixing, isEnabled: true) } } diff --git a/Examples/macOS/CameraIngestViewController.swift b/Examples/macOS/CameraIngestViewController.swift index fc4515f66..5e5174b92 100644 --- a/Examples/macOS/CameraIngestViewController.swift +++ b/Examples/macOS/CameraIngestViewController.swift @@ -34,6 +34,8 @@ final class CameraIngestViewController: NSViewController { override func viewDidAppear() { super.viewDidAppear() + + stream.isMultiTrackAudioMixingEnabled = true stream.videoMixerSettings.mode = .offscreen stream.screen.startRunning() @@ -80,7 +82,7 @@ final class CameraIngestViewController: NSViewController { var audios = AVCaptureDevice.devices(for: .audio) audios.removeFirst() - if let device = audios.first, FeatureUtil.isEnabled(for: .multiTrackAudioMixing) { + if let device = audios.first, stream.isMultiTrackAudioMixingEnabled { stream.attachAudio(device, track: 1) } diff --git a/HaishinKit.xcodeproj/project.pbxproj 
b/HaishinKit.xcodeproj/project.pbxproj index 95f4daa1b..2cf4bf667 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -181,7 +181,6 @@ BC4078C42AD5CC7E00BBB4FA /* IOMuxer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */; }; BC4231642BCA5F28003A80DC /* IOAudioMixerByMultiTrack.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4231632BCA5F28003A80DC /* IOAudioMixerByMultiTrack.swift */; }; BC42316A2BCA8BE5003A80DC /* IOAudioMixerBySingleTrack.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4231692BCA8BE5003A80DC /* IOAudioMixerBySingleTrack.swift */; }; - BC42316C2BCB7084003A80DC /* FeatureUtil.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC42316B2BCB7084003A80DC /* FeatureUtil.swift */; }; BC4914A228DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */; }; BC4914A628DDD367009E2DF6 /* VTSessionOption.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A528DDD367009E2DF6 /* VTSessionOption.swift */; }; BC4914AE28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */; }; @@ -661,7 +660,6 @@ BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOMuxer.swift; sourceTree = ""; }; BC4231632BCA5F28003A80DC /* IOAudioMixerByMultiTrack.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioMixerByMultiTrack.swift; sourceTree = ""; }; BC4231692BCA8BE5003A80DC /* IOAudioMixerBySingleTrack.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioMixerBySingleTrack.swift; sourceTree = ""; }; - BC42316B2BCB7084003A80DC /* FeatureUtil.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
FeatureUtil.swift; sourceTree = ""; }; BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionConvertible.swift; sourceTree = ""; }; BC4914A528DDD367009E2DF6 /* VTSessionOption.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionOption.swift; sourceTree = ""; }; BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VTDecompressionSession+Extension.swift"; sourceTree = ""; }; @@ -877,7 +875,6 @@ BC0D236C26331BAB001DDA0C /* DataBuffer.swift */, 29B876671CD70AB300FC07DA /* DataConvertible.swift */, 2976A4851D4903C300B53EF2 /* DeviceUtil.swift */, - BC42316B2BCB7084003A80DC /* FeatureUtil.swift */, BC32E88729C9971100051507 /* InstanceHolder.swift */, 2942424C1CF4C01300D65DCB /* MD5.swift */, 2942A4F721A9418A004E1BEE /* Running.swift */, @@ -1920,7 +1917,6 @@ BC0F1FDC2ACC630400C326FF /* NSView+Extension.swift in Sources */, BC16019C2BE0E4750061BD3E /* ScreenObject.swift in Sources */, 29EA87E21E79A1E90043A5F8 /* CMVideoFormatDescription+Extension.swift in Sources */, - BC42316C2BCB7084003A80DC /* FeatureUtil.swift in Sources */, BC110253292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */, BC1DC4A429F4F74F00E928ED /* AVCaptureSession+Extension.swift in Sources */, 29EA87D81E79A0090043A5F8 /* URL+Extension.swift in Sources */, diff --git a/README.md b/README.md index 830148ed4..7dc17ca4c 100644 --- a/README.md +++ b/README.md @@ -308,9 +308,9 @@ stream.attachAudio(front, track: 0) { audioUnit, error in ``` #### [AudioMixerSettings](https://shogo4405.github.io/HaishinKit.swift/Structs/IOAudioMixerSettings.html) -If you want to mix multiple audio tracks, please enable the Feature flag. +If you want to mix multiple audio tracks, please enable the feature flag. 
```swift -FeatureUtil.setEnabled(for: .multiTrackAudioMixing, isEnabled: true) +stream.isMultiTrackAudioMixingEnabled = true ``` When you specify the sampling rate, it will perform resampling. Additionally, in the case of multiple channels, downsampling can be applied. diff --git a/Sources/IO/IOAudioUnit.swift b/Sources/IO/IOAudioUnit.swift index 5dbd51ea5..bdd696334 100644 --- a/Sources/IO/IOAudioUnit.swift +++ b/Sources/IO/IOAudioUnit.swift @@ -25,15 +25,6 @@ protocol IOAudioUnitDelegate: AnyObject { final class IOAudioUnit: IOUnit { let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.IOAudioUnit.lock") weak var mixer: IOMixer? - var isMonitoringEnabled = false { - didSet { - if isMonitoringEnabled { - monitor.startRunning() - } else { - monitor.stopRunning() - } - } - } var settings: AudioCodecSettings { get { codec.settings @@ -50,6 +41,16 @@ final class IOAudioUnit: IOUnit { audioMixer.settings = newValue } } + var isMonitoringEnabled = false { + didSet { + if isMonitoringEnabled { + monitor.startRunning() + } else { + monitor.stopRunning() + } + } + } + var isMultiTrackAudioMixingEnabled = false var isRunning: Atomic { return codec.isRunning } @@ -65,7 +66,7 @@ final class IOAudioUnit: IOUnit { return codec }() private lazy var audioMixer: any IOAudioMixerConvertible = { - if FeatureUtil.isEnabled(for: .multiTrackAudioMixing) { + if isMultiTrackAudioMixingEnabled { var audioMixer = IOAudioMixerByMultiTrack() audioMixer.delegate = self return audioMixer diff --git a/Sources/IO/IOStream.swift b/Sources/IO/IOStream.swift index 3955ed5cc..cd36d51a3 100644 --- a/Sources/IO/IOStream.swift +++ b/Sources/IO/IOStream.swift @@ -145,6 +145,17 @@ open class IOStream: NSObject { } #endif + /// Specifies the feature to mix multiple audio tracks. For example, it is possible to mix .appAudio and .micAudio from ReplayKit. + /// Warning: If there is a possibility of using this feature, please set it to true initially. 
+ public var isMultiTrackAudioMixingEnabled: Bool { + get { + return mixer.audioIO.isMultiTrackAudioMixingEnabled + } + set { + mixer.audioIO.isMultiTrackAudioMixingEnabled = newValue + } + } + /// Specifies the sessionPreset for the AVCaptureSession. @available(tvOS 17.0, *) public var sessionPreset: AVCaptureSession.Preset { diff --git a/Sources/IO/PiPHKView.swift b/Sources/IO/PiPHKView.swift index 6de6138cd..fc908c383 100644 --- a/Sources/IO/PiPHKView.swift +++ b/Sources/IO/PiPHKView.swift @@ -204,10 +204,6 @@ public class PiPHKView: NSView { super.init(coder: aDecoder) } - deinit { - attachStream(nil) - } - /// Prepares the receiver for service after it has been loaded from an Interface Builder archive, or nib file. override public func awakeFromNib() { super.awakeFromNib() diff --git a/Sources/Util/FeatureUtil.swift b/Sources/Util/FeatureUtil.swift deleted file mode 100644 index e53bcd083..000000000 --- a/Sources/Util/FeatureUtil.swift +++ /dev/null @@ -1,43 +0,0 @@ -import Foundation - -/// The util object to get feature flag info. -public enum FeatureUtil { - /// A structure that defines the name of a feature. - public struct Name: Sendable, RawRepresentable, ExpressibleByStringLiteral { - // swiftlint:disable:next nesting - public typealias RawValue = String - // swiftlint:disable:next nesting - public typealias StringLiteralType = String - - /// This is a feature to mix multiple audio tracks. For example, it is possible to mix .appAudio and .micAudio from ReplayKit. - public static let multiTrackAudioMixing: Name = "multiTrackAudioMixing" - - /// The raw type value. - public let rawValue: String - - /// Create a feature name by rawValue. - public init(rawValue: String) { - self.rawValue = rawValue - } - - /// Create a feature name by stringLiteral. - public init(stringLiteral value: String) { - self.rawValue = value - } - } - - private static var flags: [String: Bool] = [:] - - /// Whether or not a flag is enabled. 
- public static func isEnabled(for feature: Name) -> Bool { - return flags[feature.rawValue] ?? false - } - - /// Setter for a feature flag. - public static func setEnabled( - for feature: Name, - isEnabled: Bool - ) { - flags[feature.rawValue] = isEnabled - } -}