diff --git a/.swiftlint.yml b/.swiftlint.yml
index 223ef24db..c3b611bf8 100644
--- a/.swiftlint.yml
+++ b/.swiftlint.yml
@@ -19,7 +19,7 @@ excluded:
 opt_in_rules:
   # - anyobject_protocol
   - array_init
-  - attributes
+# - attributes
   # - closure_body_length
   - closure_end_indentation
   - closure_spacing
diff --git a/Examples/iOS/IngestViewController.swift b/Examples/iOS/IngestViewController.swift
index 6dd03290d..ce2859bcc 100644
--- a/Examples/iOS/IngestViewController.swift
+++ b/Examples/iOS/IngestViewController.swift
@@ -18,7 +18,6 @@ final class IngestViewController: UIViewController {
     @IBOutlet private weak var audioDevicePicker: UIPickerView!
     @IBOutlet private weak var audioMonoStereoSegmentCOntrol: UISegmentedControl!
 
-    private var pipIntentView = UIView()
     private var currentEffect: VideoEffect?
     private var currentPosition: AVCaptureDevice.Position = .back
     private var retryCount: Int = 0
@@ -32,17 +31,22 @@ final class IngestViewController: UIViewController {
         audioCapture.delegate = self
         return audioCapture
     }()
+    private var videoScreenObject = VideoTrackScreenObject()
 
     override func viewDidLoad() {
         super.viewDidLoad()
         netStreamSwitcher.uri = Preference.defaultInstance.uri ?? ""
-        pipIntentView.layer.borderWidth = 1.0
-        pipIntentView.layer.borderColor = UIColor.white.cgColor
-        pipIntentView.bounds = IOVideoMixerSettings.default.regionOfInterest
-        pipIntentView.isUserInteractionEnabled = true
-        view.addSubview(pipIntentView)
+        stream.videoMixerSettings.size = .init(width: 720, height: 1280)
+        stream.screen.backgroundColor = UIColor.white.cgColor
+
+        videoScreenObject.cornerRadius = 16.0
+        videoScreenObject.track = 1
+        videoScreenObject.horizontalAlignment = .right
+        videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
+        videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
+        try? stream.screen.addChild(videoScreenObject)
 
         // If you want to use the multi-camera feature, please make sure stream.isMultiCamSessionEnabled = true before calling attachCamera or attachAudio.
         stream.isMultiCamSessionEnabled = true
@@ -95,6 +99,15 @@ final class IngestViewController: UIViewController {
         NotificationCenter.default.removeObserver(self)
     }
 
+    override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
+        super.viewWillTransition(to: size, with: coordinator)
+        if UIDevice.current.orientation.isLandscape {
+            stream.videoMixerSettings.size = .init(width: 1280, height: 720)
+        } else {
+            stream.videoMixerSettings.size = .init(width: 720, height: 1280)
+        }
+    }
+
     // swiftlint:disable:next block_based_kvo
     override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
         if Thread.isMainThread {
@@ -102,35 +115,15 @@ final class IngestViewController: UIViewController {
         }
     }
 
-    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?)
{ - guard let touch = touches.first else { - return - } - if touch.view == pipIntentView { - let destLocation = touch.location(in: view) - let prevLocation = touch.previousLocation(in: view) - var currentFrame = pipIntentView.frame - let deltaX = destLocation.x - prevLocation.x - let deltaY = destLocation.y - prevLocation.y - currentFrame.origin.x += deltaX - currentFrame.origin.y += deltaY - pipIntentView.frame = currentFrame - stream.videoMixerSettings = IOVideoMixerSettings( - mode: stream.videoMixerSettings.mode, - cornerRadius: 16.0, - regionOfInterest: currentFrame, - direction: .east - ) - } - } - @IBAction func rotateCamera(_ sender: UIButton) { logger.info("rotateCamera") if stream.isMultiCamSessionEnabled { if stream.videoMixerSettings.mainTrack == 0 { stream.videoMixerSettings.mainTrack = 1 + videoScreenObject.track = 0 } else { stream.videoMixerSettings.mainTrack = 0 + videoScreenObject.track = 1 } } else { let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back diff --git a/Examples/macOS/CameraIngestViewController.swift b/Examples/macOS/CameraIngestViewController.swift index b86dfb36d..efc8a77eb 100644 --- a/Examples/macOS/CameraIngestViewController.swift +++ b/Examples/macOS/CameraIngestViewController.swift @@ -21,6 +21,7 @@ final class CameraIngestViewController: NSViewController { private var stream: IOStream { return netStreamSwitcher.stream } + private var textScreenObject = TextScreenObject() override func viewDidLoad() { super.viewDidLoad() @@ -33,6 +34,46 @@ final class CameraIngestViewController: NSViewController { override func viewDidAppear() { super.viewDidAppear() + + textScreenObject.horizontalAlignment = .right + textScreenObject.verticalAlignment = .bottom + textScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 16) + + stream.screen.backgroundColor = NSColor.black.cgColor + + let videoScreenObject = VideoTrackScreenObject() + videoScreenObject.cornerRadius = 32.0 + videoScreenObject.track = 1 + videoScreenObject.horizontalAlignment = .right + videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16) + videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2) + _ = videoScreenObject.registerVideoEffect(MonochromeEffect()) + + let imageScreenObject = ImageScreenObject() + let imageURL = URL(fileURLWithPath: Bundle.main.path(forResource: "game_jikkyou", ofType: "png") ?? "") + if let provider = CGDataProvider(url: imageURL as CFURL) { + imageScreenObject.verticalAlignment = .bottom + imageScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 0) + imageScreenObject.cgImage = CGImage( + pngDataProviderSource: provider, + decode: nil, + shouldInterpolate: false, + intent: .defaultIntent + ) + } else { + logger.info("no image") + } + + let assetScreenObject = AssetScreenObject() + assetScreenObject.size = .init(width: 180, height: 180) + assetScreenObject.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0) + try? assetScreenObject.startReading(AVAsset(url: URL(fileURLWithPath: Bundle.main.path(forResource: "SampleVideo_360x240_5mb", ofType: "mp4") ?? ""))) + try? stream.screen.addChild(assetScreenObject) + try? stream.screen.addChild(videoScreenObject) + try? stream.screen.addChild(imageScreenObject) + try? 
stream.screen.addChild(textScreenObject) + stream.screen.delegate = self + stream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio)) var audios = AVCaptureDevice.devices(for: .audio) @@ -79,3 +120,9 @@ final class CameraIngestViewController: NSViewController { stream.attachCamera(device, track: 0) } } + +extension CameraIngestViewController: ScreenDelegate { + func screen(_ screen: Screen, willLayout time: CMTime) { + textScreenObject.string = Date().description + } +} diff --git a/Examples/macOS/game_jikkyou.png b/Examples/macOS/game_jikkyou.png new file mode 100644 index 000000000..8ae028a75 Binary files /dev/null and b/Examples/macOS/game_jikkyou.png differ diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj index f3b14f1a7..1b8fe2317 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -125,6 +125,12 @@ BC0587C12BD2A123006751C8 /* IOAudioMixerBySingleTrackTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0587C02BD2A123006751C8 /* IOAudioMixerBySingleTrackTests.swift */; }; BC0587C32BD2A5E8006751C8 /* IOAudioMixerByMultiTrackTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0587C22BD2A5E8006751C8 /* IOAudioMixerByMultiTrackTests.swift */; }; BC0587D22BD2CA7F006751C8 /* AudioStreamBasicDescription+DebugExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0587D12BD2CA7F006751C8 /* AudioStreamBasicDescription+DebugExtension.swift */; }; + BC0B5B122BE8CFA800D83F8E /* CMVideoDimention+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0B5B112BE8CFA800D83F8E /* CMVideoDimention+Extension.swift */; }; + BC0B5B142BE8DFE300D83F8E /* AVLayerVideoGravity+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0B5B132BE8DFE300D83F8E /* AVLayerVideoGravity+Extension.swift */; }; + BC0B5B172BE919D000D83F8E /* ScreenObjectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0B5B162BE919D000D83F8E /* ScreenObjectTests.swift */; }; + BC0B5B1C2BE925CA00D83F8E /* ScreenObjectContainerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0B5B1B2BE925CA00D83F8E /* ScreenObjectContainerTests.swift */; }; + BC0B5B1E2BE9310800D83F8E /* CMVideoSampleBufferFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0B5B1D2BE9310800D83F8E /* CMVideoSampleBufferFactory.swift */; }; + BC0B5B202BE931D600D83F8E /* VideoTrackScreenObjectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0B5B1F2BE931D600D83F8E /* VideoTrackScreenObjectTests.swift */; }; BC0BF4F22985FA9000D72CB4 /* HaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; }; BC0BF4F529866FDE00D72CB4 /* IOMixerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0BF4F429866FDE00D72CB4 /* IOMixerTests.swift */; }; BC0D236D26331BAB001DDA0C /* DataBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0D236C26331BAB001DDA0C /* DataBuffer.swift */; }; @@ -135,9 +141,12 @@ BC11024A2925147300D48035 /* IOCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1102492925147300D48035 /* IOCaptureUnit.swift */; }; BC110253292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */; }; BC110257292E661E00D48035 /* IOVideoMixerSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* IOVideoMixerSettings.swift */; }; + 
BC16019C2BE0E4750061BD3E /* ScreenObject.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC16019B2BE0E4750061BD3E /* ScreenObject.swift */; }; BC1720A92C03473200F65941 /* AVCDecoderConfigurationRecordTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1720A82C03473200F65941 /* AVCDecoderConfigurationRecordTests.swift */; }; BC1720B62C08C59B00F65941 /* TypedBlockQueue.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1720B52C08C59B00F65941 /* TypedBlockQueue.swift */; }; + BC1CCF582BE4D5150067198A /* game_jikkyou.png in Resources */ = {isa = PBXBuildFile; fileRef = BC1CCF572BE4D5150067198A /* game_jikkyou.png */; }; BC1CCF602BE53FE70067198A /* IOAudioTime.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1CCF5F2BE53FE70067198A /* IOAudioTime.swift */; }; + BC1CCF622BE66C220067198A /* CGImage+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1CCF612BE66C220067198A /* CGImage+Extension.swift */; }; BC1DC4A429F4F74F00E928ED /* AVCaptureSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1DC4A329F4F74F00E928ED /* AVCaptureSession+Extension.swift */; }; BC1DC5042A02894D00E928ED /* FLVVideoFourCCTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1DC5032A02894D00E928ED /* FLVVideoFourCCTests.swift */; }; BC1DC50A2A039B4400E928ED /* HEVCDecoderConfigurationRecord.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1DC5092A039B4400E928ED /* HEVCDecoderConfigurationRecord.swift */; }; @@ -237,6 +246,7 @@ BCABED212BDE23C600CC7E73 /* AudioNode+DebugExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCABED202BDE23C600CC7E73 /* AudioNode+DebugExtension.swift */; }; BCB976DF26107B5600C9A649 /* TSField.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB976DE26107B5600C9A649 /* TSField.swift */; }; BCB9773F2621812800C9A649 /* AVCFormatStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB9773E2621812800C9A649 /* AVCFormatStream.swift */; }; + BCB9D79F2BF12AD8008C5B1B /* SampleVideo_360x240_5mb.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 29B876D91CD70CE700FC07DA /* SampleVideo_360x240_5mb.mp4 */; }; BCC1A72B264FAC1800661156 /* ESSpecificData.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC1A72A264FAC1800661156 /* ESSpecificData.swift */; }; BCC4F4152AD6FC1100954EF5 /* IOTellyUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC4F4142AD6FC1100954EF5 /* IOTellyUnit.swift */; }; BCC4F43D2ADB966800954EF5 /* NetStreamSwitcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */; }; @@ -271,6 +281,9 @@ BCD63AE226FDF3500084842D /* Logboard.xcframework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; BCD8702B2BC266CD009E495B /* IOStreamObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD8702A2BC266CD009E495B /* IOStreamObserver.swift */; }; BCD91C0D2A700FF50033F9E1 /* IOAudioRingBufferTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD91C0C2A700FF50033F9E1 /* IOAudioRingBufferTests.swift */; }; + BCDEB4F82BE441D300EEC6ED /* ScreenRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCDEB4F72BE441D300EEC6ED /* ScreenRenderer.swift */; }; + BCDEB4FA2BE442F900EEC6ED /* Screen.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCDEB4F92BE442F900EEC6ED /* Screen.swift */; }; + BCDEB4FC2BE4436D00EEC6ED /* ScreenObjectContainer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
BCDEB4FB2BE4436D00EEC6ED /* ScreenObjectContainer.swift */; }; BCE0E33D2AD369550082C16F /* NetStreamSwitcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */; }; BCFB355524FA27EA00DC5108 /* PlaybackViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFB355324FA275600DC5108 /* PlaybackViewController.swift */; }; BCFB355A24FA40DD00DC5108 /* PlaybackContainerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFB355924FA40DD00DC5108 /* PlaybackContainerViewController.swift */; }; @@ -572,6 +585,12 @@ BC0587C02BD2A123006751C8 /* IOAudioMixerBySingleTrackTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioMixerBySingleTrackTests.swift; sourceTree = ""; }; BC0587C22BD2A5E8006751C8 /* IOAudioMixerByMultiTrackTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioMixerByMultiTrackTests.swift; sourceTree = ""; }; BC0587D12BD2CA7F006751C8 /* AudioStreamBasicDescription+DebugExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioStreamBasicDescription+DebugExtension.swift"; sourceTree = ""; }; + BC0B5B112BE8CFA800D83F8E /* CMVideoDimention+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMVideoDimention+Extension.swift"; sourceTree = ""; }; + BC0B5B132BE8DFE300D83F8E /* AVLayerVideoGravity+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVLayerVideoGravity+Extension.swift"; sourceTree = ""; }; + BC0B5B162BE919D000D83F8E /* ScreenObjectTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenObjectTests.swift; sourceTree = ""; }; + BC0B5B1B2BE925CA00D83F8E /* ScreenObjectContainerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenObjectContainerTests.swift; sourceTree = ""; }; + BC0B5B1D2BE9310800D83F8E /* CMVideoSampleBufferFactory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CMVideoSampleBufferFactory.swift; sourceTree = ""; }; + BC0B5B1F2BE931D600D83F8E /* VideoTrackScreenObjectTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoTrackScreenObjectTests.swift; sourceTree = ""; }; BC0BF4F429866FDE00D72CB4 /* IOMixerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOMixerTests.swift; sourceTree = ""; }; BC0D236C26331BAB001DDA0C /* DataBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DataBuffer.swift; sourceTree = ""; }; BC0F1FD42ACBD39600C326FF /* MemoryUsage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MemoryUsage.swift; sourceTree = ""; }; @@ -581,9 +600,12 @@ BC1102492925147300D48035 /* IOCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOCaptureUnit.swift; sourceTree = ""; }; BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "vImage_Buffer+Extension.swift"; sourceTree = ""; }; BC110256292E661E00D48035 /* IOVideoMixerSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOVideoMixerSettings.swift; sourceTree = ""; }; + BC16019B2BE0E4750061BD3E /* ScreenObject.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenObject.swift; 
sourceTree = ""; }; BC1720A82C03473200F65941 /* AVCDecoderConfigurationRecordTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVCDecoderConfigurationRecordTests.swift; sourceTree = ""; }; BC1720B52C08C59B00F65941 /* TypedBlockQueue.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TypedBlockQueue.swift; sourceTree = ""; }; + BC1CCF572BE4D5150067198A /* game_jikkyou.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = game_jikkyou.png; sourceTree = ""; }; BC1CCF5F2BE53FE70067198A /* IOAudioTime.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioTime.swift; sourceTree = ""; }; + BC1CCF612BE66C220067198A /* CGImage+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CGImage+Extension.swift"; sourceTree = ""; }; BC1DC4A329F4F74F00E928ED /* AVCaptureSession+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+Extension.swift"; sourceTree = ""; }; BC1DC5032A02894D00E928ED /* FLVVideoFourCCTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FLVVideoFourCCTests.swift; sourceTree = ""; }; BC1DC5092A039B4400E928ED /* HEVCDecoderConfigurationRecord.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HEVCDecoderConfigurationRecord.swift; sourceTree = ""; }; @@ -689,6 +711,9 @@ BCD63ABB26FDF12A0084842D /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; BCD8702A2BC266CD009E495B /* IOStreamObserver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOStreamObserver.swift; sourceTree = ""; }; BCD91C0C2A700FF50033F9E1 /* IOAudioRingBufferTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioRingBufferTests.swift; sourceTree = ""; }; + BCDEB4F72BE441D300EEC6ED /* ScreenRenderer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenRenderer.swift; sourceTree = ""; }; + BCDEB4F92BE442F900EEC6ED /* Screen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Screen.swift; sourceTree = ""; }; + BCDEB4FB2BE4436D00EEC6ED /* ScreenObjectContainer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenObjectContainer.swift; sourceTree = ""; }; BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetStreamSwitcher.swift; sourceTree = ""; }; BCFB355324FA275600DC5108 /* PlaybackViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaybackViewController.swift; sourceTree = ""; }; BCFB355924FA40DD00DC5108 /* PlaybackContainerViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaybackContainerViewController.swift; sourceTree = ""; }; @@ -853,8 +878,8 @@ children = ( BC7C56D029A78D4F00C41A9B /* ADTSHeaderTests.swift */, BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */, - BCCBCE9429A7C9C90095B51C /* AVCFormatStreamTests.swift */, 2917CB652104CA2800F6823A /* AudioSpecificConfigTests.swift */, + BCCBCE9429A7C9C90095B51C /* AVCFormatStreamTests.swift */, BC7C56C629A7701F00C41A9B /* ESSpecificDataTests.swift */, BC1DC5112A04E46E00E928ED /* HEVCDecoderConfigurationRecordTests.swift */, BCCBCE9A29A9D96A0095B51C /* 
NALUnitReaderTests.swift */, @@ -963,6 +988,7 @@ 296543541D62FE3E00734698 /* macOS */ = { isa = PBXGroup; children = ( + BC1CCF572BE4D5150067198A /* game_jikkyou.png */, 296543641D62FEB700734698 /* AppDelegate.swift */, 296543651D62FEB700734698 /* Assets.xcassets */, 291619671E7EFE4E009FB344 /* CameraIngestViewController.swift */, @@ -1022,16 +1048,18 @@ 29798E5A1CE60E5300F5CBD0 /* Tests */ = { isa = PBXGroup; children = ( - 29798E5D1CE60E5300F5CBD0 /* Info.plist */, - 295018211FFA1C9D00358E10 /* CMAudioSampleBufferFactory.swift */, - 294637A71EC89BC9008EEC71 /* Config.swift */, 291C2AD11CE9FF3E006F042B /* Asset */, + 295018211FFA1C9D00358E10 /* CMAudioSampleBufferFactory.swift */, + BC0B5B1D2BE9310800D83F8E /* CMVideoSampleBufferFactory.swift */, 295018191FFA196800358E10 /* Codec */, + 294637A71EC89BC9008EEC71 /* Config.swift */, 291C2AD21CE9FF48006F042B /* Core */, BC03945D2AA8AFDD006EDE38 /* Extension */, + 29798E5D1CE60E5300F5CBD0 /* Info.plist */, BC0BF4F329866FB700D72CB4 /* IO */, 291C2ACF1CE9FF2B006F042B /* ISO */, 291C2ACE1CE9FF25006F042B /* RTMP */, + BC0B5B152BE919B700D83F8E /* Screen */, 291C2AD01CE9FF33006F042B /* Util */, ); path = Tests; @@ -1151,8 +1179,11 @@ BC562DCA29576D220048D89A /* AVCaptureSession.Preset+Extension.swift */, BC1DC4A329F4F74F00E928ED /* AVCaptureSession+Extension.swift */, BC2828AE2AA322E400741013 /* AVFrameRateRange+Extension.swift */, + BC0B5B132BE8DFE300D83F8E /* AVLayerVideoGravity+Extension.swift */, + BC1CCF612BE66C220067198A /* CGImage+Extension.swift */, 29EA87DE1E79A0810043A5F8 /* CMSampleBuffer+Extension.swift */, BC04A2D52AD2D95500C87A3E /* CMTime+Extension.swift */, + BC0B5B112BE8CFA800D83F8E /* CMVideoDimention+Extension.swift */, 29EA87E11E79A1E90043A5F8 /* CMVideoFormatDescription+Extension.swift */, 29EA87EC1E79A3E30043A5F8 /* CVPixelBuffer+Extension.swift */, BC11023D2917C35B00D48035 /* CVPixelBufferPool+Extension.swift */, @@ -1204,6 +1235,16 @@ path = DebugDescription; sourceTree = ""; }; + BC0B5B152BE919B700D83F8E /* Screen */ = { + isa = PBXGroup; + children = ( + BC0B5B1B2BE925CA00D83F8E /* ScreenObjectContainerTests.swift */, + BC0B5B162BE919D000D83F8E /* ScreenObjectTests.swift */, + BC0B5B1F2BE931D600D83F8E /* VideoTrackScreenObjectTests.swift */, + ); + path = Screen; + sourceTree = ""; + }; BC0BF4F329866FB700D72CB4 /* IO */ = { isa = PBXGroup; children = ( @@ -1245,6 +1286,10 @@ children = ( BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */, BC959EEE296EE4190067BA97 /* ImageTransform.swift */, + BCDEB4F92BE442F900EEC6ED /* Screen.swift */, + BC16019B2BE0E4750061BD3E /* ScreenObject.swift */, + BCDEB4FB2BE4436D00EEC6ED /* ScreenObjectContainer.swift */, + BCDEB4F72BE441D300EEC6ED /* ScreenRenderer.swift */, BC3004CD296B0A1700119932 /* Shape.swift */, BC6FC91D29609A6800A746EE /* ShapeFactory.swift */, 29B8768F1CD70AFE00FC07DA /* VideoEffect.swift */, @@ -1639,6 +1684,8 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + BCB9D79F2BF12AD8008C5B1B /* SampleVideo_360x240_5mb.mp4 in Resources */, + BC1CCF582BE4D5150067198A /* game_jikkyou.png in Resources */, 291619661E7EFB09009FB344 /* Main.storyboard in Resources */, 2923A1F71D63005F0019FBCD /* Assets.xcassets in Resources */, ); @@ -1791,6 +1838,7 @@ BC9CFA9323BDE8B700917EEF /* IOStreamView.swift in Sources */, 29B8769C1CD70B1100FC07DA /* NetClient.swift in Sources */, 29B876871CD70AE800FC07DA /* TSProgram.swift in Sources */, + BC0B5B122BE8CFA800D83F8E /* CMVideoDimention+Extension.swift in Sources */, BC558268240BB40E00011AC0 /* RTMPStreamInfo.swift in 
Sources */, BC3802122AB5E770001AE399 /* IOVideoCaptureUnit.swift in Sources */, 298BCF331DD4C44A007FF86A /* AnyUtil.swift in Sources */, @@ -1805,6 +1853,7 @@ BC110257292E661E00D48035 /* IOVideoMixerSettings.swift in Sources */, BC6889782B011AEB0026A4C2 /* IOCaptureSession.swift in Sources */, BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */, + BCDEB4FC2BE4436D00EEC6ED /* ScreenObjectContainer.swift in Sources */, 29B876B21CD70B2800FC07DA /* RTMPMuxer.swift in Sources */, BC0D236D26331BAB001DDA0C /* DataBuffer.swift in Sources */, 29EA87ED1E79A3E30043A5F8 /* CVPixelBuffer+Extension.swift in Sources */, @@ -1816,8 +1865,10 @@ BCCBCE9729A90D880095B51C /* AVCNALUnit.swift in Sources */, 29B876BD1CD70B3900FC07DA /* CRC32.swift in Sources */, BC4914A628DDD367009E2DF6 /* VTSessionOption.swift in Sources */, + BC1CCF622BE66C220067198A /* CGImage+Extension.swift in Sources */, BC0F1FDA2ACC4CC100C326FF /* IOCaptureVideoPreview.swift in Sources */, BC4914B228DDFE31009E2DF6 /* VTSessionOptionKey.swift in Sources */, + BCDEB4FA2BE442F900EEC6ED /* Screen.swift in Sources */, BC7C56CD29A786AE00C41A9B /* ADTS.swift in Sources */, BC42316A2BCA8BE5003A80DC /* IOAudioMixerBySingleTrack.swift in Sources */, BC562DCB29576D220048D89A /* AVCaptureSession.Preset+Extension.swift in Sources */, @@ -1827,11 +1878,13 @@ 29EA87D51E799F670043A5F8 /* Mirror+Extension.swift in Sources */, BCC4F4152AD6FC1100954EF5 /* IOTellyUnit.swift in Sources */, 2942A4F821A9418A004E1BEE /* Running.swift in Sources */, + BC0B5B142BE8DFE300D83F8E /* AVLayerVideoGravity+Extension.swift in Sources */, 29F6F4851DFB83E200920A3A /* RTMPHandshake.swift in Sources */, BCABED212BDE23C600CC7E73 /* AudioNode+DebugExtension.swift in Sources */, 29EA87DF1E79A0810043A5F8 /* CMSampleBuffer+Extension.swift in Sources */, BC562DC7295767860048D89A /* AVCaptureDevice+Extension.swift in Sources */, BC0F1FDC2ACC630400C326FF /* NSView+Extension.swift in Sources */, + BC16019C2BE0E4750061BD3E /* ScreenObject.swift in Sources */, 29EA87E21E79A1E90043A5F8 /* CMVideoFormatDescription+Extension.swift in Sources */, BC42316C2BCB7084003A80DC /* FeatureUtil.swift in Sources */, BC110253292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */, @@ -1872,11 +1925,13 @@ BC04A2D42AD2D1D700C87A3E /* AVAudioTime+Extension.swift in Sources */, BC7C56B7299E579F00C41A9B /* AudioCodecSettings.swift in Sources */, 29B876AC1CD70B2800FC07DA /* AMF3Serializer.swift in Sources */, + BC1720AC2C04FF7200F65941 /* TypedBlockQueue.swift in Sources */, BC31DBD22A653D1600C4DEA3 /* IOAudioMonitor.swift in Sources */, BCB976DF26107B5600C9A649 /* TSField.swift in Sources */, BC11024A2925147300D48035 /* IOCaptureUnit.swift in Sources */, 29B876921CD70AFE00FC07DA /* IOMixer.swift in Sources */, 29DC17B321D0CC0600E26CED /* Atomic.swift in Sources */, + BCDEB4F82BE441D300EEC6ED /* ScreenRenderer.swift in Sources */, BC959EEF296EE4190067BA97 /* ImageTransform.swift in Sources */, 2942424D1CF4C01300D65DCB /* MD5.swift in Sources */, 29EA87D31E799F360043A5F8 /* ExpressibleByIntegerLiteral+Extension.swift in Sources */, @@ -1908,6 +1963,7 @@ 290EA8991DFB619600053022 /* PacketizedElementaryStreamTests.swift in Sources */, 290EA8911DFB616000053022 /* SwiftCore+ExtensionTests.swift in Sources */, BC0BF4F529866FDE00D72CB4 /* IOMixerTests.swift in Sources */, + BC0B5B1E2BE9310800D83F8E /* CMVideoSampleBufferFactory.swift in Sources */, 290EA89A1DFB619600053022 /* TSProgramTests.swift in Sources */, BC3802192AB6AD79001AE399 /* IOAudioMixerTrackTests.swift in Sources */, 
				BC1DC5042A02894D00E928ED /* FLVVideoFourCCTests.swift in Sources */,
@@ -1921,10 +1977,13 @@
 				290EA8A11DFB61B100053022 /* RTMPChunkTests.swift in Sources */,
 				290EA89F1DFB61B100053022 /* AMF0SerializerTests.swift in Sources */,
 				BC03945F2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift in Sources */,
+				BC0B5B172BE919D000D83F8E /* ScreenObjectTests.swift in Sources */,
 				290EA8AA1DFB61E700053022 /* CRC32Tests.swift in Sources */,
 				035AFA042263868E009DD0BB /* RTMPStreamTests.swift in Sources */,
 				BCA3A5272BC507880083BBB1 /* RTMPTimestampTests.swift in Sources */,
 				290686031DFDB7A7008EB7ED /* RTMPConnectionTests.swift in Sources */,
+				BC0B5B1C2BE925CA00D83F8E /* ScreenObjectContainerTests.swift in Sources */,
+				BC0B5B202BE931D600D83F8E /* VideoTrackScreenObjectTests.swift in Sources */,
 				BCC9E9092636FF7400948774 /* DataBufferTests.swift in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
diff --git a/HaishinKit.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme b/HaishinKit.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme
index b8108baad..7c0e6c02e 100644
--- a/HaishinKit.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme
+++ b/HaishinKit.xcodeproj/xcshareddata/xcschemes/Screencast.xcscheme
@@ -57,8 +57,12 @@
       debugServiceExtension = "internal"
       allowLocationSimulation = "YES"
       launchAutomaticallySubstyle = "2">
-
+
+
+
-
+
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
 ||
+### Offscreen Rendering
+Off-screen rendering makes it possible to overlay any text or bitmap on the video while broadcasting or viewing. This enables applications such as watermarking and time display.
 ```swift
-// If you want to use the multi-camera feature, please make sure stream.isMultiCamSessionEnabled = true. Before attachCamera or attachAudio.
-stream.isMultiCamSessionEnabled = true
-
-let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
-stream.attachCamera(back, track: 0) { _, error in
-  if let error {
-    logger.warn(error)
-  }
-}
-
-let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
-stream.attachCamera(front, track: 1) { videoUnit, error in
-  videoUnit?.isVideoMirrored = true
-  if let error {
-    logger.error(error)
-  }
+let videoScreenObject = VideoTrackScreenObject()
+videoScreenObject.cornerRadius = 32.0
+videoScreenObject.track = 1
+videoScreenObject.horizontalAlignment = .right
+videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
+videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
+_ = videoScreenObject.registerVideoEffect(MonochromeEffect())
+try? stream.screen.addChild(videoScreenObject)
+
+let imageScreenObject = ImageScreenObject()
+let imageURL = URL(fileURLWithPath: Bundle.main.path(forResource: "game_jikkyou", ofType: "png") ?? "")
+if let provider = CGDataProvider(url: imageURL as CFURL) {
+  imageScreenObject.verticalAlignment = .bottom
+  imageScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 0)
+  imageScreenObject.cgImage = CGImage(
+    pngDataProviderSource: provider,
+    decode: nil,
+    shouldInterpolate: false,
+    intent: .defaultIntent
+  )
+} else {
+  logger.info("no image")
+}
+try? stream.screen.addChild(imageScreenObject)
 ```
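For the time-display use case mentioned above, a `TextScreenObject` can be refreshed from the `ScreenDelegate` callback on every frame. A minimal sketch adapted from the macOS example in this PR; `OverlayViewController` is a hypothetical controller that owns `stream` (an `IOStream`) and conforms to `ScreenDelegate`:

```swift
// Hypothetical wiring for a clock overlay. TextScreenObject, Screen and
// ScreenDelegate are the APIs introduced by this PR.
final class OverlayViewController: NSViewController {
    private let textScreenObject = TextScreenObject()

    override func viewDidAppear() {
        super.viewDidAppear()
        textScreenObject.horizontalAlignment = .right
        textScreenObject.verticalAlignment = .bottom
        textScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 16)
        try? stream.screen.addChild(textScreenObject)
        stream.screen.delegate = self
    }
}

extension OverlayViewController: ScreenDelegate {
    // Called just before each layout pass, so the overlay text stays current.
    func screen(_ screen: Screen, willLayout time: CMTime) {
        textScreenObject.string = Date().description
    }
}
```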
 ### Rendering
diff --git a/Sources/Extension/AVLayerVideoGravity+Extension.swift b/Sources/Extension/AVLayerVideoGravity+Extension.swift
new file mode 100644
index 000000000..3ee238d1f
--- /dev/null
+++ b/Sources/Extension/AVLayerVideoGravity+Extension.swift
@@ -0,0 +1,34 @@
+import AVFoundation
+import Foundation
+
+extension AVLayerVideoGravity {
+    func scale(_ display: CGSize, image: CGSize) -> CGAffineTransform {
+        switch self {
+        case .resize:
+            return .init(scaleX: display.width / image.width, y: display.height / image.height)
+        case .resizeAspect:
+            let scale = min(display.width / image.width, display.height / image.height)
+            return .init(scaleX: scale, y: scale)
+        case .resizeAspectFill:
+            let scale = max(display.width / image.width, display.height / image.height)
+            return .init(scaleX: scale, y: scale)
+        default:
+            return .init(scaleX: 1.0, y: 1.0)
+        }
+    }
+
+    func region(_ display: CGRect, image: CGRect) -> CGRect {
+        switch self {
+        case .resize:
+            return image
+        case .resizeAspect:
+            return image
+        case .resizeAspectFill:
+            let x = abs(display.width - image.width) / 2
+            let y = abs(display.height - image.height) / 2
+            return .init(origin: .init(x: x, y: y), size: display.size)
+        default:
+            return image
+        }
+    }
+}
diff --git a/Sources/Extension/CGImage+Extension.swift b/Sources/Extension/CGImage+Extension.swift
new file mode 100644
index 000000000..77f9ff2af
--- /dev/null
+++ b/Sources/Extension/CGImage+Extension.swift
@@ -0,0 +1,8 @@
+import CoreGraphics
+import Foundation
+
+extension CGImage {
+    var size: CGSize {
+        return .init(width: width, height: height)
+    }
+}
diff --git a/Sources/Extension/CMVideoDimention+Extension.swift b/Sources/Extension/CMVideoDimention+Extension.swift
new file mode 100644
index 000000000..525ba839a
--- /dev/null
+++ b/Sources/Extension/CMVideoDimention+Extension.swift
@@ -0,0 +1,8 @@
+import AVFoundation
+import Foundation
+
+extension CMVideoDimensions {
+    var size: CGSize {
+        return .init(width: CGFloat(width), height: CGFloat(height))
+    }
+}
diff --git a/Sources/Extension/CVPixelBuffer+Extension.swift b/Sources/Extension/CVPixelBuffer+Extension.swift
index dfe1f03c9..6b35ef761 100644
--- a/Sources/Extension/CVPixelBuffer+Extension.swift
+++ b/Sources/Extension/CVPixelBuffer+Extension.swift
@@ -5,6 +5,7 @@ import Foundation
 extension CVPixelBuffer {
     enum Error: Swift.Error {
         case failedToMakevImage_Buffer(_ error: vImage_Error)
+        case failedToLock(_ status: CVReturn)
     }
 
     static var format = vImage_CGImageFormat(
@@ -16,68 +17,49 @@
         decode: nil,
         renderingIntent: .defaultIntent)
 
-    var width: Int {
-        CVPixelBufferGetWidth(self)
+    var size: CGSize {
+        return .init(width: CVPixelBufferGetWidth(self), height: CVPixelBufferGetHeight(self))
     }
 
-    var height: Int {
-        CVPixelBufferGetHeight(self)
+    var pixelFormatType: OSType {
+        CVPixelBufferGetPixelFormatType(self)
     }
 
-    @discardableResult
-    func over(_ pixelBuffer: CVPixelBuffer?, regionOfInterest roi: CGRect = .zero, radius: CGFloat = 0.0) -> Self {
-        guard var inputImageBuffer = try? pixelBuffer?.makevImage_Buffer(format: &Self.format) else {
-            return self
-        }
-        defer {
-            inputImageBuffer.free()
-        }
-        guard var srcImageBuffer = try? makevImage_Buffer(format: &Self.format) else {
-            return self
+    func swap(_ pixelBuffer: CVPixelBuffer?) throws {
+        try pixelBuffer?.doTransaction(.readOnly) { pixelBuffer in
+            guard var imageBuffer = try?
pixelBuffer.makevImage_Buffer(format: &Self.format) else { + return + } + defer { + imageBuffer.free() + } + imageBuffer.copy(to: self, format: &Self.format) } - defer { - srcImageBuffer.free() + } + + @inline(__always) + func doTransaction(_ lockFlags: CVPixelBufferLockFlags, lambda: (CVPixelBuffer) throws -> Void) throws { + let status = CVPixelBufferLockBaseAddress(self, lockFlags) + guard status == kCVReturnSuccess else { + throw Error.failedToLock(status) } - let xScale = Float(roi.width) / Float(inputImageBuffer.width) - let yScale = Float(roi.height) / Float(inputImageBuffer.height) - let scaleFactor = (xScale < yScale) ? xScale : yScale - var scaledInputImageBuffer = inputImageBuffer.scale(scaleFactor) - var shape = ShapeFactory.shared.cornerRadius(CGSize(width: CGFloat(scaledInputImageBuffer.width), height: CGFloat(scaledInputImageBuffer.height)), cornerRadius: radius) - vImageSelectChannels_ARGB8888(&shape, &scaledInputImageBuffer, &scaledInputImageBuffer, 0x8, vImage_Flags(kvImageNoFlags)) defer { - scaledInputImageBuffer.free() + CVPixelBufferUnlockBaseAddress(self, lockFlags) } - srcImageBuffer.over(&scaledInputImageBuffer, origin: roi.origin) - srcImageBuffer.copy(to: self, format: &Self.format) - return self + try lambda(self) } @discardableResult - func split(_ pixelBuffer: CVPixelBuffer?, direction: ImageTransform) -> Self { - guard var inputImageBuffer = try? pixelBuffer?.makevImage_Buffer(format: &Self.format) else { - return self - } - defer { - inputImageBuffer.free() - } - guard var sourceImageBuffer = try? makevImage_Buffer(format: &Self.format) else { - return self - } - defer { - sourceImageBuffer.free() - } - let scaleX = Float(width) / Float(inputImageBuffer.width) - let scaleY = Float(height) / Float(inputImageBuffer.height) - var scaledInputImageBuffer = inputImageBuffer.scale(min(scaleY, scaleX)) - defer { - scaledInputImageBuffer.free() - } - sourceImageBuffer.split(&scaledInputImageBuffer, direction: direction) - sourceImageBuffer.copy(to: self, format: &Self.format) - return self + func lockBaseAddress(_ lockFlags: CVPixelBufferLockFlags = CVPixelBufferLockFlags.readOnly) -> CVReturn { + return CVPixelBufferLockBaseAddress(self, lockFlags) + } + + @discardableResult + func unlockBaseAddress(_ lockFlags: CVPixelBufferLockFlags = CVPixelBufferLockFlags.readOnly) -> CVReturn { + return CVPixelBufferUnlockBaseAddress(self, lockFlags) } - func makevImage_Buffer(format: inout vImage_CGImageFormat) throws -> vImage_Buffer { + private func makevImage_Buffer(format: inout vImage_CGImageFormat) throws -> vImage_Buffer { var buffer = vImage_Buffer() let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(self).takeRetainedValue() vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB()) @@ -93,14 +75,4 @@ extension CVPixelBuffer { } return buffer } - - @discardableResult - func lockBaseAddress(_ lockFlags: CVPixelBufferLockFlags = CVPixelBufferLockFlags.readOnly) -> CVReturn { - return CVPixelBufferLockBaseAddress(self, lockFlags) - } - - @discardableResult - func unlockBaseAddress(_ lockFlags: CVPixelBufferLockFlags = CVPixelBufferLockFlags.readOnly) -> CVReturn { - return CVPixelBufferUnlockBaseAddress(self, lockFlags) - } } diff --git a/Sources/Extension/vImage_Buffer+Extension.swift b/Sources/Extension/vImage_Buffer+Extension.swift index 8e85d5670..7af7a8f64 100644 --- a/Sources/Extension/vImage_Buffer+Extension.swift +++ b/Sources/Extension/vImage_Buffer+Extension.swift @@ -33,82 +33,4 @@ extension vImage_Buffer { nil, 
vImage_Flags(kvImageNoFlags)) } - - @discardableResult - mutating func scale(_ factor: Float) -> Self { - var imageBuffer = vImage_Buffer() - guard vImageBuffer_Init( - &imageBuffer, - vImagePixelCount(Float(height) * factor), - vImagePixelCount(Float(width) * factor), - 32, - vImage_Flags(kvImageNoFlags)) == kvImageNoError else { - return self - } - guard vImageScale_ARGB8888( - &self, - &imageBuffer, - nil, - vImage_Flags(kvImageNoFlags)) == kvImageNoError else { - return self - } - return imageBuffer - } - - @discardableResult - mutating func over(_ src: inout vImage_Buffer, origin: CGPoint = .zero) -> Self { - let start = Int(origin.y) * rowBytes + Int(origin.x) * 4 - var destination = vImage_Buffer( - data: data.advanced(by: start), - height: vImagePixelCount(src.height), - width: vImagePixelCount(src.width), - rowBytes: rowBytes - ) - guard vImageAlphaBlend_ARGB8888( - &src, - &destination, - &destination, - vImage_Flags(kvImageDoNotTile) - ) == kvImageNoError else { - return self - } - return self - } - - @discardableResult - mutating func split(_ buffer: inout vImage_Buffer, direction: ImageTransform) -> Self { - buffer.transform(direction.opposite) - var shape = ShapeFactory.shared.split(CGSize(width: CGFloat(width), height: CGFloat(height)), direction: direction.opposite) - vImageSelectChannels_ARGB8888(&shape, &buffer, &buffer, 0x8, vImage_Flags(kvImageNoFlags)) - transform(direction) - guard vImageAlphaBlend_ARGB8888( - &buffer, - &self, - &self, - vImage_Flags(kvImageDoNotTile) - ) == kvImageNoError else { - return self - } - return self - } - - private mutating func transform(_ direction: ImageTransform) { - let backgroundColor: [Pixel_8] = [0, 255, 255, 255] - var vImageTransform = vImage_CGAffineTransform( - a: 1, - b: 0, - c: 0, - d: 1, - tx: direction.tx(Double(width)), - ty: direction.ty(Double(height)) - ) - vImageAffineWarpCG_ARGB8888( - &self, - &self, - nil, - &vImageTransform, - backgroundColor, - vImage_Flags(kvImageBackgroundColorFill) - ) - } } diff --git a/Sources/IO/IOStream.swift b/Sources/IO/IOStream.swift index 1e0c94301..971312735 100644 --- a/Sources/IO/IOStream.swift +++ b/Sources/IO/IOStream.swift @@ -83,6 +83,11 @@ open class IOStream: NSObject { /// The lockQueue. public let lockQueue: DispatchQueue = .init(label: "com.haishinkit.HaishinKit.IOStream.lock", qos: .userInitiated) + /// The offscreen rendering object. + public var screen: Screen { + return mixer.videoIO.screen + } + /// Specifies the adaptibe bitrate strategy. public var bitrateStrategy: any IOStreamBitRateStrategyConvertible = IOStreamBitRateStrategy.shared { didSet { @@ -101,16 +106,6 @@ open class IOStream: NSObject { } } - /// Specifies the context object. - public var context: CIContext { - get { - mixer.videoIO.context - } - set { - mixer.videoIO.context = newValue - } - } - #if os(iOS) || os(macOS) || os(tvOS) /// Specifiet the device torch indicating wheter the turn on(TRUE) or not(FALSE). 
public var torch: Bool { diff --git a/Sources/IO/IOVideoMixer.swift b/Sources/IO/IOVideoMixer.swift index 2b4fced2f..590e7e8a6 100644 --- a/Sources/IO/IOVideoMixer.swift +++ b/Sources/IO/IOVideoMixer.swift @@ -4,146 +4,178 @@ import Foundation protocol IOVideoMixerDelegate: AnyObject { func videoMixer(_ videoMixer: IOVideoMixer, track: UInt8, didInput sampleBuffer: CMSampleBuffer) - func videoMixer(_ videoMixer: IOVideoMixer, didOutput imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime) - func videoMixer(_ videoMixer: IOVideoMixer, didOutput sampleBbffer: CMSampleBuffer) + func videoMixer(_ videoMixer: IOVideoMixer, didOutput sampleBuffer: CMSampleBuffer) } private let kIOVideoMixer_defaultAttributes: [NSString: NSObject] = [ - kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), + kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue ] +private let kIOVideoMixer_lockFlags = CVPixelBufferLockFlags(rawValue: .zero) + final class IOVideoMixer { - var settings: IOVideoMixerSettings = .default weak var delegate: T? - var context: CIContext = .init() { - didSet { - for effect in effects { - effect.ciContext = context - } - } + lazy var screen: Screen = { + var screen = Screen() + videoTrackScreenObject.track = settings.mainTrack + try? screen.addChild(videoTrackScreenObject) + return screen + }() + var isRunning: Atomic { + return choreographer.isRunning } - var inputFormats: [UInt8: CMFormatDescription] { - var formats: [UInt8: CMFormatDescription] = .init() - if let sampleBuffer, let formatDescription = sampleBuffer.formatDescription { - formats[0] = formatDescription - } - if let multiCamSampleBuffer, let formatDescription = multiCamSampleBuffer.formatDescription { - formats[1] = formatDescription - } - return formats - } - private var extent = CGRect.zero { + var settings: IOVideoMixerSettings = .default { didSet { - guard extent != oldValue else { - return + if settings.size != oldValue.size { + CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool) + } + if settings.mainTrack != oldValue.mainTrack { + videoTrackScreenObject.track = settings.mainTrack + } + if settings.mode != oldValue.mode { + switch settings.mode { + case .offscreen: + startRunning() + case .passthrough: + stopRunning() + } } - CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool) - pixelBufferPool?.createPixelBuffer(&pixelBuffer) } } + private(set) var inputFormats: [UInt8: CMFormatDescription] = [:] + private var timeStamp: CMTime = .invalid private var attributes: [NSString: NSObject] { var attributes: [NSString: NSObject] = kIOVideoMixer_defaultAttributes - attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width)) - attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height)) + attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(settings.size.width)) + attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(settings.size.height)) return attributes } - private var buffer: CVPixelBuffer? - private var pixelBuffer: CVPixelBuffer? - private var pixelBufferPool: CVPixelBufferPool? - private var sampleBuffer: CMSampleBuffer? - private var multiCamSampleBuffer: CMSampleBuffer? - private(set) var effects: [VideoEffect] = .init() + private var outputFormat: CMFormatDescription? + private var pixelBufferPool: CVPixelBufferPool? 
{ + didSet { + outputFormat = nil + } + } + private var coverImageBuffer: CVPixelBuffer? + private lazy var choreographer: DisplayLinkChoreographer = { + var choreographer = DisplayLinkChoreographer() + choreographer.delegate = self + return choreographer + }() + private var videoTrackScreenObject = VideoTrackScreenObject() - @inline(__always) - func effect(_ buffer: CVImageBuffer, info: CMSampleBuffer?) -> CIImage { - var image = CIImage(cvPixelBuffer: buffer) - for effect in effects { - image = effect.execute(image, info: info) + init() { + switch settings.mode { + case .offscreen: + startRunning() + case .passthrough: + break } - return image } func registerEffect(_ effect: VideoEffect) -> Bool { - effect.ciContext = context - if effects.contains(effect) { - return false - } - effects.append(effect) - return true + return videoTrackScreenObject.registerVideoEffect(effect) } func unregisterEffect(_ effect: VideoEffect) -> Bool { - effect.ciContext = nil - if let index = effects.firstIndex(of: effect) { - effects.remove(at: index) - return true - } - return false + return videoTrackScreenObject.unregisterVideoEffect(effect) } func append(_ track: UInt8, sampleBuffer: CMSampleBuffer) { + inputFormats[track] = sampleBuffer.formatDescription delegate?.videoMixer(self, track: track, didInput: sampleBuffer) - if track == settings.mainTrack { - var imageBuffer: CVImageBuffer? - guard let buffer = sampleBuffer.imageBuffer else { - return - } - self.sampleBuffer = sampleBuffer - buffer.lockBaseAddress() - defer { - buffer.unlockBaseAddress() - imageBuffer?.unlockBaseAddress() - } - if let multiCamPixelBuffer = multiCamSampleBuffer?.imageBuffer { - multiCamPixelBuffer.lockBaseAddress() - switch settings.mode { - case .pip: - buffer.over( - multiCamPixelBuffer, - regionOfInterest: settings.regionOfInterest, - radius: settings.cornerRadius - ) - case .splitView: - buffer.split(multiCamPixelBuffer, direction: settings.direction) + switch settings.mode { + case .offscreen: + let videoTracks: [VideoTrackScreenObject] = screen.getScreenObjects() + videoTracks.forEach { + guard $0.track == track else { + return } - multiCamPixelBuffer.unlockBaseAddress() + $0.enqueue(sampleBuffer) } - if !effects.isEmpty { - let image = effect(buffer, info: sampleBuffer) - extent = image.extent - #if os(macOS) - pixelBufferPool?.createPixelBuffer(&imageBuffer) - #else - if settings.alwaysUseBufferPoolForVideoEffects || buffer.width != Int(extent.width) || buffer.height != Int(extent.height) { - pixelBufferPool?.createPixelBuffer(&imageBuffer) - } - #endif - imageBuffer?.lockBaseAddress() - context.render(image, to: imageBuffer ?? buffer) - } - if settings.isMuted { - imageBuffer = pixelBuffer + case .passthrough: + if settings.mainTrack == track { + outputSampleBuffer(sampleBuffer) } - delegate?.videoMixer(self, didOutput: imageBuffer ?? 
buffer, presentationTimeStamp: sampleBuffer.presentationTimeStamp)
-            if !settings.isMuted {
-                pixelBuffer = buffer
+        }
+    }
+
+    @inline(__always)
+    private func outputSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
+        outputFormat = sampleBuffer.formatDescription
+        defer {
+            coverImageBuffer = sampleBuffer.imageBuffer
+        }
+        guard settings.isMuted else {
+            delegate?.videoMixer(self, didOutput: sampleBuffer)
+            return
+        }
+        do {
+            try sampleBuffer.imageBuffer?.doTransaction(kIOVideoMixer_lockFlags) { imageBuffer in
+                try imageBuffer.swap(coverImageBuffer)
             }
             delegate?.videoMixer(self, didOutput: sampleBuffer)
-        } else {
-            multiCamSampleBuffer = sampleBuffer
+        } catch {
+            logger.warn(error)
         }
     }
+}
 
-    func detach(_ track: UInt8) {
-        switch track {
-        case 0:
-            pixelBuffer = nil
-            sampleBuffer = nil
-        case 1:
-            multiCamSampleBuffer = nil
-        default:
-            break
+extension IOVideoMixer: ChoreographerDelegate {
+    // MARK: ChoreographerDelegate
+    func choreographer(_ choreographer: any Choreographer, didFrame duration: Double) {
+        var pixelBuffer: CVPixelBuffer?
+        pixelBufferPool?.createPixelBuffer(&pixelBuffer)
+        guard let pixelBuffer else {
+            return
         }
+        if outputFormat == nil {
+            CMVideoFormatDescriptionCreateForImageBuffer(
+                allocator: kCFAllocatorDefault,
+                imageBuffer: pixelBuffer,
+                formatDescriptionOut: &outputFormat
+            )
+        }
+        guard let outputFormat else {
+            return
+        }
+        if let dictionary = CVBufferGetAttachments(pixelBuffer, .shouldNotPropagate) {
+            CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate)
+        }
+        let now = CMClock.hostTimeClock.time
+        var timingInfo = CMSampleTimingInfo(
+            duration: timeStamp == .invalid ? .zero : now - timeStamp,
+            presentationTimeStamp: now,
+            decodeTimeStamp: .invalid
+        )
+        timeStamp = now
+        var sampleBuffer: CMSampleBuffer?
+        guard CMSampleBufferCreateReadyWithImageBuffer(
+            allocator: kCFAllocatorDefault,
+            imageBuffer: pixelBuffer,
+            formatDescription: outputFormat,
+            sampleTiming: &timingInfo,
+            sampleBufferOut: &sampleBuffer
+        ) == noErr else {
+            return
+        }
+        if let sampleBuffer {
+            outputSampleBuffer(screen.render(sampleBuffer))
+        }
+    }
+}
+
+extension IOVideoMixer: Running {
+    // MARK: Running
+    func startRunning() {
+        CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
+        choreographer.preferredFramesPerSecond = settings.frameRate
+        choreographer.startRunning()
+        choreographer.isPaused = false
+    }
+
+    func stopRunning() {
+        choreographer.stopRunning()
+    }
+}
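The mixer now runs in one of the two modes defined in the settings file that follows. A short sketch of configuring the replacement settings, mirroring the iOS example earlier in this PR (`stream` is assumed to be an `IOStream`):

```swift
// offscreen: the Screen-based compositor renders output frames on a display link.
// passthrough: the main track's buffers are forwarded untouched, no effects applied.
stream.videoMixerSettings.mode = .offscreen
stream.videoMixerSettings.mainTrack = 0
stream.videoMixerSettings.frameRate = 30
stream.videoMixerSettings.size = .init(width: 1280, height: 720)
```

Note that `size` and `frameRate` replace the removed `regionOfInterest`, `cornerRadius`, and `direction`; the picture-in-picture rectangle is now expressed through `ScreenObject.size`, alignment, and `layoutMargin` instead.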
diff --git a/Sources/IO/IOVideoMixerSettings.swift b/Sources/IO/IOVideoMixerSettings.swift
index 5a32b0038..a9a79e505 100644
--- a/Sources/IO/IOVideoMixerSettings.swift
+++ b/Sources/IO/IOVideoMixerSettings.swift
@@ -2,47 +2,31 @@ import Accelerate
 import CoreMedia
 import Foundation
 
-/// The IOVideoMixerSettings represents the pip capture settings for the video capture.
+/// Constraints on the video mixer settings.
 public struct IOVideoMixerSettings: Codable {
-    /// The type of image display mode.
+    /// The default setting for the stream.
+    public static let `default`: IOVideoMixerSettings = .init()
+
+    /// The type of image rendering mode.
     public enum Mode: String, Codable {
-        /// The picture in picture mode means video stream playing within an inset window, freeing the rest of the screen for other tasks.
-        case pip
-        /// The split view means video stream playing within two individual windows.
-        case splitView
+        /// The input buffer will be used as is. No effects will be applied.
+        case passthrough
+        /// Off-screen rendering will be performed to allow for more flexible drawing.
+        case offscreen
     }
 
-    /// The default setting for the stream.
-    public static let `default` = IOVideoMixerSettings(
-        mode: .pip,
-        cornerRadius: 16.0,
-        regionOfInterest: .init(
-            origin: CGPoint(x: 16, y: 16),
-            size: .init(width: 160, height: 160)
-        ),
-        direction: .east
-    )
+    /// Specifies the image rendering mode.
+    public var mode: Mode = .offscreen
 
-    /// The image display mode.
-    public let mode: Mode
-    /// The cornerRadius of the picture in picture image.
-    public let cornerRadius: CGFloat
-    /// The region of the picture in picture image.
-    public let regionOfInterest: CGRect
-    /// The direction of the splitView position.
-    public let direction: ImageTransform
-    /// Specifies the isMuted indicies whether freeze video signal or not.
+    /// Specifies whether to freeze the video signal (when true, the last frame is repeated).
     public var isMuted = false
+
     /// Specifies the main track number.
     public var mainTrack: UInt8 = 0
-    /// Specifies if effects are always rendered to a new buffer.
-    public var alwaysUseBufferPoolForVideoEffects = false
 
-    /// Create a new IOVideoMixerSettings.
-    public init(mode: Mode, cornerRadius: CGFloat, regionOfInterest: CGRect, direction: ImageTransform) {
-        self.mode = mode
-        self.cornerRadius = cornerRadius
-        self.regionOfInterest = regionOfInterest
-        self.direction = direction
-    }
+    /// Specifies the frame rate to use when outputting video.
+    public var frameRate = 30
+
+    /// Specifies the video size to use when outputting video.
+    public var size: CGSize = .init(width: 1280, height: 720)
 }
diff --git a/Sources/IO/IOVideoUnit.swift b/Sources/IO/IOVideoUnit.swift
index fae760796..d8590e31f 100644
--- a/Sources/IO/IOVideoUnit.swift
+++ b/Sources/IO/IOVideoUnit.swift
@@ -35,6 +35,11 @@ final class IOVideoUnit: IOUnit {
         #endif
         }
     }
+
+    var screen: Screen {
+        return videoMixer.screen
+    }
+
     var mixerSettings: IOVideoMixerSettings {
         get {
             return videoMixer.settings
@@ -84,17 +89,6 @@ final class IOVideoUnit: IOUnit {
         !captures.lazy.filter { $0.value.device != nil }.isEmpty
     }
 
-    var context: CIContext {
-        get {
-            return lockQueue.sync { self.videoMixer.context }
-        }
-        set {
-            lockQueue.async {
-                self.videoMixer.context = newValue
-            }
-        }
-    }
-
     var isRunning: Atomic<Bool> {
         return codec.isRunning
     }
@@ -184,9 +178,6 @@ final class IOVideoUnit: IOUnit {
             let capture = self.capture(for: track)
             configuration?(capture, nil)
             try capture?.attachDevice(device, videoUnit: self)
-            if device == nil {
-                videoMixer.detach(track)
-            }
         }
         if device != nil && view != nil {
             // Start capturing if not running.
@@ -261,15 +252,14 @@ extension IOVideoUnit: IOVideoMixerDelegate {
     }
 
     func videoMixer(_ videoMixer: IOVideoMixer, didOutput sampleBuffer: CMSampleBuffer) {
+        if let imageBuffer = sampleBuffer.imageBuffer {
+            codec.append(
+                imageBuffer,
+                presentationTimeStamp: sampleBuffer.presentationTimeStamp,
+                duration: sampleBuffer.duration
+            )
+        }
         view?.enqueue(sampleBuffer)
         mixer?.videoUnit(self, didOutput: sampleBuffer)
     }
-
-    func videoMixer(_ videoMixer: IOVideoMixer, didOutput imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime) {
-        codec.append(
-            imageBuffer,
-            presentationTimeStamp: presentationTimeStamp,
-            duration: .invalid
-        )
-    }
 }
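Effect registration survives the rewrite, but it is now routed to the mixer's main `VideoTrackScreenObject` rather than a flat effect list. A hedged sketch of the two registration scopes (the stream-level `registerVideoEffect` API is assumed unchanged from earlier releases):

```swift
// Assumed stream-level API: applies only to the mixer's main video track.
_ = stream.registerVideoEffect(MonochromeEffect())

// Per-object registration, as in the examples above: scopes the effect
// to a single screen object instead of the whole composed output.
_ = videoScreenObject.registerVideoEffect(MonochromeEffect())
```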
diff --git a/Sources/IO/MTHKView.swift b/Sources/IO/MTHKView.swift
index 92b0b4500..90c708fc3 100644
--- a/Sources/IO/MTHKView.swift
+++ b/Sources/IO/MTHKView.swift
@@ -70,9 +70,6 @@ public class MTHKView: MTKView {
     private weak var currentStream: IOStream? {
         didSet {
             currentStream.map {
-                if let context = self.context {
-                    $0.context = context
-                }
                 $0.view = self
             }
         }
diff --git a/Sources/Screen/Choreographer.swift b/Sources/Screen/Choreographer.swift
index 821ed963f..fe5c1885a 100644
--- a/Sources/Screen/Choreographer.swift
+++ b/Sources/Screen/Choreographer.swift
@@ -8,25 +8,33 @@ import Foundation
 
 // swiftlint:disable attributes
 
 final class DisplayLink: NSObject {
+    private static let preferredFramesPerSecond = 0
+
     var isPaused = false {
         didSet {
             guard let displayLink = displayLink, oldValue != isPaused else {
                 return
             }
             if isPaused {
-                status = CVDisplayLinkStop(displayLink)
+                CVDisplayLinkStop(displayLink)
             } else {
-                status = CVDisplayLinkStart(displayLink)
+                CVDisplayLinkStart(displayLink)
             }
         }
     }
-    var frameInterval = 0
-    var preferredFramesPerSecond = 1
+    var preferredFramesPerSecond = DisplayLink.preferredFramesPerSecond {
+        didSet {
+            guard preferredFramesPerSecond != oldValue else {
+                return
+            }
+            frameInterval = 1.0 / Double(preferredFramesPerSecond)
+        }
+    }
     private(set) var duration = 0.0
     private(set) var timestamp: CFTimeInterval = 0
-    private var status: CVReturn = 0
-    private var displayLink: CVDisplayLink?
     private var selector: Selector?
+    private var displayLink: CVDisplayLink?
+    private var frameInterval = 0.0
     private weak var delegate: NSObject?
 
     deinit {
@@ -35,16 +43,22 @@ final class DisplayLink: NSObject {
 
     init(target: NSObject, selector sel: Selector) {
         super.init()
-        status = CVDisplayLinkCreateWithActiveCGDisplays(&displayLink)
+        CVDisplayLinkCreateWithActiveCGDisplays(&displayLink)
         guard let displayLink = displayLink else {
             return
         }
         self.delegate = target
         self.selector = sel
-        CVDisplayLinkSetOutputHandler(displayLink) { [unowned self] _, inNow, _, _, _ -> CVReturn in
-            self.timestamp = inNow.pointee.timestamp
-            self.duration = inNow.pointee.duration
-            _ = self.delegate?.perform(self.selector, with: self)
+        CVDisplayLinkSetOutputHandler(displayLink) { [weak self] _, inNow, _, _, _ -> CVReturn in
+            guard let self else {
+                return kCVReturnSuccess
+            }
+            self.duration += inNow.pointee.duration
+            if frameInterval == 0 || frameInterval < inNow.pointee.timestamp - self.timestamp {
+                self.timestamp = inNow.pointee.timestamp
+                _ = self.delegate?.perform(self.selector, with: self)
+                self.duration = 0.0
+            }
             return kCVReturnSuccess
         }
     }
@@ -53,14 +67,14 @@ final class DisplayLink: NSObject {
         guard let displayLink = displayLink, !isPaused else {
             return
         }
-        status = CVDisplayLinkStart(displayLink)
+        CVDisplayLinkStart(displayLink)
     }
 
     func invalidate() {
         guard let displayLink = displayLink, isPaused else {
             return
         }
-        status = CVDisplayLinkStop(displayLink)
+        CVDisplayLinkStop(displayLink)
     }
 }
@@ -107,6 +121,7 @@ final class DisplayLinkChoreographer: NSObject, Choreographer {
     }
     weak var delegate: (any ChoreographerDelegate)?
     var isRunning: Atomic<Bool> = .init(false)
+    var preferredFramesPerSecond = DisplayLinkChoreographer.preferredFramesPerSecond
     private var duration: Double = DisplayLinkChoreographer.duration
     private var displayLink: DisplayLink? {
        didSet {
@@ -115,7 +130,7 @@ final class DisplayLinkChoreographer: NSObject, Choreographer {
                 return
             }
             displayLink.isPaused = true
-            displayLink.preferredFramesPerSecond = Self.preferredFramesPerSecond
+            displayLink.preferredFramesPerSecond = preferredFramesPerSecond
             displayLink.add(to: .main, forMode: .common)
         }
     }
diff --git a/Sources/Screen/Screen.swift b/Sources/Screen/Screen.swift
new file mode 100644
index 000000000..40046a48c
--- /dev/null
+++ b/Sources/Screen/Screen.swift
@@ -0,0 +1,82 @@
+import AVFoundation
+import Foundation
+
+#if canImport(AppKit)
+import AppKit
+#endif
+
+#if canImport(UIKit)
+import UIKit
+#endif
+
+/// An interface a screen uses to inform its delegate.
+public protocol ScreenDelegate: AnyObject {
+    /// Tells the receiver that the screen object layout phase is about to begin.
+    func screen(_ screen: Screen, willLayout time: CMTime)
+}
+
+/// An object that provides the foundation for offscreen rendering.
+public final class Screen: ScreenObjectContainerConvertible {
+    private static let lockFlags = CVPixelBufferLockFlags(rawValue: 0)
+
+    /// The total number of child objects.
+    public var childCounts: Int {
+        return root.childCounts
+    }
+
+    /// Specifies the delegate object.
+    public weak var delegate: (any ScreenDelegate)?
+
+    #if os(macOS)
+    /// Specifies the background color.
+    public var backgroundColor: CGColor = NSColor.black.cgColor {
+        didSet {
+            guard backgroundColor != oldValue else {
+                return
+            }
+            renderer.backgroundColor = backgroundColor
+        }
+    }
+    #else
+    /// Specifies the background color.
+    public var backgroundColor: CGColor = UIColor.black.cgColor {
+        didSet {
+            guard backgroundColor != oldValue else {
+                return
+            }
+            renderer.backgroundColor = backgroundColor
+        }
+    }
+    #endif
+
+    private var root: ScreenObjectContainer = .init()
+    private(set) var renderer = ScreenRendererByCPU()
+
+    /// Adds the specified screen object as a child of the current screen object container.
+    public func addChild(_ child: ScreenObject?) throws {
+        try root.addChild(child)
+    }
+
+    /// Removes the specified screen object from the current screen object container.
+    public func removeChild(_ child: ScreenObject?) {
+        root.removeChild(child)
+    }
+
+    func getScreenObjects<T: ScreenObject>() -> [T] {
+        return root.getScreenObjects()
+    }
+
+    func render(_ sampleBuffer: CMSampleBuffer) -> CMSampleBuffer {
+        sampleBuffer.imageBuffer?.lockBaseAddress(Self.lockFlags)
+        defer {
+            sampleBuffer.imageBuffer?.unlockBaseAddress(Self.lockFlags)
+        }
+        renderer.setTarget(sampleBuffer.imageBuffer)
+        if let dimensions = sampleBuffer.formatDescription?.dimensions {
+            root.size = dimensions.size
+        }
+        delegate?.screen(self, willLayout: sampleBuffer.presentationTimeStamp)
+        root.layout(renderer)
+        root.draw(renderer)
+        return sampleBuffer
+    }
+}
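// A minimal usage sketch of Screen, assuming `stream` is an attached IOStream
// on a UIKit platform. Screen is the root of the screen object tree; children
// are composed over backgroundColor each frame in the order they were added,
// and addChild throws if the object already has a parent.
let screen = stream.screen
screen.backgroundColor = UIColor.black.cgColor

let object = VideoTrackScreenObject()
do {
    try screen.addChild(object)
    try screen.addChild(object) // throws ScreenObjectContainer.Error.alreadyExists
} catch {
    print(error)
}
print(screen.childCounts) // 1
screen.removeChild(object)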
diff --git a/Sources/Screen/ScreenObject.swift b/Sources/Screen/ScreenObject.swift
new file mode 100644
index 000000000..af1aae8f5
--- /dev/null
+++ b/Sources/Screen/ScreenObject.swift
@@ -0,0 +1,448 @@
+import Accelerate
+import AVFoundation
+import CoreImage
+import CoreMedia
+import Foundation
+import VideoToolbox
+
+#if canImport(AppKit)
+import AppKit
+#endif
+
+#if canImport(UIKit)
+import UIKit
+#endif
+
+/// The ScreenObject class is the abstract class for all objects that are rendered on the screen.
+public class ScreenObject: Hashable {
+    public static func == (lhs: ScreenObject, rhs: ScreenObject) -> Bool {
+        lhs === rhs
+    }
+
+    public enum HorizontalAlignment {
+        case left
+        case center
+        case right
+    }
+
+    public enum VerticalAlignment {
+        case top
+        case middle
+        case bottom
+    }
+
+    /// The screen object container that contains this screen object.
+    public internal(set) weak var parent: ScreenObjectContainer?
+
+    /// Specifies the size of the object.
+    public var size: CGSize = .zero {
+        didSet {
+            guard size != oldValue else {
+                return
+            }
+            shouldInvalidateLayout = true
+        }
+    }
+
+    /// The bounds rectangle.
+    public internal(set) var bounds: CGRect = .zero
+
+    /// Specifies the visibility of the object.
+    public var isVisible = true
+
+    #if os(macOS)
+    /// Specifies the default spacing for laying out content in the screen object.
+    public var layoutMargin: NSEdgeInsets = .init(top: 0, left: 0, bottom: 0, right: 0)
+    #else
+    /// Specifies the default spacing for laying out content in the screen object.
+    public var layoutMargin: UIEdgeInsets = .init(top: 0, left: 0, bottom: 0, right: 0)
+    #endif
+
+    /// Specifies the radius to use when drawing rounded corners.
+    public var cornerRadius: CGFloat = 0.0
+
+    /// Specifies the alignment position along the vertical axis.
+    public var verticalAlignment: VerticalAlignment = .top
+
+    /// Specifies the alignment position along the horizontal axis.
+    public var horizontalAlignment: HorizontalAlignment = .left
+
+    var shouldInvalidateLayout = true
+
+    /// Creates a screen object.
+    public init() {
+    }
+
+    /// Invalidates the current layout and triggers a layout update.
+    public func invalidateLayout() {
+        shouldInvalidateLayout = true
+    }
+
+    public func hash(into hasher: inout Hasher) {
+        hasher.combine(ObjectIdentifier(self))
+    }
+
+    /// Makes a CGImage for the offscreen rendering.
+    public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+        return nil
+    }
+
+    func layout(_ renderer: some ScreenRenderer) {
+        bounds = makeBounds(size)
+        renderer.layout(self)
+        shouldInvalidateLayout = false
+    }
+
+    func draw(_ renderer: some ScreenRenderer) {
+        renderer.draw(self)
+    }
+
+    func makeBounds(_ size: CGSize) -> CGRect {
+        guard let parent else {
+            return .init(origin: .zero, size: self.size)
+        }
+
+        let width = size.width == 0 ? max(parent.bounds.width - layoutMargin.left - layoutMargin.right + size.width, 0) : size.width
+        let height = size.height == 0 ? max(parent.bounds.height - layoutMargin.top - layoutMargin.bottom + size.height, 0) : size.height
+
+        let parentX = parent.bounds.origin.x
+        let parentWidth = parent.bounds.width
+        let x: CGFloat
+        switch horizontalAlignment {
+        case .center:
+            x = parentX + (parentWidth - width) / 2
+        case .left:
+            x = parentX + layoutMargin.left
+        case .right:
+            x = parentX + (parentWidth - width) - layoutMargin.right
+        }
+
+        let parentY = parent.bounds.origin.y
+        let parentHeight = parent.bounds.height
+        let y: CGFloat
+        switch verticalAlignment {
+        case .top:
+            y = parentY + layoutMargin.top
+        case .middle:
+            y = parentY + (parentHeight - height) / 2
+        case .bottom:
+            y = parentY + (parentHeight - height) - layoutMargin.bottom
+        }
+
+        return .init(x: x, y: y, width: width, height: height)
+    }
+}
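// A sketch of a custom ScreenObject subclass (hypothetical, not from the
// library): the extension point is makeImage(_:), which returns a CGImage the
// renderer then places according to bounds. This one fills its bounds with a
// solid color, mirroring the CGContext pattern TextScreenObject uses below.
import CoreGraphics

public final class SolidColorScreenObject: ScreenObject {
    public var color: CGColor = CGColor(red: 1, green: 0, blue: 0, alpha: 1) {
        didSet {
            invalidateLayout()
        }
    }

    override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
        guard 0 < bounds.width, 0 < bounds.height else {
            return nil
        }
        let context = CGContext(
            data: nil,
            width: Int(bounds.width),
            height: Int(bounds.height),
            bitsPerComponent: 8,
            bytesPerRow: Int(bounds.width) * 4,
            space: CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
        )
        context?.setFillColor(color)
        context?.fill(CGRect(origin: .zero, size: bounds.size))
        return context?.makeImage()
    }
}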
+/// An object that manages offscreen rendering of a CGImage source.
+public final class ImageScreenObject: ScreenObject {
+    /// Specifies the image.
+    public var cgImage: CGImage? {
+        didSet {
+            guard cgImage != oldValue else {
+                return
+            }
+            if let cgImage {
+                size = cgImage.size
+            }
+            invalidateLayout()
+        }
+    }
+
+    override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+        return cgImage
+    }
+
+    override func makeBounds(_ size: CGSize) -> CGRect {
+        guard let cgImage else {
+            return super.makeBounds(self.size)
+        }
+        return super.makeBounds(cgImage.size)
+    }
+}
+
+/// An object that manages offscreen rendering of a video track source.
+public final class VideoTrackScreenObject: ScreenObject {
+    /// Specifies the track number that supplies the visual content.
+    public var track: UInt8 = 0 {
+        didSet {
+            guard track != oldValue else {
+                return
+            }
+            invalidateLayout()
+        }
+    }
+
+    /// A value that specifies how the video is displayed within the screen object's bounds.
+    public var videoGravity: AVLayerVideoGravity = .resizeAspect {
+        didSet {
+            guard videoGravity != oldValue else {
+                return
+            }
+            invalidateLayout()
+        }
+    }
+
+    private var queue: TypedBlockQueue<CMSampleBuffer>?
+    private var effects: [VideoEffect] = .init()
+
+    /// Creates a screen object.
+    override public init() {
+        super.init()
+        horizontalAlignment = .center
+        do {
+            queue = TypedBlockQueue(try CMBufferQueue(capacity: 1, handlers: .outputPTSSortedSampleBuffers))
+        } catch {
+            logger.error(error)
+        }
+    }
+
+    /// Registers a video effect.
+    public func registerVideoEffect(_ effect: VideoEffect) -> Bool {
+        if effects.contains(where: { $0 === effect }) {
+            return false
+        }
+        effects.append(effect)
+        return true
+    }
+
+    /// Unregisters a video effect.
+    public func unregisterVideoEffect(_ effect: VideoEffect) -> Bool {
+        if let index = effects.firstIndex(where: { $0 === effect }) {
+            effects.remove(at: index)
+            return true
+        }
+        return false
+    }
+
+    func enqueue(_ sampleBuffer: CMSampleBuffer) {
+        try? queue?.enqueue(sampleBuffer)
+        invalidateLayout()
+    }
+
+    override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+        guard let sampleBuffer = queue?.dequeue(), let pixelBuffer = sampleBuffer.imageBuffer else {
+            return nil
+        }
+        // Resizing before applying the filter for performance optimization.
+        var image = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale(
+            bounds.size,
+            image: pixelBuffer.size
+        ))
+        for effect in effects {
+            image = effect.execute(image, info: sampleBuffer)
+        }
+        return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent))
+    }
+
+    override func makeBounds(_ size: CGSize) -> CGRect {
+        guard parent != nil, let image = queue?.head?.formatDescription?.dimensions.size else {
+            return super.makeBounds(size)
+        }
+        let bounds = super.makeBounds(size)
+        switch videoGravity {
+        case .resizeAspect:
+            let scale = min(bounds.size.width / image.width, bounds.size.height / image.height)
+            let scaleSize = CGSize(width: image.width * scale, height: image.height * scale)
+            return super.makeBounds(scaleSize)
+        case .resizeAspectFill:
+            return bounds
+        default:
+            return bounds
+        }
+    }
+}
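// A usage sketch of VideoTrackScreenObject, assuming a multi-cam IOStream
// named `stream`; MonochromeEffect is the sample effect from the example
// targets. Track 1 becomes a small rounded picture-in-picture image, and
// swapping the main and sub pictures is now two property writes instead of
// rebuilding an IOVideoMixerSettings value.
let pip = VideoTrackScreenObject()
pip.track = 1
pip.cornerRadius = 16.0
pip.videoGravity = .resizeAspect
pip.horizontalAlignment = .right
pip.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
pip.size = .init(width: 160, height: 90)
_ = pip.registerVideoEffect(MonochromeEffect())
try? stream.screen.addChild(pip)

// Swap which camera is the main picture:
stream.videoMixerSettings.mainTrack = 1
pip.track = 0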
+/// An object that manages offscreen rendering of a text source.
+public final class TextScreenObject: ScreenObject {
+    /// Specifies the text value.
+    public var string: String = "" {
+        didSet {
+            guard string != oldValue else {
+                return
+            }
+            invalidateLayout()
+        }
+    }
+
+    #if os(macOS)
+    /// Specifies the attributes for strings.
+    public var attributes: [NSAttributedString.Key: Any]? = [
+        .font: NSFont.boldSystemFont(ofSize: 32),
+        .foregroundColor: NSColor.white
+    ] {
+        didSet {
+            invalidateLayout()
+        }
+    }
+    #else
+    /// Specifies the attributes for strings.
+    public var attributes: [NSAttributedString.Key: Any]? = [
+        .font: UIFont.boldSystemFont(ofSize: 32),
+        .foregroundColor: UIColor.white
+    ] {
+        didSet {
+            invalidateLayout()
+        }
+    }
+    #endif
+
+    override public var bounds: CGRect {
+        didSet {
+            guard bounds != oldValue else {
+                return
+            }
+            context = CGContext(
+                data: nil,
+                width: Int(bounds.width),
+                height: Int(bounds.height),
+                bitsPerComponent: 8,
+                bytesPerRow: Int(bounds.width) * 4,
+                space: CGColorSpaceCreateDeviceRGB(),
+                bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue).rawValue
+            )
+        }
+    }
+
+    private var context: CGContext?
+    private var framesetter: CTFramesetter?
+
+    override public func makeBounds(_ size: CGSize) -> CGRect {
+        guard !string.isEmpty else {
+            self.framesetter = nil
+            return .zero
+        }
+        let bounds = super.makeBounds(size)
+        let attributedString = NSAttributedString(string: string, attributes: attributes)
+        let framesetter = CTFramesetterCreateWithAttributedString(attributedString)
+        let frameSize = CTFramesetterSuggestFrameSizeWithConstraints(
+            framesetter,
+            .init(),
+            nil,
+            bounds.size,
+            nil
+        )
+        self.framesetter = framesetter
+        return super.makeBounds(frameSize)
+    }
+
+    override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+        guard let context, let framesetter else {
+            return nil
+        }
+        let path = CGPath(rect: .init(origin: .zero, size: bounds.size), transform: nil)
+        let frame = CTFramesetterCreateFrame(framesetter, .init(), path, nil)
+        context.clear(context.boundingBoxOfPath)
+        CTFrameDraw(frame, context)
+        return context.makeImage()
+    }
+}
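// A sketch combining TextScreenObject with ScreenDelegate, assuming `stream`
// is an attached IOStream. willLayout fires once per rendered frame, which
// makes it a natural place to refresh time-dependent text. Note the delegate
// is held weakly, so keep a strong reference to the updater yourself.
final class CaptionUpdater: ScreenDelegate {
    let caption: TextScreenObject

    init(caption: TextScreenObject) {
        self.caption = caption
    }

    func screen(_ screen: Screen, willLayout time: CMTime) {
        caption.string = String(format: "%05.2f", time.seconds)
    }
}

let caption = TextScreenObject()
caption.horizontalAlignment = .right
caption.verticalAlignment = .bottom
caption.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 16)
try? stream.screen.addChild(caption)

let updater = CaptionUpdater(caption: caption)
stream.screen.delegate = updater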
+/// An object that manages offscreen rendering of an asset resource.
+public final class AssetScreenObject: ScreenObject {
+    private static let outputSettings = [
+        kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA
+    ] as [String: Any]
+
+    /// A Boolean value that indicates whether the reader is currently reading.
+    public var isReading: Bool {
+        return reader?.status == .reading
+    }
+
+    /// A value that specifies how the video is displayed within the screen object's bounds.
+    public var videoGravity: AVLayerVideoGravity = .resizeAspect {
+        didSet {
+            guard videoGravity != oldValue else {
+                return
+            }
+            invalidateLayout()
+        }
+    }
+
+    private var reader: AVAssetReader? {
+        didSet {
+            if let oldValue, oldValue.status == .reading {
+                oldValue.cancelReading()
+            }
+        }
+    }
+
+    private var sampleBuffer: CMSampleBuffer? {
+        didSet {
+            guard sampleBuffer != oldValue else {
+                return
+            }
+            if sampleBuffer == nil {
+                cancelReading()
+                return
+            }
+            invalidateLayout()
+        }
+    }
+
+    private var startedAt: CMTime = .zero
+    private var videoTrackOutput: AVAssetReaderTrackOutput?
+
+    /// Prepares the asset reader to start reading.
+    public func startReading(_ asset: AVAsset) throws {
+        reader = try AVAssetReader(asset: asset)
+        guard let reader else {
+            return
+        }
+        let videoTrack = asset.tracks(withMediaType: .video).first
+        if let videoTrack {
+            let videoTrackOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: Self.outputSettings)
+            videoTrackOutput.alwaysCopiesSampleData = false
+            reader.add(videoTrackOutput)
+            self.videoTrackOutput = videoTrackOutput
+        }
+        startedAt = CMClock.hostTimeClock.time
+        reader.startReading()
+        sampleBuffer = videoTrackOutput?.copyNextSampleBuffer()
+    }
+
+    /// Cancels and stops the reader's output.
+    public func cancelReading() {
+        reader = nil
+        sampleBuffer = nil
+        videoTrackOutput = nil
+    }
+
+    override func makeBounds(_ size: CGSize) -> CGRect {
+        guard parent != nil, let image = sampleBuffer?.formatDescription?.dimensions.size else {
+            return super.makeBounds(size)
+        }
+        let bounds = super.makeBounds(size)
+        switch videoGravity {
+        case .resizeAspect:
+            let scale = min(bounds.size.width / image.width, bounds.size.height / image.height)
+            let scaleSize = CGSize(width: image.width * scale, height: image.height * scale)
+            return super.makeBounds(scaleSize)
+        case .resizeAspectFill:
+            return bounds
+        default:
+            return bounds
+        }
+    }
+
+    override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
+        guard let sampleBuffer, let pixelBuffer = sampleBuffer.imageBuffer else {
+            return nil
+        }
+        let image = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale(
+            bounds.size,
+            image: pixelBuffer.size
+        ))
+        return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent))
+    }
+
+    override func draw(_ renderer: some ScreenRenderer) {
+        super.draw(renderer)
+        let duration = CMClock.hostTimeClock.time - startedAt
+        if let sampleBuffer, sampleBuffer.presentationTimeStamp <= duration {
+            self.sampleBuffer = videoTrackOutput?.copyNextSampleBuffer()
+        }
+    }
+}
diff --git a/Sources/Screen/ScreenObjectContainer.swift b/Sources/Screen/ScreenObjectContainer.swift
new file mode 100644
index 000000000..f7f0432fb
--- /dev/null
+++ b/Sources/Screen/ScreenObjectContainer.swift
@@ -0,0 +1,79 @@
+import AVFoundation
+import Foundation
+
+protocol ScreenObjectContainerConvertible: AnyObject {
+    func addChild(_ child: ScreenObject?) throws
+    func removeChild(_ child: ScreenObject?)
+}
+
+/// An object that represents a collection of screen objects.
+public class ScreenObjectContainer: ScreenObject, ScreenObjectContainerConvertible {
+    /// The errors the container can throw.
+    public enum Error: Swift.Error {
+        /// An error thrown when the same screen object instance is added twice.
+        case alreadyExists
+    }
+
+    /// The total number of child objects.
+    public var childCounts: Int {
+        children.count
+    }
+
+    private var children: [ScreenObject] = .init()
+
+    /// Adds the specified screen object as a child of the current screen object container.
+    public func addChild(_ child: ScreenObject?) throws {
+        guard let child, child != self else {
+            return
+        }
+        if child.parent != nil {
+            throw Error.alreadyExists
+        }
+        child.parent = self
+        children.append(child)
+        invalidateLayout()
+    }
+
+    /// Removes the specified screen object from the current screen object container.
+    public func removeChild(_ child: ScreenObject?) {
+        guard let child, child.parent == self else {
+            return
+        }
+        guard let index = children.firstIndex(where: { $0 == child }) else {
+            return
+        }
+        child.parent = nil
+        children.remove(at: index)
+        invalidateLayout()
+    }
+
+    override func layout(_ renderer: some ScreenRenderer) {
+        bounds = makeBounds(size)
+        children.forEach { child in
+            if child.shouldInvalidateLayout || shouldInvalidateLayout {
+                child.layout(renderer)
+            }
+        }
+        shouldInvalidateLayout = false
+    }
+
+    override func draw(_ renderer: some ScreenRenderer) {
+        guard isVisible else {
+            return
+        }
+        children.forEach { child in
+            guard child.isVisible else {
+                return
+            }
+            child.draw(renderer)
+        }
+    }
+
+    func getScreenObjects<T: ScreenObject>() -> [T] {
+        var objects = children.compactMap { $0 as? T }
+        children.compactMap { $0 as? ScreenObjectContainer }.forEach {
+            objects += $0.getScreenObjects()
+        }
+        return objects
+    }
+}
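// A usage sketch of ScreenObjectContainer, assuming `stream` is an attached
// IOStream. Containers group screen objects so they can be laid out against a
// shared sub-rectangle and shown or hidden as one unit via isVisible.
let hud = ScreenObjectContainer()
hud.size = .init(width: 320, height: 180)
hud.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)

let label = TextScreenObject()
label.string = "LIVE"
try? hud.addChild(label)
try? stream.screen.addChild(hud)

hud.isVisible = false // hides the label and everything else in the group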
diff --git a/Sources/Screen/ScreenRenderer.swift b/Sources/Screen/ScreenRenderer.swift
new file mode 100644
index 000000000..ee8904132
--- /dev/null
+++ b/Sources/Screen/ScreenRenderer.swift
@@ -0,0 +1,165 @@
+import Accelerate
+import AVFoundation
+import CoreImage
+import Foundation
+
+/// A type that renders a screen object.
+public protocol ScreenRenderer: AnyObject {
+    /// The CIContext instance.
+    var context: CIContext { get }
+    /// Specifies the background color of the output video.
+    var backgroundColor: CGColor { get set }
+    /// Lays out a screen object.
+    func layout(_ screenObject: ScreenObject)
+    /// Draws a screen object.
+    func draw(_ screenObject: ScreenObject)
+    /// Sets up the render target.
+    func setTarget(_ pixelBuffer: CVPixelBuffer?)
+}
+
+final class ScreenRendererByCPU: ScreenRenderer {
+    private static var format = vImage_CGImageFormat(
+        bitsPerComponent: 8,
+        bitsPerPixel: 32,
+        colorSpace: nil,
+        bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.first.rawValue),
+        version: 0,
+        decode: nil,
+        renderingIntent: .defaultIntent)
+
+    lazy var context = {
+        guard let device = MTLCreateSystemDefaultDevice() else {
+            return CIContext(options: nil)
+        }
+        return CIContext(mtlDevice: device)
+    }()
+
+    var backgroundColor = CGColor(red: 0x00, green: 0x00, blue: 0x00, alpha: 0x00) {
+        didSet {
+            guard backgroundColor != oldValue, let components = backgroundColor.components else {
+                return
+            }
+            switch components.count {
+            case 2:
+                backgroundColorUInt8Array = [
+                    UInt8(components[1] * 255),
+                    UInt8(components[0] * 255),
+                    UInt8(components[0] * 255),
+                    UInt8(components[0] * 255)
+                ]
+            case 3:
+                backgroundColorUInt8Array = [
+                    UInt8(components[2] * 255),
+                    UInt8(components[0] * 255),
+                    UInt8(components[1] * 255),
+                    UInt8(components[1] * 255)
+                ]
+            case 4:
+                backgroundColorUInt8Array = [
+                    UInt8(components[3] * 255),
+                    UInt8(components[0] * 255),
+                    UInt8(components[1] * 255),
+                    UInt8(components[2] * 255)
+                ]
+            default:
+                break
+            }
+        }
+    }
+
+    private var masks: [ScreenObject: vImage_Buffer] = [:]
+    private var images: [ScreenObject: vImage_Buffer] = [:]
+    private var canvas: vImage_Buffer = .init()
+    private var converter: vImageConverter?
+    private var pixelFormatType: OSType? {
+        didSet {
+            guard pixelFormatType != oldValue else {
+                return
+            }
+            converter = nil
+        }
+    }
+    private var backgroundColorUInt8Array: [UInt8] = [0x00, 0x00, 0x00, 0x00]
+
+    func setTarget(_ pixelBuffer: CVPixelBuffer?) {
+        guard let pixelBuffer else {
+            return
+        }
+        pixelFormatType = pixelBuffer.pixelFormatType
+        if converter == nil {
+            let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(pixelBuffer).takeRetainedValue()
+            vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB())
+            converter = try? vImageConverter.make(
+                sourceFormat: cvImageFormat,
+                destinationFormat: Self.format
+            )
+        }
+        guard let converter else {
+            return
+        }
+        vImageBuffer_InitForCopyFromCVPixelBuffer(
+            &canvas,
+            converter,
+            pixelBuffer,
+            vImage_Flags(kvImageNoAllocate)
+        )
+        switch pixelFormatType {
+        case kCVPixelFormatType_32ARGB:
+            vImageBufferFill_ARGB8888(
+                &canvas,
+                &backgroundColorUInt8Array,
+                vImage_Flags(kvImageNoFlags)
+            )
+        default:
+            break
+        }
+    }
+
+    func layout(_ screenObject: ScreenObject) {
+        autoreleasepool {
+            guard let image = screenObject.makeImage(self) else {
+                return
+            }
+            do {
+                images[screenObject]?.free()
+                images[screenObject] = try vImage_Buffer(cgImage: image, format: Self.format)
+                if 0 < screenObject.cornerRadius {
+                    masks[screenObject] = ShapeFactory.shared.cornerRadius(screenObject.bounds.size, cornerRadius: screenObject.cornerRadius)
+                } else {
+                    masks[screenObject] = nil
+                }
+            } catch {
+                logger.error(error)
+            }
+        }
+    }
+
+    func draw(_ screenObject: ScreenObject) {
+        guard var image = images[screenObject] else {
+            return
+        }
+
+        if var mask = masks[screenObject] {
+            vImageSelectChannels_ARGB8888(&mask, &image, &image, 0x8, vImage_Flags(kvImageNoFlags))
+        }
+
+        let origin = screenObject.bounds.origin
+        let start = Int(origin.y) * canvas.rowBytes + Int(origin.x) * 4
+        var destination = vImage_Buffer(
+            data: canvas.data.advanced(by: start),
+            height: image.height,
+            width: image.width,
+            rowBytes: canvas.rowBytes
+        )
+
+        switch pixelFormatType {
+        case kCVPixelFormatType_32ARGB:
+            vImageAlphaBlend_ARGB8888(
+                &image,
+                &destination,
+                &destination,
+                vImage_Flags(kvImageDoNotTile)
+            )
+        default:
+            break
+        }
+    }
+}
diff --git a/Sources/Screen/VideoEffect.swift b/Sources/Screen/VideoEffect.swift
index ab939765c..d4a265c3a 100644
--- a/Sources/Screen/VideoEffect.swift
+++ b/Sources/Screen/VideoEffect.swift
@@ -21,9 +21,10 @@ import Foundation
 ///     }
 /// }
 /// ```
-open class VideoEffect: NSObject {
-    /// Specifies the ciContext object.
-    public var ciContext: CIContext?
+open class VideoEffect {
+    /// Creates an object instance.
+    public init() {
+    }
 
     /// Executes to apply a video effect.
     open func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
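// A sketch of a custom VideoEffect under the new subclassing contract: no
// NSObject superclass and no injected ciContext; an effect is simply a
// CIImage-to-CIImage transform. The sepia filter here stands in for any
// Core Image pipeline.
import CoreImage
import CoreMedia

final class SepiaEffect: VideoEffect {
    private let filter = CIFilter(name: "CISepiaTone")

    override func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
        filter?.setValue(image, forKey: kCIInputImageKey)
        filter?.setValue(0.8, forKey: kCIInputIntensityKey)
        return filter?.outputImage ?? image
    }
}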
diff --git a/Tests/CMVideoSampleBufferFactory.swift b/Tests/CMVideoSampleBufferFactory.swift
new file mode 100644
index 000000000..4be591f7e
--- /dev/null
+++ b/Tests/CMVideoSampleBufferFactory.swift
@@ -0,0 +1,33 @@
+import AVFoundation
+import Foundation
+
+enum CMVideoSampleBufferFactory {
+    static func makeSampleBuffer(width: Int, height: Int) -> CMSampleBuffer? {
+        var pixelBuffer: CVPixelBuffer?
+        CVPixelBufferCreate(nil, width, height, kCVPixelFormatType_32BGRA, nil, &pixelBuffer)
+        guard let pixelBuffer else {
+            return nil
+        }
+        var outputFormat: CMFormatDescription?
+        CMVideoFormatDescriptionCreateForImageBuffer(
+            allocator: kCFAllocatorDefault,
+            imageBuffer: pixelBuffer,
+            formatDescriptionOut: &outputFormat
+        )
+        guard let outputFormat else {
+            return nil
+        }
+        var timingInfo = CMSampleTimingInfo()
+        var sampleBuffer: CMSampleBuffer?
+        guard CMSampleBufferCreateReadyWithImageBuffer(
+            allocator: kCFAllocatorDefault,
+            imageBuffer: pixelBuffer,
+            formatDescription: outputFormat,
+            sampleTiming: &timingInfo,
+            sampleBufferOut: &sampleBuffer
+        ) == noErr else {
+            return nil
+        }
+        return sampleBuffer
+    }
+}
diff --git a/Tests/Screen/ScreenObjectContainerTests.swift b/Tests/Screen/ScreenObjectContainerTests.swift
new file mode 100644
index 000000000..4cb568050
--- /dev/null
+++ b/Tests/Screen/ScreenObjectContainerTests.swift
@@ -0,0 +1,28 @@
+import AVFoundation
+import Foundation
+import XCTest
+
+@testable import HaishinKit
+
+final class ScreenObjectContainerTests: XCTestCase {
+    func testLookUpVideoTrackScreenObject() {
+        let container1 = ScreenObjectContainer()
+
+        let videoTrack1 = VideoTrackScreenObject()
+        let videoTrack2 = VideoTrackScreenObject()
+
+        try? container1.addChild(videoTrack1)
+        try? container1.addChild(videoTrack2)
+
+        let videoTracks1 = container1.getScreenObjects() as [VideoTrackScreenObject]
+        XCTAssertEqual(videoTracks1.count, 2)
+
+        let container2 = ScreenObjectContainer()
+        let videoTrack3 = VideoTrackScreenObject()
+        try? container2.addChild(videoTrack3)
+        try? container1.addChild(container2)
+
+        let videoTracks2 = container1.getScreenObjects() as [VideoTrackScreenObject]
+        XCTAssertEqual(videoTracks2.count, 3)
+    }
+}
diff --git a/Tests/Screen/ScreenObjectTests.swift b/Tests/Screen/ScreenObjectTests.swift
new file mode 100644
index 000000000..5380ad71e
--- /dev/null
+++ b/Tests/Screen/ScreenObjectTests.swift
@@ -0,0 +1,97 @@
+import AVFoundation
+import Foundation
+import XCTest
+
+@testable import HaishinKit
+
+final class ScreenObjectTests: XCTestCase {
+    func testScreenHorizontalAlignmentRect() {
+        let screen = Screen()
+
+        let object1 = ScreenObject()
+        object1.size = .init(width: 100, height: 100)
+        object1.horizontalAlignment = .left
+
+        let object2 = ScreenObject()
+        object2.size = .init(width: 100, height: 100)
+        object2.horizontalAlignment = .center
+
+        let object3 = ScreenObject()
+        object3.size = .init(width: 100, height: 100)
+        object3.horizontalAlignment = .right
+
+        try? screen.addChild(object1)
+        try? screen.addChild(object2)
+        try? screen.addChild(object3)
+
+        if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+            _ = screen.render(sampleBuffer)
+        }
+
+        XCTAssertEqual(object1.bounds, .init(origin: .zero, size: object1.size))
+        XCTAssertEqual(object2.bounds, .init(x: 750, y: 0, width: 100, height: 100))
+        XCTAssertEqual(object3.bounds, .init(x: 1500, y: 0, width: 100, height: 100))
+    }
+
+    func testScreenVerticalAlignmentRect() {
+        let screen = Screen()
+
+        let object0 = ScreenObject()
+        object0.size = .zero
+        object0.verticalAlignment = .top
+
+        let object1 = ScreenObject()
+        object1.size = .init(width: 100, height: 100)
+        object1.verticalAlignment = .top
+
+        let object2 = ScreenObject()
+        object2.size = .init(width: 100, height: 100)
+        object2.verticalAlignment = .middle
+
+        let object3 = ScreenObject()
+        object3.size = .init(width: 100, height: 100)
+        object3.verticalAlignment = .bottom
+
+        try? screen.addChild(object0)
+        try? screen.addChild(object1)
+        try? screen.addChild(object2)
+        try? screen.addChild(object3)
+
+        if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+            _ = screen.render(sampleBuffer)
+        }
+
+        XCTAssertEqual(object0.bounds, .init(x: 0, y: 0, width: 1600, height: 900))
+        XCTAssertEqual(object1.bounds, .init(x: 0, y: 0, width: object1.size.width, height: object1.size.height))
+        XCTAssertEqual(object2.bounds, .init(x: 0, y: 400, width: 100, height: 100))
+        XCTAssertEqual(object3.bounds, .init(x: 0, y: 800, width: 100, height: 100))
+    }
+
+    func testScreenWithContainerTests() {
+        let screen = Screen()
+
+        let container = ScreenObjectContainer()
+        container.size = .init(width: 200, height: 100)
+        container.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
+
+        let object0 = ScreenObject()
+        object0.size = .zero
+        object0.verticalAlignment = .top
+
+        let object1 = ScreenObject()
+        object1.size = .init(width: 100, height: 100)
+        object1.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
+        object1.verticalAlignment = .top
+
+        try? container.addChild(object0)
+        try? container.addChild(object1)
+        try? screen.addChild(container)
+
+        if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+            _ = screen.render(sampleBuffer)
+        }
+
+        XCTAssertEqual(object0.bounds, .init(x: 16, y: 16, width: 200, height: 100))
+        XCTAssertEqual(object1.bounds, .init(x: 32, y: 32, width: 100, height: 100))
+    }
+}
diff --git a/Tests/Screen/VideoTrackScreenObjectTests.swift b/Tests/Screen/VideoTrackScreenObjectTests.swift
new file mode 100644
index 000000000..5f5903acf
--- /dev/null
+++ b/Tests/Screen/VideoTrackScreenObjectTests.swift
@@ -0,0 +1,41 @@
+import AVFoundation
+import Foundation
+import XCTest
+
+@testable import HaishinKit
+
+final class VideoTrackScreenObjectTests: XCTestCase {
+    func testHorizontalAlignmentBounds() {
+        let screen = Screen()
+
+        let object1 = VideoTrackScreenObject()
+        object1.videoGravity = .resizeAspect
+        object1.size = .init(width: 160, height: 90)
+        object1.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!)
+        object1.horizontalAlignment = .left
+
+        let object2 = VideoTrackScreenObject()
+        object2.videoGravity = .resizeAspect
+        object2.size = .init(width: 160, height: 90)
+        object2.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!)
+        object2.horizontalAlignment = .center
+
+        let object3 = VideoTrackScreenObject()
+        object3.videoGravity = .resizeAspect
+        object3.size = .init(width: 160, height: 90)
+        object3.enqueue(CMVideoSampleBufferFactory.makeSampleBuffer(width: 900, height: 1600)!)
+        object3.horizontalAlignment = .right
+
+        try? screen.addChild(object1)
+        try? screen.addChild(object2)
+        try? screen.addChild(object3)
+
+        if let sampleBuffer = CMVideoSampleBufferFactory.makeSampleBuffer(width: 1600, height: 900) {
+            _ = screen.render(sampleBuffer)
+        }
+
+        XCTAssertEqual(object1.bounds, .init(x: 0, y: 0, width: 50.625, height: 90))
+        XCTAssertEqual(object2.bounds, .init(x: 774.6875, y: 0, width: 50.625, height: 90))
+        XCTAssertEqual(object3.bounds, .init(x: 1549.375, y: 0, width: 50.625, height: 90))
+    }
+}
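// A usage sketch of AssetScreenObject, assuming `stream` is an attached
// IOStream and that a hypothetical "sample.mp4" ships in the app bundle.
// Frames are pulled with copyNextSampleBuffer() against the host clock each
// time the object is drawn, so the clip plays back in real time; isReading
// stays true until the reader finishes or cancelReading() is called.
if let url = Bundle.main.url(forResource: "sample", withExtension: "mp4") {
    let asset = AssetScreenObject()
    asset.size = .init(width: 180, height: 180)
    asset.videoGravity = .resizeAspectFill
    asset.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
    try? asset.startReading(AVAsset(url: url))
    try? stream.screen.addChild(asset)
    print(asset.isReading) // true while the clip is still decoding
}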