Add AVSampleBufferDisplayLayer rendering mode to VideoView (#222)
* Create SampleBufferVideoRenderer.swift

* fix rendering

* render modes

* multitasking camera access

* render using AVSampleBufferDisplayLayer

* expose layer

* params for swift ui

* runtime render mode switch
hiroshihorie authored Sep 6, 2023
1 parent b53e3d8 commit e49434e
Showing 3 changed files with 184 additions and 58 deletions.
4 changes: 4 additions & 0 deletions Sources/LiveKit/SwiftUI/SwiftUIVideoView.swift
@@ -67,6 +67,7 @@ public struct SwiftUIVideoView: NativeViewRepresentable {
let track: VideoTrack
let layoutMode: VideoView.LayoutMode
let mirrorMode: VideoView.MirrorMode
let renderMode: VideoView.RenderMode
let debugMode: Bool

@Binding var isRendering: Bool
@@ -78,6 +79,7 @@ public struct SwiftUIVideoView: NativeViewRepresentable {
public init(_ track: VideoTrack,
layoutMode: VideoView.LayoutMode = .fill,
mirrorMode: VideoView.MirrorMode = .auto,
renderMode: VideoView.RenderMode = .auto,
debugMode: Bool = false,
isRendering: Binding<Bool> = .constant(false),
dimensions: Binding<Dimensions?> = .constant(nil),
@@ -86,6 +88,7 @@ public struct SwiftUIVideoView: NativeViewRepresentable {
self.track = track
self.layoutMode = layoutMode
self.mirrorMode = mirrorMode
self.renderMode = renderMode
self.debugMode = debugMode

self._isRendering = isRendering
@@ -117,6 +120,7 @@ public struct SwiftUIVideoView: NativeViewRepresentable {
videoView.track = track
videoView.layoutMode = layoutMode
videoView.mirrorMode = mirrorMode
videoView.renderMode = renderMode
videoView.debugMode = debugMode

// update
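For reference, a minimal usage sketch of the new renderMode parameter added above. Only renderMode itself comes from this diff; the surrounding view and the way the track is obtained are illustrative assumptions.

import LiveKit
import SwiftUI

struct ParticipantVideoView: View {

    // Assumed to be supplied by the app, e.g. from a Room's participants.
    let track: VideoTrack

    var body: some View {
        // .sampleBuffer opts into AVSampleBufferDisplayLayer rendering;
        // the default .auto lets the SDK choose the renderer.
        SwiftUIVideoView(track,
                         layoutMode: .fill,
                         mirrorMode: .auto,
                         renderMode: .sampleBuffer)
    }
}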
73 changes: 73 additions & 0 deletions Sources/LiveKit/Views/InternalSampleBufferVideoRenderer.swift
@@ -0,0 +1,73 @@
/*
* Copyright 2023 LiveKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import AVFoundation
import Foundation
import WebRTC

internal class InternalSampleBufferVideoRenderer: NativeView {

public let sampleBufferDisplayLayer: AVSampleBufferDisplayLayer

override init(frame: CGRect) {
sampleBufferDisplayLayer = AVSampleBufferDisplayLayer()
super.init(frame: frame)
sampleBufferDisplayLayer.videoGravity = .resizeAspectFill
#if os(macOS)
// Layer-backing must be enabled explicitly on macOS
wantsLayer = true
layer?.insertSublayer(sampleBufferDisplayLayer, at: 0)
#elseif os(iOS)
layer.insertSublayer(sampleBufferDisplayLayer, at: 0)
#else
fatalError("Unimplemented")
#endif
}

required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

override func performLayout() {
super.performLayout()
sampleBufferDisplayLayer.frame = bounds
}
}

extension InternalSampleBufferVideoRenderer: RTCVideoRenderer {

public func setSize(_ size: CGSize) {
// No-op; the layer's frame is kept in sync in performLayout()
}

public func renderFrame(_ frame: RTCVideoFrame?) {

guard let frame = frame else { return }

guard let rtcPixelBuffer = frame.buffer as? RTCCVPixelBuffer else {
logger.warning("frame.buffer is not an RTCCVPixelBuffer")
return
}

guard let sampleBuffer = CMSampleBuffer.from(rtcPixelBuffer.pixelBuffer) else {
logger.error("Failed to convert CVPixelBuffer to CMSampleBuffer")
return
}

DispatchQueue.main.async {
self.sampleBufferDisplayLayer.enqueue(sampleBuffer)
}
}
}
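Note: CMSampleBuffer.from(_:) above is a helper defined elsewhere in the SDK and is not part of this diff. Below is a minimal sketch of what such a CVPixelBuffer-to-CMSampleBuffer conversion can look like, assuming display-immediate timing; names and details are illustrative, not the SDK's actual implementation.

import AVFoundation
import CoreMedia

extension CMSampleBuffer {

    // Wraps a CVPixelBuffer in a CMSampleBuffer suitable for
    // AVSampleBufferDisplayLayer.enqueue(_:). Returns nil on failure.
    static func from(_ pixelBuffer: CVPixelBuffer) -> CMSampleBuffer? {

        var formatDescription: CMVideoFormatDescription?
        guard CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                                           imageBuffer: pixelBuffer,
                                                           formatDescriptionOut: &formatDescription) == noErr,
            let formatDescription = formatDescription else { return nil }

        // Invalid timestamps plus the DisplayImmediately attachment make
        // the layer present the frame as soon as it is enqueued.
        var timingInfo = CMSampleTimingInfo(duration: .invalid,
                                            presentationTimeStamp: .invalid,
                                            decodeTimeStamp: .invalid)

        var sampleBuffer: CMSampleBuffer?
        guard CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault,
                                                       imageBuffer: pixelBuffer,
                                                       formatDescription: formatDescription,
                                                       sampleTiming: &timingInfo,
                                                       sampleBufferOut: &sampleBuffer) == noErr,
            let sampleBuffer = sampleBuffer else { return nil }

        if let attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true),
            CFArrayGetCount(attachments) > 0 {
            let dict = unsafeBitCast(CFArrayGetValueAtIndex(attachments, 0), to: CFMutableDictionary.self)
            CFDictionarySetValue(dict,
                                 Unmanaged.passUnretained(kCMSampleAttachmentKey_DisplayImmediately).toOpaque(),
                                 Unmanaged.passUnretained(kCFBooleanTrue).toOpaque())
        }

        return sampleBuffer
    }
}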
165 changes: 107 additions & 58 deletions Sources/LiveKit/Views/VideoView.swift
@@ -50,6 +50,21 @@ public class VideoView: NativeView, Loggable {
case mirror
}

@objc
public enum RenderMode: Int, Codable, CustomStringConvertible {
case auto
case metal
case sampleBuffer

public var description: String {
switch self {
case .auto: return ".auto"
case .metal: return ".metal"
case .sampleBuffer: return ".sampleBuffer"
}
}
}

/// ``LayoutMode-swift.enum`` of the ``VideoView``.
@objc
public var layoutMode: LayoutMode {
@@ -64,6 +79,12 @@
set { _state.mutate { $0.mirrorMode = newValue } }
}

@objc
public var renderMode: RenderMode {
get { _state.renderMode }
set { _state.mutate { $0.renderMode = newValue } }
}

/// Force video to be rotated to preferred ``VideoRotation``.
public var rotationOverride: VideoRotation? {
get { _state.rotationOverride }
@@ -118,6 +139,14 @@
@objc
public var didRenderFirstFrame: Bool { _state.didRenderFirstFrame }

/// Access the internal AVSampleBufferDisplayLayer used for rendering.
/// This is non-nil only while the sampleBuffer renderer is active.
/// Should be accessed from the main thread.
public var avSampleBufferDisplayLayer: AVSampleBufferDisplayLayer? {
guard let nr = nativeRenderer as? InternalSampleBufferVideoRenderer else { return nil }
return nr.sampleBufferDisplayLayer
}

// MARK: - Internal

internal struct State: Equatable {
@@ -132,6 +161,7 @@
var didLayout: Bool = false
var layoutMode: LayoutMode = .fill
var mirrorMode: MirrorMode = .auto
var renderMode: RenderMode = .sampleBuffer
var rotationOverride: VideoRotation?

var debugMode: Bool = false
@@ -181,64 +211,75 @@
guard let self = self else { return }

let shouldRenderDidUpdate = newState.shouldRender != oldState.shouldRender
let renderModeDidUpdate = newState.renderMode != oldState.renderMode

// track was swapped
let trackDidUpdate = !Self.track(oldState.track as? VideoTrack, isEqualWith: newState.track as? VideoTrack)

if trackDidUpdate || shouldRenderDidUpdate {
// Enter .main only if the following conditions are met...
if trackDidUpdate || shouldRenderDidUpdate || renderModeDidUpdate {

Task.detached { @MainActor in

// clean up old track
if let track = oldState.track as? VideoTrack {
var didReCreateNativeRenderer = false

track.remove(videoRenderer: self)
if trackDidUpdate || shouldRenderDidUpdate {

if let nr = self.nativeRenderer {
self.log("removing nativeRenderer")
nr.removeFromSuperview()
self.nativeRenderer = nil
}
// clean up old track
if let track = oldState.track as? VideoTrack {

// CapturerDelegate
if let localTrack = track as? LocalVideoTrack {
localTrack.capturer.remove(delegate: self)
}
track.remove(videoRenderer: self)

if let nr = self.nativeRenderer {
self.log("removing nativeRenderer")
nr.removeFromSuperview()
self.nativeRenderer = nil
}

// CapturerDelegate
if let localTrack = track as? LocalVideoTrack {
localTrack.capturer.remove(delegate: self)
}

// notify detach
track.delegates.notify(label: { "track.didDetach videoView: \(self)" }) { [weak self, weak track] (delegate) -> Void in
guard let self = self, let track = track else { return }
delegate.track?(track, didDetach: self)
// notify detach
track.delegates.notify(label: { "track.didDetach videoView: \(self)" }) { [weak self, weak track] (delegate) -> Void in
guard let self = self, let track = track else { return }
delegate.track?(track, didDetach: self)
}
}
}

// set new track
if let track = newState.track as? VideoTrack, newState.shouldRender {
// set new track
if let track = newState.track as? VideoTrack, newState.shouldRender {

// re-create renderer on main thread
let nr = self.reCreateNativeRenderer()
// re-create renderer on main thread
let nr = self.reCreateNativeRenderer()
didReCreateNativeRenderer = true

track.add(videoRenderer: self)
track.add(videoRenderer: self)

if let frame = track._state.videoFrame {
self.log("rendering cached frame track: \(track._state.sid ?? "nil")")
nr.renderFrame(frame)
self.setNeedsLayout()
}
if let frame = track._state.videoFrame {
self.log("rendering cached frame track: \(track._state.sid ?? "nil")")
nr.renderFrame(frame)
self.setNeedsLayout()
}

// CapturerDelegate
if let localTrack = track as? LocalVideoTrack {
localTrack.capturer.add(delegate: self)
}
// CapturerDelegate
if let localTrack = track as? LocalVideoTrack {
localTrack.capturer.add(delegate: self)
}

// notify attach
track.delegates.notify(label: { "track.didAttach videoView: \(self)" }) { [weak self, weak track] (delegate) -> Void in
guard let self = self, let track = track else { return }
delegate.track?(track, didAttach: self)
// notify attach
track.delegates.notify(label: { "track.didAttach videoView: \(self)" }) { [weak self, weak track] (delegate) -> Void in
guard let self = self, let track = track else { return }
delegate.track?(track, didAttach: self)
}
}
}
}

if renderModeDidUpdate, !didReCreateNativeRenderer {
self.reCreateNativeRenderer()
}
}
}

// isRendering updated
@@ -273,6 +314,7 @@
if newState.debugMode != oldState.debugMode ||
newState.layoutMode != oldState.layoutMode ||
newState.mirrorMode != oldState.mirrorMode ||
newState.renderMode != oldState.renderMode ||
newState.rotationOverride != oldState.rotationOverride ||
newState.didRenderFirstFrame != oldState.didRenderFirstFrame ||
shouldRenderDidUpdate || trackDidUpdate {
@@ -349,9 +391,10 @@
let _dimensions = state.track?.dimensions ?? .zero
let _didRenderFirstFrame = state.didRenderFirstFrame ? "true" : "false"
let _isRendering = state.isRendering ? "true" : "false"
let _renderMode = String(describing: state.renderMode)
let _viewCount = state.track?.videoRenderers.allObjects.count ?? 0
let debugView = ensureDebugTextView()
debugView.text = "#\(hashValue)\n" + "\(_trackSid)\n" + "\(_dimensions.width)x\(_dimensions.height)\n" + "enabled: \(isEnabled)\n" + "firstFrame: \(_didRenderFirstFrame)\n" + "isRendering: \(_isRendering)\n" + "viewCount: \(_viewCount)\n" + "FPS: \(_currentFPS)\n"
debugView.text = "#\(hashValue)\n" + "\(_trackSid)\n" + "\(_dimensions.width)x\(_dimensions.height)\n" + "enabled: \(isEnabled)\n" + "firstFrame: \(_didRenderFirstFrame)\n" + "isRendering: \(_isRendering)\n" + "renderMode: \(_renderMode)\n" + "viewCount: \(_viewCount)\n" + "FPS: \(_currentFPS)\n"
debugView.frame = bounds
#if os(iOS)
debugView.layer.borderColor = (state.shouldRender ? UIColor.green : UIColor.red).withAlphaComponent(0.5).cgColor
@@ -448,12 +491,13 @@ private extension VideoView {
return view
}

@discardableResult
func reCreateNativeRenderer() -> NativeRendererView {
// should always be on main thread
assert(Thread.current.isMainThread, "must be called on main thread")

// create a new rendererView
let newView = VideoView.createNativeRendererView()
let newView = VideoView.createNativeRendererView(for: _state.renderMode)
addSubview(newView)

// keep the old rendererView
@@ -607,29 +651,34 @@ extension VideoView {
#endif
}

internal static func createNativeRendererView() -> NativeRendererView {
logger.log("Using RTCMTLVideoView for VideoView's Renderer", type: VideoView.self)
let result = RTCMTLVideoView()

#if os(iOS)
result.contentMode = .scaleAspectFit
result.videoContentMode = .scaleAspectFit
#endif
internal static func createNativeRendererView(for renderMode: VideoView.RenderMode) -> NativeRendererView {
if case .sampleBuffer = renderMode {
logger.log("Using AVSampleBufferDisplayLayer for VideoView's Renderer", type: VideoView.self)
return InternalSampleBufferVideoRenderer()
} else {
logger.log("Using RTCMTLVideoView for VideoView's Renderer", type: VideoView.self)
let result = RTCMTLVideoView()

// extra checks for MTKView
if let mtkView = result.findMTKView() {
#if os(iOS)
mtkView.contentMode = .scaleAspectFit
#elseif os(macOS)
mtkView.layerContentsPlacement = .scaleProportionallyToFit
result.contentMode = .scaleAspectFit
result.videoContentMode = .scaleAspectFit
#endif
// ensure it's capable of rendering 60fps
// https://developer.apple.com/documentation/metalkit/mtkview/1536027-preferredframespersecond
logger.log("preferredFramesPerSecond = 60", type: VideoView.self)
mtkView.preferredFramesPerSecond = 60
}

return result
// extra checks for MTKView
if let mtkView = result.findMTKView() {
#if os(iOS)
mtkView.contentMode = .scaleAspectFit
#elseif os(macOS)
mtkView.layerContentsPlacement = .scaleProportionallyToFit
#endif
// ensure it's capable of rendering 60fps
// https://developer.apple.com/documentation/metalkit/mtkview/1536027-preferredframespersecond
logger.log("preferredFramesPerSecond = 60", type: VideoView.self)
mtkView.preferredFramesPerSecond = 60
}

return result
}
}
}

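A short sketch of the runtime switch this commit enables. videoView is assumed to be a live VideoView with a track already attached; renderMode and avSampleBufferDisplayLayer come from this diff.

// Switching renderMode at runtime re-creates the underlying renderer view
// (see the state observer above): here we move from Metal to
// AVSampleBufferDisplayLayer-backed rendering.
videoView.renderMode = .sampleBuffer

// The backing layer is exposed only while the sample-buffer renderer is
// active, and should be read on the main thread.
DispatchQueue.main.async {
    if let layer = videoView.avSampleBufferDisplayLayer {
        layer.videoGravity = .resizeAspect
    }
}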
