Composition refactors
andreilob committed Jun 8, 2023
1 parent 949e113 commit ec1cfc5
Showing 26 changed files with 882 additions and 497 deletions.
37 changes: 37 additions & 0 deletions Sources/CameraKage/AVFoundationsInternals/AudioCaptureDevice.swift
@@ -0,0 +1,37 @@
//
// AudioCaptureDevice.swift
//
//
// Created by Lobont Andrei on 06.06.2023.
//

import AVFoundation

class AudioCaptureDevice {
    private(set) var audioDevice: AVCaptureDevice!
    private(set) var audioDeviceInput: AVCaptureDeviceInput!
    private(set) var audioDevicePort: AVCaptureDeviceInput.Port!

    func configureAudioDevice(forSession session: CaptureSession,
                              andOptions options: CameraComponentParsedOptions,
                              isFlipped: Bool) -> Bool {
        do {
            let camera = isFlipped ? options.flipCameraDevice : options.cameraDevice
            guard let audioDevice = AVCaptureDevice.default(for: .audio) else { return false }
            self.audioDevice = audioDevice

            let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
            guard session.canAddInput(audioDeviceInput) else { return false }
            session.addInputWithNoConnections(audioDeviceInput)
            self.audioDeviceInput = audioDeviceInput

            guard let audioPort = audioDeviceInput.ports(for: .audio,
                                                         sourceDeviceType: .builtInMicrophone,
                                                         sourceDevicePosition: camera.avDevicePosition).first else { return false }
            self.audioDevicePort = audioPort
            return true
        } catch {
            return false
        }
    }
}
21 changes: 21 additions & 0 deletions Sources/CameraKage/AVFoundationsInternals/CaptureSession.swift
@@ -0,0 +1,21 @@
//
// CaptureSession.swift
//
//
// Created by Lobont Andrei on 05.06.2023.
//

import AVFoundation

class CaptureSession: AVCaptureMultiCamSession {
    func cleanupSession() {
        defer {
            commitConfiguration()
        }
        beginConfiguration()

        outputs.forEach { removeOutput($0) }
        inputs.forEach { removeInput($0) }
        connections.forEach { removeConnection($0) }
    }
}
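
A brief usage sketch (the call site below is an assumption, not part of this commit): cleanupSession() batches the removal of every output, input, and connection inside a single beginConfiguration/commitConfiguration pass, so an owner can tear the capture graph down in one call before rebuilding it, for example when flipping cameras.

// Sketch only: the owning code is hypothetical.
let session = CaptureSession()
// ... components add their inputs, outputs and connections during configuration ...
// Before rebuilding the graph (e.g. on a camera flip), drop everything in one pass:
session.cleanupSession()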
90 changes: 90 additions & 0 deletions Sources/CameraKage/AVFoundationsInternals/MovieOutput.swift
@@ -0,0 +1,90 @@
//
// MovieOutput.swift
//
//
// Created by Lobont Andrei on 05.06.2023.
//

import AVFoundation

class MovieOutput: AVCaptureMovieFileOutput {
    private(set) var videoPortConnection: AVCaptureConnection?
    private(set) var audioPortConnection: AVCaptureConnection?

    var onMovieCaptureSuccess: ((URL) -> Void)?
    var onMovieCaptureStart: ((URL) -> Void)?
    var onMovieCaptureError: ((CameraError) -> Void)?

    func startMovieRecording() {
        guard !isRecording else { return }
        startRecording(to: .makeTempUrl(for: .video), recordingDelegate: self)
    }

    func stopMovieRecording() {
        stopRecording()
    }

    func configureMovieFileOutput(forSession session: CaptureSession,
                                  andOptions options: CameraComponentParsedOptions,
                                  videoDevice: VideoCaptureDevice,
                                  audioDevice: AudioCaptureDevice,
                                  isFlipped: Bool) -> Bool {
        let camera = isFlipped ? options.flipCameraDevice : options.cameraDevice
        guard session.canAddOutput(self) else { return false }
        session.addOutputWithNoConnections(self)
        maxRecordedDuration = options.maxVideoDuration

        let videoConnection = AVCaptureConnection(inputPorts: [videoDevice.videoDevicePort], output: self)
        guard session.canAddConnection(videoConnection) else { return false }
        session.addConnection(videoConnection)
        videoConnection.isVideoMirrored = camera.avDevicePosition == .front
        videoConnection.videoOrientation = options.cameraOrientation
        if videoConnection.isVideoStabilizationSupported {
            videoConnection.preferredVideoStabilizationMode = options.videoStabilizationMode
        }
        self.videoPortConnection = videoConnection

        let audioConnection = AVCaptureConnection(inputPorts: [audioDevice.audioDevicePort], output: self)
        guard session.canAddConnection(audioConnection) else { return false }
        session.addConnection(audioConnection)
        if availableVideoCodecTypes.contains(.hevc) {
            setOutputSettings([AVVideoCodecKey: AVVideoCodecType.hevc],
                              for: videoConnection)
        }
        self.audioPortConnection = audioConnection

        return true
    }
}

// MARK: - AVCaptureFileOutputRecordingDelegate
extension MovieOutput: AVCaptureFileOutputRecordingDelegate {
    func fileOutput(_ output: AVCaptureFileOutput,
                    didStartRecordingTo fileURL: URL,
                    from connections: [AVCaptureConnection]) {
        onMovieCaptureStart?(fileURL)
    }

    func fileOutput(_ output: AVCaptureFileOutput,
                    didFinishRecordingTo outputFileURL: URL,
                    from connections: [AVCaptureConnection],
                    error: Error?) {
        guard error == nil else {
            cleanup(outputFileURL)
            onMovieCaptureError?(.cameraComponentError(reason: .failedToOutputMovie(message: error?.localizedDescription)))
            return
        }
        onMovieCaptureSuccess?(outputFileURL)
    }

    private func cleanup(_ url: URL) {
        let path = url.path
        if FileManager.default.fileExists(atPath: path) {
            do {
                try FileManager.default.removeItem(atPath: path)
            } catch {
                onMovieCaptureError?(.cameraComponentError(reason: .failedToRemoveFileManagerItem))
            }
        }
    }
}
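
A short usage sketch, assuming the output has already been configured with configureMovieFileOutput and attached to a running session (the call site and print statements are illustrative only):

// Sketch only: the owning code is hypothetical.
let movieOutput = MovieOutput()
movieOutput.onMovieCaptureStart = { url in print("Recording started: \(url)") }
movieOutput.onMovieCaptureSuccess = { url in print("Movie written to: \(url)") }
movieOutput.onMovieCaptureError = { error in print("Recording failed: \(error)") }
// After configureMovieFileOutput(...) has returned true and the session is running:
movieOutput.startMovieRecording()
// ... later ...
movieOutput.stopMovieRecording()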
75 changes: 75 additions & 0 deletions Sources/CameraKage/AVFoundationsInternals/PhotoOutput.swift
@@ -0,0 +1,75 @@
//
// PhotoOutput.swift
//
//
// Created by Lobont Andrei on 05.06.2023.
//

import AVFoundation

class PhotoOutput: AVCapturePhotoOutput {
    private var photoData: Data?

    private(set) var videoPortConnection: AVCaptureConnection?

    var onPhotoCaptureSuccess: ((Data) -> Void)?
    var onPhotoCaptureError: ((CameraError) -> Void)?

    func capturePhoto(_ flashMode: FlashMode,
                      redEyeCorrection: Bool) {
        // Create the settings first (preferring HEVC when available) so the flash
        // and red-eye options below aren't discarded by a later reassignment.
        let photoSettings = availablePhotoCodecTypes.contains(.hevc)
            ? AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
            : AVCapturePhotoSettings()
        photoSettings.flashMode = flashMode.avFlashOption
        photoSettings.isAutoRedEyeReductionEnabled = redEyeCorrection

        if let previewPhotoPixelFormatType = photoSettings.availablePreviewPhotoPixelFormatTypes.first {
            photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPhotoPixelFormatType]
        }

        capturePhoto(with: photoSettings, delegate: self)
    }

    func configurePhotoOutput(forSession session: CaptureSession,
                              andOptions options: CameraComponentParsedOptions,
                              videoDevice: VideoCaptureDevice,
                              isFlipped: Bool) -> Bool {
        let camera = isFlipped ? options.flipCameraDevice : options.cameraDevice
        guard session.canAddOutput(self) else { return false }
        session.addOutputWithNoConnections(self)
        maxPhotoQualityPrioritization = options.photoQualityPrioritizationMode

        let photoConnection = AVCaptureConnection(inputPorts: [videoDevice.videoDevicePort], output: self)
        guard session.canAddConnection(photoConnection) else { return false }
        session.addConnection(photoConnection)
        photoConnection.videoOrientation = options.cameraOrientation
        photoConnection.isVideoMirrored = camera.avDevicePosition == .front
        self.videoPortConnection = photoConnection

        return true
    }
}

// MARK: - AVCapturePhotoCaptureDelegate
extension PhotoOutput: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil else {
            onPhotoCaptureError?(.cameraComponentError(reason: .failedToOutputPhoto(message: error?.localizedDescription)))
            return
        }
        photoData = photo.fileDataRepresentation()
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings,
                     error: Error?) {
        guard error == nil, let photoData else {
            onPhotoCaptureError?(.cameraComponentError(reason: .failedToOutputPhoto(message: error?.localizedDescription)))
            return
        }
        onPhotoCaptureSuccess?(photoData)
    }
}
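
A short usage sketch, assuming the output has been configured with configurePhotoOutput; the .off flash case is an assumption here, since FlashMode's cases are not shown in this diff:

// Sketch only: the owning code and the .off case are assumptions.
let photoOutput = PhotoOutput()
photoOutput.onPhotoCaptureSuccess = { data in
    print("Captured photo, \(data.count) bytes") // persist or display the data
}
photoOutput.onPhotoCaptureError = { error in
    print("Photo capture failed: \(error)")
}
// After configurePhotoOutput(...) has returned true and the session is running:
photoOutput.capturePhoto(.off, redEyeCorrection: true)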
27 changes: 27 additions & 0 deletions Sources/CameraKage/AVFoundationsInternals/PreviewLayer.swift
@@ -0,0 +1,27 @@
//
// PreviewLayer.swift
//
//
// Created by Lobont Andrei on 06.06.2023.
//

import AVFoundation

class PreviewLayer: AVCaptureVideoPreviewLayer {
    private(set) var previewLayerConnection: AVCaptureConnection!

    func configurePreviewLayer(forSession session: CaptureSession,
                               andOptions options: CameraComponentParsedOptions,
                               videoDevice: VideoCaptureDevice) -> Bool {
        setSessionWithNoConnection(session)
        videoGravity = options.videoGravity

        let previewLayerConnection = AVCaptureConnection(inputPort: videoDevice.videoDevicePort, videoPreviewLayer: self)
        previewLayerConnection.videoOrientation = options.cameraOrientation
        guard session.canAddConnection(previewLayerConnection) else { return false }
        session.addConnection(previewLayerConnection)
        self.previewLayerConnection = previewLayerConnection

        return true
    }
}
111 changes: 111 additions & 0 deletions Sources/CameraKage/AVFoundationsInternals/VideoCaptureDevice.swift
@@ -0,0 +1,111 @@
//
// VideoCaptureDevice.swift
//
//
// Created by Lobont Andrei on 06.06.2023.
//

import AVFoundation

class VideoCaptureDevice: NSObject {
    @objc private(set) dynamic var videoDevice: AVCaptureDevice!
    @objc private(set) dynamic var videoDeviceInput: AVCaptureDeviceInput!
    private(set) var videoDevicePort: AVCaptureDeviceInput.Port!

    private var keyValueObservations = [NSKeyValueObservation]()

    var onVideoDeviceError: ((CameraError) -> Void)?

    func focus(with focusMode: FocusMode,
               exposureMode: ExposureMode,
               at point: CGPoint,
               monitorSubjectAreaChange: Bool) throws {
        do {
            try videoDevice.lockForConfiguration()
            if videoDevice.isFocusPointOfInterestSupported &&
                videoDevice.isFocusModeSupported(focusMode.avFocusOption) {
                videoDevice.focusPointOfInterest = point
                videoDevice.focusMode = focusMode.avFocusOption
            }
            if videoDevice.isExposurePointOfInterestSupported &&
                videoDevice.isExposureModeSupported(exposureMode.avExposureOption) {
                videoDevice.exposurePointOfInterest = point
                videoDevice.exposureMode = exposureMode.avExposureOption
            }
            videoDevice.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
            videoDevice.unlockForConfiguration()
        } catch {
            throw CameraError.cameraComponentError(reason: .failedToLockDevice)
        }
    }

    func zoom(atScale: CGFloat) throws {
        do {
            try videoDevice.lockForConfiguration()
            videoDevice.videoZoomFactor = atScale
            videoDevice.unlockForConfiguration()
        } catch {
            throw CameraError.cameraComponentError(reason: .failedToLockDevice)
        }
    }

    func minMaxZoom(_ factor: CGFloat,
                    with options: CameraComponentParsedOptions) -> CGFloat {
        let maxFactor = max(factor, options.minimumZoomScale)
        return min(min(maxFactor, options.maximumZoomScale), videoDevice.activeFormat.videoMaxZoomFactor)
    }

    func configureVideoDevice(forSession session: CaptureSession,
                              andOptions options: CameraComponentParsedOptions,
                              isFlipped: Bool) -> Bool {
        do {
            let camera = isFlipped ? options.flipCameraDevice : options.cameraDevice

            guard let videoDevice = AVCaptureDevice.default(camera.avDeviceType,
                                                            for: .video,
                                                            position: camera.avDevicePosition) else { return false }
            self.videoDevice = videoDevice
            let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
            guard session.canAddInput(videoDeviceInput) else { return false }
            session.addInputWithNoConnections(videoDeviceInput)
            self.videoDeviceInput = videoDeviceInput

            guard let videoPort = videoDeviceInput.ports(for: .video,
                                                         sourceDeviceType: camera.avDeviceType,
                                                         sourceDevicePosition: camera.avDevicePosition).first else { return false }
            self.videoDevicePort = videoPort
            return true
        } catch {
            return false
        }
    }

    func removeObserver() {
        keyValueObservations.forEach { $0.invalidate() }
        keyValueObservations.removeAll()
    }

    func addObserver() {
        // Capture self weakly: the observation is stored on this object, so a
        // strong capture here would create a retain cycle.
        let systemPressureStateObservation = observe(\.videoDevice.systemPressureState, options: .new) { [weak self] _, change in
            guard let self, let systemPressureState = change.newValue else { return }
            self.setRecommendedFrameRateRangeForPressureState(systemPressureState: systemPressureState)
        }
        keyValueObservations.append(systemPressureStateObservation)
    }

    private func setRecommendedFrameRateRangeForPressureState(systemPressureState: AVCaptureDevice.SystemPressureState) {
        let pressureLevel = systemPressureState.level
        if pressureLevel == .serious || pressureLevel == .critical {
            do {
                try videoDevice.lockForConfiguration()
                videoDevice.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 20)
                videoDevice.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 15)
                videoDevice.unlockForConfiguration()
            } catch {
                onVideoDeviceError?(.cameraComponentError(reason: .failedToLockDevice))
            }
        } else if pressureLevel == .shutdown {
            onVideoDeviceError?(.cameraComponentError(reason: .pressureLevelShutdown))
        }
    }
}
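
Taken together, these types compose a multi-cam capture graph with explicit, no-connection inputs and outputs. A minimal end-to-end sketch of how an owner might wire them up, assuming a hypothetical CameraBox type (not part of this commit) and ignoring permissions and threading:

// Sketch only: CameraBox is an assumption used to show the composition.
final class CameraBox {
    private let session = CaptureSession()
    private let videoDevice = VideoCaptureDevice()
    private let audioDevice = AudioCaptureDevice()
    private let movieOutput = MovieOutput()
    private let photoOutput = PhotoOutput()
    private let previewLayer = PreviewLayer()

    func configure(with options: CameraComponentParsedOptions, isFlipped: Bool) -> Bool {
        session.beginConfiguration()
        defer { session.commitConfiguration() }
        guard videoDevice.configureVideoDevice(forSession: session, andOptions: options, isFlipped: isFlipped),
              audioDevice.configureAudioDevice(forSession: session, andOptions: options, isFlipped: isFlipped),
              movieOutput.configureMovieFileOutput(forSession: session,
                                                   andOptions: options,
                                                   videoDevice: videoDevice,
                                                   audioDevice: audioDevice,
                                                   isFlipped: isFlipped),
              photoOutput.configurePhotoOutput(forSession: session,
                                               andOptions: options,
                                               videoDevice: videoDevice,
                                               isFlipped: isFlipped),
              previewLayer.configurePreviewLayer(forSession: session,
                                                 andOptions: options,
                                                 videoDevice: videoDevice) else { return false }
        videoDevice.addObserver()
        return true
    }
}

Failure at any step leaves the session partially configured; a real owner would likely call session.cleanupSession() before retrying.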