
Remove prints & signpost
DavidBertet committed Jun 30, 2023
1 parent d730a76 commit a23aa47
Showing 6 changed files with 0 additions and 134 deletions.
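
For context on what is being stripped: the removed instrumentation follows Apple's signpost pattern, a file-level OSLog created with the .pointsOfInterest category plus paired os_signpost(.begin, ...) / os_signpost(.end, ...) calls around each operation, so the work shows up as intervals in Instruments. A minimal sketch of that pattern (the subsystem string and function name below are placeholders, not values from this diff):

import os.signpost

// Points-of-interest log: signpost intervals recorded against it appear in Instruments' timeline (iOS 12+).
@available(iOS 12.0, *)
let poiLog = OSLog(subsystem: "com.example.camera", category: .pointsOfInterest)

func setUpCaptureSession() {
    if #available(iOS 12.0, *) {
        os_signpost(.begin, log: poiLog, name: "setup")
    }

    // ... expensive work, e.g. configuring and starting an AVCaptureSession ...

    if #available(iOS 12.0, *) {
        os_signpost(.end, log: poiLog, name: "setup")
    }
}

The diff below removes every such begin/end pair, the shared log constant, and the ad-hoc print(...) debugging statements.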
29 changes: 0 additions & 29 deletions ios/ReactNativeCameraKit/CameraView.swift
@@ -6,14 +6,6 @@
import AVFoundation
import UIKit

import os.signpost

@available(iOS 12.0, *)
let log = OSLog(
subsystem: "com.bertet.app",
category: .pointsOfInterest
)

/*
* View abstracting the logic unrelated to the actual camera
* Like permission, ratio overlay, focus, zoom gesture, write image, etc
@@ -90,11 +82,6 @@ class CameraView: UIView {
}

override init(frame: CGRect) {
if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "init CameraView")
}

print("--------- init CameraView")
#if targetEnvironment(simulator)
camera = SimulatorCamera()
#else
@@ -122,9 +109,6 @@ class CameraView: UIView {
using: { [weak self] notification in self?.orientationChanged(notification: notification) })

handleCameraPermission()
if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "init CameraView")
}
}

override func removeFromSuperview() {
@@ -144,8 +128,6 @@
override func reactSetFrame(_ frame: CGRect) {
super.reactSetFrame(frame)

print("---------- CameraView reactSetFrame \(frame) \(bounds) \(Thread.current)")

camera.previewView.frame = bounds

scannerInterfaceView.frame = bounds
@@ -158,21 +140,14 @@
}

override func removeReactSubview(_ subview: UIView) {
print("---------- removeReactSubview \(subview)")
subview.removeFromSuperview()
super.removeReactSubview(subview)
}

// Called once when all props have been set, then every time one is updated
override func didSetProps(_ changedProps: [String]) {
if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "didSetProps")
}

hasPropBeenSetup = true

print("------ didSetProps \(changedProps) \(Thread.current)")

// Camera settings
if changedProps.contains("cameraType") {
camera.update(cameraType: cameraType)
@@ -251,10 +226,6 @@
}
}
}

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "didSetProps")
}
}

// MARK: Public
1 change: 0 additions & 1 deletion ios/ReactNativeCameraKit/RatioOverlayView.swift
@@ -82,7 +82,6 @@ class RatioOverlayView: UIView {
}

func setColor(_ color: UIColor) {
print("setColor \(color)")
topView.backgroundColor = color
bottomView.backgroundColor = color
}
98 changes: 0 additions & 98 deletions ios/ReactNativeCameraKit/RealCamera.swift
@@ -6,8 +6,6 @@
import AVFoundation
import UIKit

import os.signpost

/*
* Real camera implementation that uses AVFoundation
*/
@@ -54,24 +52,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega

func cameraRemovedFromSuperview() {
sessionQueue.async {
if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "stopRunning")
}

if self.setupResult == .success {
print("------- stop running \(Thread.current)")
self.session.stopRunning()
self.removeObservers()
}

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "stopRunning")
}
}
}

deinit {
print("------- deinit RealCamera \(Thread.current)")
removeObservers()
}

@@ -80,12 +68,6 @@
func setup(cameraType: CameraType, supportedBarcodeType: [AVMetadataObject.ObjectType]) {
self.cameraType = cameraType

if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "setup")
}

print("setup \(Thread.current)")

DispatchQueue.main.async {
self.cameraPreview.session = self.session
self.cameraPreview.previewLayer.videoGravity = .resizeAspectFill
@@ -97,31 +79,16 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
// Because -[AVCaptureSession startRunning] is a blocking call which can take a long time, we dispatch session setup to the sessionQueue
// so that the main queue isn't blocked, which keeps the UI responsive.
sessionQueue.async {
if #available(iOS 12.0, *) {
os_signpost(.event, log: log, name: "Processing", "setupCaptureSession")
}

self.setupResult = self.setupCaptureSession(supportedBarcodeType: supportedBarcodeType)

self.addObservers()

if self.setupResult == .success {
print("---- startRunning")
if #available(iOS 12.0, *) {
os_signpost(.event, log: log, name: "Processing", "startRunning")
}
self.session.startRunning()
if #available(iOS 12.0, *) {
os_signpost(.event, log: log, name: "Processing", "finished startRunning")
}

// We need to reapply the configuration after starting the camera
self.update(torchMode: self.torchMode)
}

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "setup")
}
}
}

@@ -136,10 +103,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega

func focus(at touchPoint: CGPoint, focusBehavior: FocusBehavior) {
DispatchQueue.main.async {
if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "focusat")
}

let devicePoint = self.cameraPreview.previewLayer.captureDevicePointConverted(fromLayerPoint: touchPoint)

self.sessionQueue.async {
@@ -155,10 +118,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
exposeWithMode: focusBehavior.exposureMode,
atDevicePoint: devicePoint,
isSubjectAreaChangeMonitoringEnabled: focusBehavior.isSubjectAreaChangeMonitoringEnabled)

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "focusat")
}
}
}
}
@@ -167,18 +126,9 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
self.torchMode = torchMode

sessionQueue.asyncAfter(deadline: .now() + 0.1) {
if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "torchMode")
}

if (self.videoDeviceInput?.device.torchMode != torchMode.avTorchMode) {
print("update torchMode from from \(self.videoDeviceInput?.device.torchMode.rawValue) to \(torchMode.avTorchMode.rawValue)")
self.videoDeviceInput?.device.setTorchMode(torchMode.avTorchMode)
}

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "torchMode")
}
}
}

@@ -189,19 +139,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
func update(cameraType: CameraType) {
self.cameraType = cameraType

if #available(iOS 12.0, *) {
os_signpost(.event, log: log, name: "update cameraType")
}

sessionQueue.async {
if self.videoDeviceInput?.device.position == cameraType.avPosition {
return
}

if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "cameraType")
}

// Avoid chaining device inputs when camera input is denied by the user, since both front and rear video input devices will be nil
guard self.setupResult == .success,
let currentViewDeviceInput = self.videoDeviceInput,
@@ -229,10 +171,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega

// We need to reapply the configuration after reloading the camera
self.update(torchMode: self.torchMode)

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "cameraType")
}
}
}

Expand Down Expand Up @@ -283,12 +221,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
self.onBarcodeRead = onBarcodeRead

sessionQueue.async {
if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "isBarcodeScannerEnabled")
}

print("--------- isBarcodeScannerEnabled")

let newTypes: [AVMetadataObject.ObjectType]
if isEnabled && onBarcodeRead != nil {
let availableTypes = self.metadataOutput.availableMetadataObjectTypes
@@ -298,19 +230,11 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
}

if self.metadataOutput.metadataObjectTypes != newTypes {
if #available(iOS 12.0, *) {
os_signpost(.event, log: log, name: "update metadataObjectTypes")
}

self.metadataOutput.metadataObjectTypes = newTypes

// Setting metadataObjectTypes reloads the camera, we need to reapply the configuration
self.update(torchMode: self.torchMode)
}

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "isBarcodeScannerEnabled")
}
}
}

@@ -320,15 +244,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
self.scannerFrameSize = scannerFrameSize

self.sessionQueue.async {
if #available(iOS 12.0, *) {
os_signpost(.begin, log: log, name: "scannerFrameSize")
}

if !self.session.isRunning {
print("setting rectOfInterest while session not running wouldn't work")
if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "scannerFrameSize")
}
return
}

@@ -337,24 +253,12 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega

self.sessionQueue.async {
if (self.metadataOutput.rectOfInterest == visibleRect) {
if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "scannerFrameSize")
}
return
}

print("------ update scannerFrameSize from \(self.metadataOutput.rectOfInterest) to \(visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1))")

if #available(iOS 12.0, *) {
os_signpost(.event, log: log, name: "update scannerFrameSize")
}
self.metadataOutput.rectOfInterest = visibleRect ?? CGRect(x: 0, y: 0, width: 1, height: 1)
// We need to reapply the configuration after touching the metadataOutput
self.update(torchMode: self.torchMode)

if #available(iOS 12.0, *) {
os_signpost(.end, log: log, name: "scannerFrameSize")
}
}
}
}
@@ -369,8 +273,6 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
return
}

print("----------- \(codeStringValue)")

onBarcodeRead?(codeStringValue)
}

3 changes: 0 additions & 3 deletions ios/ReactNativeCameraKit/ScannerInterfaceView.swift
@@ -49,15 +49,12 @@ class ScannerInterfaceView: UIView {
frameView.frame = CGRect(x: 0, y: 0, width: bounds.size.width - 2 * frameOffset, height: frameHeight)
frameView.center = center

print("--- draw rect scannerInterface \(frameView.frame)")

updateOverlaySize(frameView.frame)
}

// MARK: - Public

var frameSize: CGRect {
print("--- frameView.frame \(frameView.frame)")
return frameView.frame
}

2 changes: 0 additions & 2 deletions ios/ReactNativeCameraKit/SimulatorCamera.swift
@@ -6,8 +6,6 @@
import AVFoundation
import UIKit

import os.signpost

/*
* Fake camera implementation to be used on simulator
*/
1 change: 0 additions & 1 deletion ios/ReactNativeCameraKit/SimulatorPreviewView.swift
@@ -75,7 +75,6 @@ class SimulatorPreviewView: UIView {
}

func randomize() {
print("randomize \(Thread.current)")
layer.backgroundColor = UIColor(hue: CGFloat(Double.random(in: 0...1)), saturation: 1.0, brightness: 1.0, alpha: 1.0).cgColor
balloonLayer.removeFromSuperlayer()
balloonLayer = CALayer()
