From c78c4564d7ef3a311f6bdea6a78639e085a6210f Mon Sep 17 00:00:00 2001 From: hoonjoo-park Date: Mon, 19 May 2025 19:10:03 +0900 Subject: [PATCH 1/5] [iOS-ShutterPhotoSound] feat: add support for shutterPhotoSound prop in iOS CameraView --- ios/ReactNativeCameraKit/CKCameraManager.m | 1 + ios/ReactNativeCameraKit/CKCameraViewComponentView.mm | 8 ++++++-- ios/ReactNativeCameraKit/CameraProtocol.swift | 1 + ios/ReactNativeCameraKit/CameraView.swift | 5 +++++ ios/ReactNativeCameraKit/RealCamera.swift | 5 +++++ ios/ReactNativeCameraKit/SimulatorCamera.swift | 5 +++++ 6 files changed, 23 insertions(+), 2 deletions(-) diff --git a/ios/ReactNativeCameraKit/CKCameraManager.m b/ios/ReactNativeCameraKit/CKCameraManager.m index 57503ba39..63a409797 100644 --- a/ios/ReactNativeCameraKit/CKCameraManager.m +++ b/ios/ReactNativeCameraKit/CKCameraManager.m @@ -41,5 +41,6 @@ @interface RCT_EXTERN_MODULE(CKCameraManager, RCTViewManager) RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKZoomMode) RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber) RCT_EXPORT_VIEW_PROPERTY(maxZoom, NSNumber) +RCT_EXPORT_VIEW_PROPERTY(shutterPhotoSound, BOOL) @end diff --git a/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm b/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm index 356d35be7..da192babf 100644 --- a/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm +++ b/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm @@ -243,8 +243,12 @@ - (void)updateProps:(const Props::Shared &)props oldProps:(const Props::Shared & _view.barcodeFrameSize = @{@"width": @(barcodeWidth), @"height": @(barcodeHeight)}; [changedProps addObject:@"barcodeFrameSize"]; } - - + bool shutterPhotoSound = newProps.shutterPhotoSound; + if (shutterPhotoSound != _view.shutterPhotoSound) { + _view.shutterPhotoSound = shutterPhotoSound; + [changedProps addObject:@"shutterPhotoSound"]; + } + [super updateProps:props oldProps:oldProps]; [_view didSetProps:changedProps]; } diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift 
b/ios/ReactNativeCameraKit/CameraProtocol.swift index 35bcbec50..b9f1fed48 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -21,6 +21,7 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func update(resizeMode: ResizeMode) func update(maxPhotoQualityPrioritization: MaxPhotoQualityPrioritization?) func update(barcodeFrameSize: CGSize?) + func update(shutterPhotoSound: Bool) func zoomPinchStart() func zoomPinchChange(pinchScale: CGFloat) diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index ad161e5ab..6cfd2c322 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -38,6 +38,7 @@ public class CameraView: UIView { @objc public var flashMode: FlashMode = .auto @objc public var torchMode: TorchMode = .off @objc public var maxPhotoQualityPrioritization: MaxPhotoQualityPrioritization = .balanced + @objc public var shutterPhotoSound: Bool = true // ratio overlay @objc public var ratioOverlay: String? @objc public var ratioOverlayColor: UIColor? @@ -288,6 +289,10 @@ public class CameraView: UIView { if changedProps.contains("maxZoom") { camera.update(maxZoom: maxZoom?.doubleValue) } + + if changedProps.contains("shutterPhotoSound") { + camera.update(shutterPhotoSound: shutterPhotoSound) + } } // MARK: Public diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index cce97cb28..aed7ab47f 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -44,6 +44,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var lastOnZoom: Double? private var zoom: Double? private var maxZoom: Double? + private var shutterPhotoSound: Bool? = true private var deviceOrientation = UIDeviceOrientation.unknown private var motionManager: CMMotionManager? 
@@ -328,6 +329,10 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } } + func update(shutterPhotoSound: Bool) { + self.shutterPhotoSound = shutterPhotoSound + } + func capturePicture(onWillCapture: @escaping () -> Void, onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void, onError: @escaping (_ message: String) -> Void) { diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index 854311553..09a677b73 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -19,6 +19,7 @@ class SimulatorCamera: CameraProtocol { private var maxZoom: Double? private var resizeMode: ResizeMode = .contain private var barcodeFrameSize: CGSize? + private var shutterPhotoSound: Bool? = true var previewView: UIView { mockPreview } @@ -201,4 +202,8 @@ class SimulatorCamera: CameraProtocol { func update(barcodeFrameSize: CGSize?) 
{ self.barcodeFrameSize = barcodeFrameSize } + + func update(shutterPhotoSound: Bool) { + self.shutterPhotoSound = shutterPhotoSound + } } From 6da6f57feb53b43b1bd882a835ad78a047db89d2 Mon Sep 17 00:00:00 2001 From: hoonjoo-park Date: Mon, 19 May 2025 19:13:37 +0900 Subject: [PATCH 2/5] feat: implement silentCapture using AVCaptureVideoDataOutput --- ios/ReactNativeCameraKit/CameraProtocol.swift | 1 + ios/ReactNativeCameraKit/CameraView.swift | 133 +++++++++++++++--- ios/ReactNativeCameraKit/RealCamera.swift | 36 ++++- .../SimulatorCamera.swift | 1 + 4 files changed, 148 insertions(+), 23 deletions(-) diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift index b9f1fed48..24aaf7e2e 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -7,6 +7,7 @@ import AVFoundation protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { var previewView: UIView { get } + var imageBuffer: CMSampleBuffer? { get } func setup(cameraType: CameraType, supportedBarcodeType: [CodeFormat]) func cameraRemovedFromSuperview() diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 6cfd2c322..95d629819 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -66,6 +66,14 @@ public class CameraView: UIView { var eventInteraction: Any? 
= nil + var isSimulator: Bool { +#if targetEnvironment(simulator) + true +#else + false +#endif + } + // MARK: - Setup // This is used to delay camera setup until we have both granted permission & received default props @@ -298,26 +306,30 @@ public class CameraView: UIView { // MARK: Public @objc public func capture(onSuccess: @escaping (_ imageObject: [String: Any]) -> Void, - onError: @escaping (_ error: String) -> Void) { - camera.capturePicture(onWillCapture: { [weak self] in - // Flash/dim preview to indicate shutter action - DispatchQueue.main.async { - self?.camera.previewView.alpha = 0 - UIView.animate(withDuration: 0.35, animations: { - self?.camera.previewView.alpha = 1 - }) - } - }, onSuccess: { [weak self] imageData, thumbnailData, dimensions in - DispatchQueue.global(qos: .default).async { - self?.writeCaptured(imageData: imageData, - thumbnailData: thumbnailData, - dimensions: dimensions, - onSuccess: onSuccess, - onError: onError) - - self?.focusInterfaceView.resetFocus() - } - }, onError: onError) + onError: @escaping (_ error: String) -> Void) { + if isSimulator || shutterPhotoSound { + camera.capturePicture(onWillCapture: { [weak self] in + // Flash/dim preview to indicate shutter action + DispatchQueue.main.async { + self?.camera.previewView.alpha = 0 + UIView.animate(withDuration: 0.35, animations: { + self?.camera.previewView.alpha = 1 + }) + } + }, onSuccess: { [weak self] imageData, thumbnailData, dimensions in + DispatchQueue.global(qos: .default).async { + self?.writeCaptured(imageData: imageData, + thumbnailData: thumbnailData, + dimensions: dimensions, + onSuccess: onSuccess, + onError: onError) + + self?.focusInterfaceView.resetFocus() + } + }, onError: onError) + } else { + self.silentCapture(onSuccess: onSuccess, onError: onError) + } } // MARK: - Private Helper @@ -428,6 +440,87 @@ public class CameraView: UIView { onReadCode?(["codeStringValue": barcode,"codeFormat":codeFormat.rawValue]) } + private func silentCapture(onSuccess: 
@escaping (_ imageObject: [String: Any]) -> Void, + onError: @escaping (_ error: String) -> Void) { + self.camera.previewView.alpha = 0 + UIView.animate(withDuration: 0.35) { + self.camera.previewView.alpha = 1 + } + + guard let imageBuffer = self.camera.imageBuffer, + let cvPixelBuffer = CMSampleBufferGetImageBuffer(imageBuffer) else { + onError("Failed to get image buffer") + return + } + + let ciImage = CIImage(cvPixelBuffer: cvPixelBuffer) + let ciImageContext = CIContext() + + guard let cgImage = ciImageContext.createCGImage(ciImage, from: ciImage.extent) else { + onError("Failed to create CGImage") + return + } + + let isPhone = UIDevice.current.userInterfaceIdiom == .phone + let orientation = self.imageOrientation( + deviceOrientation: isPhone ? self.screenOrientation : UIDevice.current.orientation, + cameraPosition: self.cameraType.avPosition) + + let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation) + + // convert image to JPEG + guard let imageData = image.jpegData(compressionQuality: 0.85) else { + onError("Failed to convert image to data") + return + } + + let thumbnailData = image.jpegData(compressionQuality: 0.5) + let dimensions = CMVideoDimensions(width: Int32(image.size.width), height: Int32(image.size.height)) + + DispatchQueue.global(qos: .default).async { [weak self] in + self?.writeCaptured( + imageData: imageData, + thumbnailData: thumbnailData, + dimensions: dimensions, + onSuccess: onSuccess, + onError: onError + ) + + self?.focusInterfaceView.resetFocus() + } + } + + private func imageOrientation(deviceOrientation: UIDeviceOrientation, cameraPosition: AVCaptureDevice.Position) -> UIImage.Orientation { + switch deviceOrientation { + case .portrait: + return cameraPosition == .front ? .upMirrored : .up + case .portraitUpsideDown: + return cameraPosition == .front ? .downMirrored : .down + case .landscapeLeft: + return cameraPosition == .front ? 
.leftMirrored : .left + case .landscapeRight: + return cameraPosition == .front ? .rightMirrored : .right + default: + return cameraPosition == .front ? .upMirrored : .up + } + } + + private var screenOrientation: UIDeviceOrientation { + let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation + switch orientation { + case .portrait: + return .portrait + case .portraitUpsideDown: + return .portraitUpsideDown + case .landscapeLeft: + return .landscapeRight // UIInterfaceOrientation's landscapeLeft == UIDeviceOrientation's landscapeRight + case .landscapeRight: + return .landscapeLeft // vice versa! + default: + return .portrait + } + } + // MARK: - Gesture selectors @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) { diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index aed7ab47f..e0c7ed62c 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -13,8 +13,9 @@ import CoreMotion * Real camera implementation that uses AVFoundation */ // swiftlint:disable:next type_body_length -class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate { +class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate, AVCaptureVideoDataOutputSampleBufferDelegate { var previewView: UIView { cameraPreview } + private(set) var imageBuffer: CMSampleBuffer? private let cameraPreview = RealPreviewView(frame: .zero) private let session = AVCaptureSession() @@ -29,6 +30,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var videoDeviceInput: AVCaptureDeviceInput? 
private let photoOutput = AVCapturePhotoOutput() private let metadataOutput = AVCaptureMetadataOutput() + private let videoDataOutput = AVCaptureVideoDataOutput() private var resizeMode: ResizeMode = .contain private var flashMode: FlashMode = .auto @@ -447,6 +449,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega onBarcodeRead?(codeStringValue,barcodeType) } + // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate + + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + if output == videoDataOutput { + imageBuffer = sampleBuffer + } + } + // MARK: - Private private func videoOrientation(from deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? { @@ -549,7 +559,19 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega metadataOutput.metadataObjectTypes = filteredTypes } - + + if session.canAddOutput(videoDataOutput) { + session.addOutput(videoDataOutput) + videoDataOutput.alwaysDiscardsLateVideoFrames = true + videoDataOutput.setSampleBufferDelegate(self, queue: sessionQueue) + + if let connection = videoDataOutput.connection(with: .video) { + connection.videoOrientation = .portrait + } + } else { + return .sessionConfigurationFailed + } + return .success } @@ -705,10 +727,18 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } else { interfaceOrientation = UIApplication.shared.statusBarOrientation } - self.cameraPreview.previewLayer.connection?.videoOrientation = self.videoOrientation(from: interfaceOrientation) + let videoOrientation = self.videoOrientation(from: interfaceOrientation) + self.cameraPreview.previewLayer.connection?.videoOrientation = videoOrientation + + if let videoDataOutputConnection = videoDataOutput.connection(with: .video) { + videoDataOutputConnection.videoOrientation = videoOrientation + } #else // Mac Catalyst always uses portrait orientation 
self.cameraPreview.previewLayer.connection?.videoOrientation = .portrait + if let videoDataOutputConnection = videoDataOutput.connection(with: .video) { + videoDataOutputConnection.videoOrientation = .portrait + } #endif } diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index 09a677b73..a5b14396e 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -21,6 +21,7 @@ class SimulatorCamera: CameraProtocol { private var barcodeFrameSize: CGSize? private var shutterPhotoSound: Bool? = true + private(set) var imageBuffer: CMSampleBuffer? var previewView: UIView { mockPreview } private var fakeFocusFinishedTimer: Timer? From 6bd2c4c276a36f9ef109254a1ec1871136a04f2f Mon Sep 17 00:00:00 2001 From: hoonjoo-park Date: Tue, 20 May 2025 14:17:55 +0900 Subject: [PATCH 3/5] type: add CaptureOptions struct to Types --- ios/ReactNativeCameraKit/Types.swift | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/ios/ReactNativeCameraKit/Types.swift b/ios/ReactNativeCameraKit/Types.swift index d5de515c9..6ad2948f5 100644 --- a/ios/ReactNativeCameraKit/Types.swift +++ b/ios/ReactNativeCameraKit/Types.swift @@ -195,3 +195,11 @@ extension AVCaptureDevice.FocusMode: CustomStringConvertible { } } } + +public struct CaptureOptions { + public var shutterPhotoSound: Bool = true + + public init(shutterPhotoSound: Bool = true) { + self.shutterPhotoSound = shutterPhotoSound + } +} \ No newline at end of file From e8cf72b1144b60be3f8fe7429083de9d4badbdc3 Mon Sep 17 00:00:00 2001 From: hoonjoo-park Date: Tue, 20 May 2025 14:20:22 +0900 Subject: [PATCH 4/5] feat: implement imageOrientation function that converts device orientation to UIImage.Orientation --- ios/ReactNativeCameraKit/RealCamera.swift | 25 +++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift 
index e0c7ed62c..8221e717e 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -58,7 +58,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega private var inProgressPhotoCaptureDelegates = [Int64: PhotoCaptureDelegate]() // MARK: - Lifecycle - + #if !targetEnvironment(macCatalyst) override init() { super.init() @@ -79,7 +79,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // Mac Catalyst doesn't support device orientation notifications } #endif - + @available(*, unavailable) required init?(coder aDecoder: NSCoder) { fatalError("init(coder:) has not been implemented") @@ -272,7 +272,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega func update(flashMode: FlashMode) { self.flashMode = flashMode } - + func update(maxPhotoQualityPrioritization: MaxPhotoQualityPrioritization?) { guard maxPhotoQualityPrioritization != self.maxPhotoQualityPrioritization else { return } if #available(iOS 13.0, *) { @@ -450,7 +450,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate - + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { if output == videoDataOutput { imageBuffer = sampleBuffer @@ -520,13 +520,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega defer { session.commitConfiguration() } session.sessionPreset = .photo - + if #available(iOS 13.0, *) { if let maxPhotoQualityPrioritization { photoOutput.maxPhotoQualityPrioritization = maxPhotoQualityPrioritization.avQualityPrioritization } } - + if session.canAddInput(videoDeviceInput) { session.addInput(videoDeviceInput) @@ -673,6 +673,19 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega } } + // MARK: Private image orientation from device 
orientation + + private func imageOrientation(from deviceOrientation: UIDeviceOrientation) -> UIImage.Orientation { + switch deviceOrientation { + case .portrait: return .up + case .portraitUpsideDown: return .down + case .landscapeLeft: return .left + case .landscapeRight: return .right + case .unknown: return .up + @unknown default: return .up + } + } + // MARK: Private observers private func addObservers() { From dddd923e182b204aa6619c9fb751134ba86cf10d Mon Sep 17 00:00:00 2001 From: hoonjoo-park Date: Tue, 20 May 2025 14:21:29 +0900 Subject: [PATCH 5/5] refactor: integrate silentCapture handling into Camera component --- ios/ReactNativeCameraKit/CameraProtocol.swift | 3 +- ios/ReactNativeCameraKit/CameraView.swift | 90 +------------- ios/ReactNativeCameraKit/RealCamera.swift | 117 +++++++++++++----- .../SimulatorCamera.swift | 7 +- 4 files changed, 95 insertions(+), 122 deletions(-) diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift index 24aaf7e2e..5579e9e33 100644 --- a/ios/ReactNativeCameraKit/CameraProtocol.swift +++ b/ios/ReactNativeCameraKit/CameraProtocol.swift @@ -33,7 +33,8 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate { func update(scannerFrameSize: CGRect?) 
- func capturePicture(onWillCapture: @escaping () -> Void, + func capturePicture(captureOptions: CaptureOptions, + onWillCapture: @escaping () -> Void, onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void, onError: @escaping (_ message: String) -> Void) } diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift index 95d629819..77e395664 100644 --- a/ios/ReactNativeCameraKit/CameraView.swift +++ b/ios/ReactNativeCameraKit/CameraView.swift @@ -307,8 +307,10 @@ public class CameraView: UIView { @objc public func capture(onSuccess: @escaping (_ imageObject: [String: Any]) -> Void, onError: @escaping (_ error: String) -> Void) { - if isSimulator || shutterPhotoSound { - camera.capturePicture(onWillCapture: { [weak self] in + let captureOptions = CaptureOptions(shutterPhotoSound: shutterPhotoSound) + + camera.capturePicture(captureOptions: captureOptions, + onWillCapture: { [weak self] in // Flash/dim preview to indicate shutter action DispatchQueue.main.async { self?.camera.previewView.alpha = 0 @@ -327,9 +329,6 @@ public class CameraView: UIView { self?.focusInterfaceView.resetFocus() } }, onError: onError) - } else { - self.silentCapture(onSuccess: onSuccess, onError: onError) - } } // MARK: - Private Helper @@ -440,87 +439,6 @@ public class CameraView: UIView { onReadCode?(["codeStringValue": barcode,"codeFormat":codeFormat.rawValue]) } - private func silentCapture(onSuccess: @escaping (_ imageObject: [String: Any]) -> Void, - onError: @escaping (_ error: String) -> Void) { - self.camera.previewView.alpha = 0 - UIView.animate(withDuration: 0.35) { - self.camera.previewView.alpha = 1 - } - - guard let imageBuffer = self.camera.imageBuffer, - let cvPixelBuffer = CMSampleBufferGetImageBuffer(imageBuffer) else { - onError("Failed to get image buffer") - return - } - - let ciImage = CIImage(cvPixelBuffer: cvPixelBuffer) - let ciImageContext = CIContext() - - guard let cgImage = 
ciImageContext.createCGImage(ciImage, from: ciImage.extent) else { - onError("Failed to create CGImage") - return - } - - let isPhone = UIDevice.current.userInterfaceIdiom == .phone - let orientation = self.imageOrientation( - deviceOrientation: isPhone ? self.screenOrientation : UIDevice.current.orientation, - cameraPosition: self.cameraType.avPosition) - - let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation) - - // convert image to JPEG - guard let imageData = image.jpegData(compressionQuality: 0.85) else { - onError("Failed to convert image to data") - return - } - - let thumbnailData = image.jpegData(compressionQuality: 0.5) - let dimensions = CMVideoDimensions(width: Int32(image.size.width), height: Int32(image.size.height)) - - DispatchQueue.global(qos: .default).async { [weak self] in - self?.writeCaptured( - imageData: imageData, - thumbnailData: thumbnailData, - dimensions: dimensions, - onSuccess: onSuccess, - onError: onError - ) - - self?.focusInterfaceView.resetFocus() - } - } - - private func imageOrientation(deviceOrientation: UIDeviceOrientation, cameraPosition: AVCaptureDevice.Position) -> UIImage.Orientation { - switch deviceOrientation { - case .portrait: - return cameraPosition == .front ? .upMirrored : .up - case .portraitUpsideDown: - return cameraPosition == .front ? .downMirrored : .down - case .landscapeLeft: - return cameraPosition == .front ? .leftMirrored : .left - case .landscapeRight: - return cameraPosition == .front ? .rightMirrored : .right - default: - return cameraPosition == .front ? 
.upMirrored : .up - } - } - - private var screenOrientation: UIDeviceOrientation { - let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation - switch orientation { - case .portrait: - return .portrait - case .portraitUpsideDown: - return .portraitUpsideDown - case .landscapeLeft: - return .landscapeRight // UIInterfaceOrientation's landscapeLeft == UIDeviceOrientation's landscapeRight - case .landscapeRight: - return .landscapeLeft // vice versa! - default: - return .portrait - } - } - // MARK: - Gesture selectors @objc func handlePinchToZoomRecognizer(_ pinchRecognizer: UIPinchGestureRecognizer) { diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift index 8221e717e..5ee6d97a4 100644 --- a/ios/ReactNativeCameraKit/RealCamera.swift +++ b/ios/ReactNativeCameraKit/RealCamera.swift @@ -335,48 +335,58 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega self.shutterPhotoSound = shutterPhotoSound } - func capturePicture(onWillCapture: @escaping () -> Void, + func capturePicture(captureOptions: CaptureOptions, + onWillCapture: @escaping () -> Void, onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void, onError: @escaping (_ message: String) -> Void) { - /* - Retrieve the video preview layer's video orientation on the main queue before - entering the session queue. Do this to ensure that UI elements are accessed on - the main thread and session configuration is done on the session queue. - */ - DispatchQueue.main.async { - let videoPreviewLayerOrientation = - self.videoOrientation(from: self.deviceOrientation) ?? 
self.cameraPreview.previewLayer.connection?.videoOrientation - self.sessionQueue.async { - if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { - photoOutputConnection.videoOrientation = videoPreviewLayerOrientation - } + let shutterPhotoSound = captureOptions.shutterPhotoSound - let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]) - if #available(iOS 13.0, *) { - settings.photoQualityPrioritization = self.photoOutput.maxPhotoQualityPrioritization - } + if shutterPhotoSound == false { + DispatchQueue.global(qos: .userInitiated).async { [weak self] in + self?.silentCapture(onSuccess: onSuccess, onError: onError) + } + } else { + /* + Retrieve the video preview layer's video orientation on the main queue before + entering the session queue. Do this to ensure that UI elements are accessed on + the main thread and session configuration is done on the session queue. + */ + DispatchQueue.main.async { + let videoPreviewLayerOrientation = + self.videoOrientation(from: self.deviceOrientation) ?? 
self.cameraPreview.previewLayer.connection?.videoOrientation - if self.videoDeviceInput?.device.isFlashAvailable == true { - settings.flashMode = self.flashMode.avFlashMode - } + self.sessionQueue.async { + if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation { + photoOutputConnection.videoOrientation = videoPreviewLayerOrientation + } - let photoCaptureDelegate = PhotoCaptureDelegate( - with: settings, - onWillCapture: onWillCapture, - onCaptureSuccess: { uniqueID, imageData, thumbnailData, dimensions in - self.inProgressPhotoCaptureDelegates[uniqueID] = nil - - onSuccess(imageData, thumbnailData, dimensions) - }, - onCaptureError: { uniqueID, errorMessage in - self.inProgressPhotoCaptureDelegates[uniqueID] = nil - onError(errorMessage) + let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]) + if #available(iOS 13.0, *) { + settings.photoQualityPrioritization = self.photoOutput.maxPhotoQualityPrioritization } - ) - self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate - self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate) + if self.videoDeviceInput?.device.isFlashAvailable == true { + settings.flashMode = self.flashMode.avFlashMode + } + + let photoCaptureDelegate = PhotoCaptureDelegate( + with: settings, + onWillCapture: onWillCapture, + onCaptureSuccess: { uniqueID, imageData, thumbnailData, dimensions in + self.inProgressPhotoCaptureDelegates[uniqueID] = nil + + onSuccess(imageData, thumbnailData, dimensions) + }, + onCaptureError: { uniqueID, errorMessage in + self.inProgressPhotoCaptureDelegates[uniqueID] = nil + onError(errorMessage) + } + ) + + self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate + self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate) + } } } } @@ -792,4 +802,43 @@ class RealCamera: 
NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega // FIXME: Missing use of showResumeButton } + + private func silentCapture(onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void, + onError: @escaping (_ message: String) -> Void) { + + guard let imageBuffer = self.imageBuffer, + let cvPixelBuffer = CMSampleBufferGetImageBuffer(imageBuffer) else { + DispatchQueue.main.async { + onError("Failed to get image buffer") + } + return + } + + let ciImage = CIImage(cvPixelBuffer: cvPixelBuffer) + let ciImageContext = CIContext() + + guard let cgImage = ciImageContext.createCGImage(ciImage, from: ciImage.extent) else { + DispatchQueue.main.async { + onError("Failed to create CGImage") + } + return + } + + let orientation = self.imageOrientation(from: self.deviceOrientation) + let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation) + + guard let imageData = image.jpegData(compressionQuality: 0.85) else { + DispatchQueue.main.async { + onError("Failed to convert image to data") + } + return + } + + let thumbnailData = image.jpegData(compressionQuality: 0.5) + let dimensions = CMVideoDimensions(width: Int32(image.size.width), height: Int32(image.size.height)) + + DispatchQueue.main.async { + onSuccess(imageData, thumbnailData, dimensions) + } + } } diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift index a5b14396e..2d8b3fc16 100644 --- a/ios/ReactNativeCameraKit/SimulatorCamera.swift +++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift @@ -180,9 +180,14 @@ class SimulatorCamera: CameraProtocol { onBarcodeRead: ((_ barcode: String,_ codeFormat:CodeFormat) -> Void)?) {} func update(scannerFrameSize: CGRect?) 
{} - func capturePicture(onWillCapture: @escaping () -> Void, + func capturePicture(captureOptions: CaptureOptions, + onWillCapture: @escaping () -> Void, onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void, onError: @escaping (_ message: String) -> Void) { + /* + We don't need to handle captureOptions.shutterPhotoSound in the simulator, + because the simulator doesn't play a shutter sound. + */ onWillCapture() DispatchQueue.main.async {