diff --git a/ios/ReactNativeCameraKit/CKCameraManager.m b/ios/ReactNativeCameraKit/CKCameraManager.m
index 57503ba39..63a409797 100644
--- a/ios/ReactNativeCameraKit/CKCameraManager.m
+++ b/ios/ReactNativeCameraKit/CKCameraManager.m
@@ -41,5 +41,6 @@ @interface RCT_EXTERN_MODULE(CKCameraManager, RCTViewManager)
 RCT_EXPORT_VIEW_PROPERTY(zoomMode, CKZoomMode)
 RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber)
 RCT_EXPORT_VIEW_PROPERTY(maxZoom, NSNumber)
+RCT_EXPORT_VIEW_PROPERTY(shutterPhotoSound, BOOL)
 
 @end
diff --git a/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm b/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm
index 356d35be7..da192babf 100644
--- a/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm
+++ b/ios/ReactNativeCameraKit/CKCameraViewComponentView.mm
@@ -243,8 +243,12 @@ - (void)updateProps:(const Props::Shared &)props oldProps:(const Props::Shared &
         _view.barcodeFrameSize = @{@"width": @(barcodeWidth), @"height": @(barcodeHeight)};
         [changedProps addObject:@"barcodeFrameSize"];
     }
-
-
+    bool shutterPhotoSound = newProps.shutterPhotoSound;
+    if (shutterPhotoSound != _view.shutterPhotoSound) {
+        _view.shutterPhotoSound = shutterPhotoSound;
+        [changedProps addObject:@"shutterPhotoSound"];
+    }
+
     [super updateProps:props oldProps:oldProps];
     [_view didSetProps:changedProps];
 }
diff --git a/ios/ReactNativeCameraKit/CameraProtocol.swift b/ios/ReactNativeCameraKit/CameraProtocol.swift
index 35bcbec50..5579e9e33 100644
--- a/ios/ReactNativeCameraKit/CameraProtocol.swift
+++ b/ios/ReactNativeCameraKit/CameraProtocol.swift
@@ -7,6 +7,7 @@ import AVFoundation
 
 protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate {
     var previewView: UIView { get }
+    var imageBuffer: CMSampleBuffer? { get }
 
     func setup(cameraType: CameraType, supportedBarcodeType: [CodeFormat])
     func cameraRemovedFromSuperview()
@@ -21,6 +22,7 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate {
     func update(resizeMode: ResizeMode)
     func update(maxPhotoQualityPrioritization: MaxPhotoQualityPrioritization?)
     func update(barcodeFrameSize: CGSize?)
+    func update(shutterPhotoSound: Bool)
 
     func zoomPinchStart()
     func zoomPinchChange(pinchScale: CGFloat)
@@ -31,7 +33,8 @@ protocol CameraProtocol: AnyObject, FocusInterfaceViewDelegate {
 
     func update(scannerFrameSize: CGRect?)
 
-    func capturePicture(onWillCapture: @escaping () -> Void,
+    func capturePicture(captureOptions: CaptureOptions,
+                        onWillCapture: @escaping () -> Void,
                         onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void,
                         onError: @escaping (_ message: String) -> Void)
 }
diff --git a/ios/ReactNativeCameraKit/CameraView.swift b/ios/ReactNativeCameraKit/CameraView.swift
index ad161e5ab..77e395664 100644
--- a/ios/ReactNativeCameraKit/CameraView.swift
+++ b/ios/ReactNativeCameraKit/CameraView.swift
@@ -38,6 +38,7 @@ public class CameraView: UIView {
     @objc public var flashMode: FlashMode = .auto
     @objc public var torchMode: TorchMode = .off
     @objc public var maxPhotoQualityPrioritization: MaxPhotoQualityPrioritization = .balanced
+    @objc public var shutterPhotoSound: Bool = true
     // ratio overlay
     @objc public var ratioOverlay: String?
     @objc public var ratioOverlayColor: UIColor?
@@ -65,6 +66,14 @@ public class CameraView: UIView {
     var eventInteraction: Any? = nil
 
+    var isSimulator: Bool {
+#if targetEnvironment(simulator)
+        true
+#else
+        false
+#endif
+    }
+
     // MARK: - Setup
 
     // This is used to delay camera setup until we have both granted permission & received default props
@@ -288,31 +297,38 @@ public class CameraView: UIView {
         if changedProps.contains("maxZoom") {
             camera.update(maxZoom: maxZoom?.doubleValue)
         }
+
+        if changedProps.contains("shutterPhotoSound") {
+            camera.update(shutterPhotoSound: shutterPhotoSound)
+        }
     }
 
     // MARK: Public
 
     @objc public func capture(onSuccess: @escaping (_ imageObject: [String: Any]) -> Void,
-                              onError: @escaping (_ error: String) -> Void) {
-        camera.capturePicture(onWillCapture: { [weak self] in
-            // Flash/dim preview to indicate shutter action
-            DispatchQueue.main.async {
-                self?.camera.previewView.alpha = 0
-                UIView.animate(withDuration: 0.35, animations: {
-                    self?.camera.previewView.alpha = 1
-                })
-            }
-        }, onSuccess: { [weak self] imageData, thumbnailData, dimensions in
-            DispatchQueue.global(qos: .default).async {
-                self?.writeCaptured(imageData: imageData,
-                                    thumbnailData: thumbnailData,
-                                    dimensions: dimensions,
-                                    onSuccess: onSuccess,
-                                    onError: onError)
-
-                self?.focusInterfaceView.resetFocus()
-            }
-        }, onError: onError)
+                              onError: @escaping (_ error: String) -> Void) {
+        let captureOptions = CaptureOptions(shutterPhotoSound: shutterPhotoSound)
+
+        camera.capturePicture(captureOptions: captureOptions,
+                              onWillCapture: { [weak self] in
+            // Flash/dim preview to indicate shutter action
+            DispatchQueue.main.async {
+                self?.camera.previewView.alpha = 0
+                UIView.animate(withDuration: 0.35, animations: {
+                    self?.camera.previewView.alpha = 1
+                })
+            }
+        }, onSuccess: { [weak self] imageData, thumbnailData, dimensions in
+            DispatchQueue.global(qos: .default).async {
+                self?.writeCaptured(imageData: imageData,
+                                    thumbnailData: thumbnailData,
+                                    dimensions: dimensions,
+                                    onSuccess: onSuccess,
+                                    onError: onError)
+
+                self?.focusInterfaceView.resetFocus()
+            }
+        }, onError: onError)
     }
 
     // MARK: - Private Helper
diff --git a/ios/ReactNativeCameraKit/RealCamera.swift b/ios/ReactNativeCameraKit/RealCamera.swift
index cce97cb28..5ee6d97a4 100644
--- a/ios/ReactNativeCameraKit/RealCamera.swift
+++ b/ios/ReactNativeCameraKit/RealCamera.swift
@@ -13,8 +13,9 @@ import CoreMotion
 /*
  * Real camera implementation that uses AVFoundation
  */
 // swiftlint:disable:next type_body_length
-class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate {
+class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
     var previewView: UIView { cameraPreview }
+    private(set) var imageBuffer: CMSampleBuffer?
 
     private let cameraPreview = RealPreviewView(frame: .zero)
     private let session = AVCaptureSession()
@@ -29,6 +30,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
     private var videoDeviceInput: AVCaptureDeviceInput?
     private let photoOutput = AVCapturePhotoOutput()
     private let metadataOutput = AVCaptureMetadataOutput()
+    private let videoDataOutput = AVCaptureVideoDataOutput()
 
     private var resizeMode: ResizeMode = .contain
     private var flashMode: FlashMode = .auto
@@ -44,6 +46,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
     private var lastOnZoom: Double?
     private var zoom: Double?
     private var maxZoom: Double?
+    private var shutterPhotoSound: Bool = true
 
     private var deviceOrientation = UIDeviceOrientation.unknown
     private var motionManager: CMMotionManager?
@@ -55,7 +58,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
     private var inProgressPhotoCaptureDelegates = [Int64: PhotoCaptureDelegate]()
 
     // MARK: - Lifecycle
-    
+
 #if !targetEnvironment(macCatalyst)
     override init() {
         super.init()
@@ -76,7 +79,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
         // Mac Catalyst doesn't support device orientation notifications
     }
 #endif
-    
+
     @available(*, unavailable)
     required init?(coder aDecoder: NSCoder) {
         fatalError("init(coder:) has not been implemented")
@@ -269,7 +272,7 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
     func update(flashMode: FlashMode) {
         self.flashMode = flashMode
     }
-    
+
     func update(maxPhotoQualityPrioritization: MaxPhotoQualityPrioritization?) {
         guard maxPhotoQualityPrioritization != self.maxPhotoQualityPrioritization else { return }
         if #available(iOS 13.0, *) {
@@ -328,48 +331,64 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
         }
     }
 
-    func capturePicture(onWillCapture: @escaping () -> Void,
+    func update(shutterPhotoSound: Bool) {
+        self.shutterPhotoSound = shutterPhotoSound
+    }
+
+    func capturePicture(captureOptions: CaptureOptions,
+                        onWillCapture: @escaping () -> Void,
                         onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void,
                         onError: @escaping (_ message: String) -> Void) {
-        /*
-         Retrieve the video preview layer's video orientation on the main queue before
-         entering the session queue. Do this to ensure that UI elements are accessed on
-         the main thread and session configuration is done on the session queue.
-         */
-        DispatchQueue.main.async {
-            let videoPreviewLayerOrientation =
-                self.videoOrientation(from: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation
-
-            self.sessionQueue.async {
-                if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation {
-                    photoOutputConnection.videoOrientation = videoPreviewLayerOrientation
-                }
+        let shutterPhotoSound = captureOptions.shutterPhotoSound
 
-                let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
-                if #available(iOS 13.0, *) {
-                    settings.photoQualityPrioritization = self.photoOutput.maxPhotoQualityPrioritization
-                }
+        if !shutterPhotoSound {
+            // Fire onWillCapture here as well so the preview flash feedback still runs.
+            onWillCapture()
+            DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+                self?.silentCapture(onSuccess: onSuccess, onError: onError)
+            }
+        } else {
+            /*
+             Retrieve the video preview layer's video orientation on the main queue before
+             entering the session queue. Do this to ensure that UI elements are accessed on
+             the main thread and session configuration is done on the session queue.
+             */
+            DispatchQueue.main.async {
+                let videoPreviewLayerOrientation =
+                    self.videoOrientation(from: self.deviceOrientation) ?? self.cameraPreview.previewLayer.connection?.videoOrientation
 
-                if self.videoDeviceInput?.device.isFlashAvailable == true {
-                    settings.flashMode = self.flashMode.avFlashMode
-                }
+                self.sessionQueue.async {
+                    if let photoOutputConnection = self.photoOutput.connection(with: .video), let videoPreviewLayerOrientation {
+                        photoOutputConnection.videoOrientation = videoPreviewLayerOrientation
+                    }
+
+                    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
+                    if #available(iOS 13.0, *) {
+                        settings.photoQualityPrioritization = self.photoOutput.maxPhotoQualityPrioritization
+                    }
 
-                let photoCaptureDelegate = PhotoCaptureDelegate(
-                    with: settings,
-                    onWillCapture: onWillCapture,
-                    onCaptureSuccess: { uniqueID, imageData, thumbnailData, dimensions in
-                        self.inProgressPhotoCaptureDelegates[uniqueID] = nil
-
-                        onSuccess(imageData, thumbnailData, dimensions)
-                    },
-                    onCaptureError: { uniqueID, errorMessage in
-                        self.inProgressPhotoCaptureDelegates[uniqueID] = nil
-                        onError(errorMessage)
+                    if self.videoDeviceInput?.device.isFlashAvailable == true {
+                        settings.flashMode = self.flashMode.avFlashMode
                     }
-                )
 
-                self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate
-                self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate)
+                    let photoCaptureDelegate = PhotoCaptureDelegate(
+                        with: settings,
+                        onWillCapture: onWillCapture,
+                        onCaptureSuccess: { uniqueID, imageData, thumbnailData, dimensions in
+                            self.inProgressPhotoCaptureDelegates[uniqueID] = nil
+
+                            onSuccess(imageData, thumbnailData, dimensions)
+                        },
+                        onCaptureError: { uniqueID, errorMessage in
+                            self.inProgressPhotoCaptureDelegates[uniqueID] = nil
+                            onError(errorMessage)
+                        }
+                    )
+
+                    self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate
+                    self.photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate)
+                }
             }
         }
     }
@@ -442,6 +461,14 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
         onBarcodeRead?(codeStringValue,barcodeType)
     }
 
+    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
+
+    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+        if output == videoDataOutput {
+            imageBuffer = sampleBuffer
+        }
+    }
+
     // MARK: - Private
 
     private func videoOrientation(from deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? {
@@ -505,13 +532,13 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
         defer { session.commitConfiguration() }
 
         session.sessionPreset = .photo
-        
+
         if #available(iOS 13.0, *) {
             if let maxPhotoQualityPrioritization {
                 photoOutput.maxPhotoQualityPrioritization = maxPhotoQualityPrioritization.avQualityPrioritization
             }
         }
-        
+
         if session.canAddInput(videoDeviceInput) {
             session.addInput(videoDeviceInput)
@@ -544,7 +571,19 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
             metadataOutput.metadataObjectTypes = filteredTypes
         }
-        
+
+        if session.canAddOutput(videoDataOutput) {
+            session.addOutput(videoDataOutput)
+            videoDataOutput.alwaysDiscardsLateVideoFrames = true
+            videoDataOutput.setSampleBufferDelegate(self, queue: sessionQueue)
+
+            if let connection = videoDataOutput.connection(with: .video) {
+                connection.videoOrientation = .portrait
+            }
+        } else {
+            return .sessionConfigurationFailed
+        }
+
         return .success
     }
@@ -646,6 +685,19 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
         }
     }
 
+    // MARK: Private image orientation from device orientation
+
+    private func imageOrientation(from deviceOrientation: UIDeviceOrientation) -> UIImage.Orientation {
+        switch deviceOrientation {
+        case .portrait: return .up
+        case .portraitUpsideDown: return .down
+        case .landscapeLeft: return .left
+        case .landscapeRight: return .right
+        case .unknown, .faceUp, .faceDown: return .up
+        @unknown default: return .up
+        }
+    }
+
     // MARK: Private observers
 
     private func addObservers() {
@@ -700,10 +752,18 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
         } else {
             interfaceOrientation = UIApplication.shared.statusBarOrientation
         }
-        self.cameraPreview.previewLayer.connection?.videoOrientation = self.videoOrientation(from: interfaceOrientation)
+        let videoOrientation = self.videoOrientation(from: interfaceOrientation)
+        self.cameraPreview.previewLayer.connection?.videoOrientation = videoOrientation
+
+        if let videoDataOutputConnection = videoDataOutput.connection(with: .video) {
+            videoDataOutputConnection.videoOrientation = videoOrientation
+        }
 #else
         // Mac Catalyst always uses portrait orientation
         self.cameraPreview.previewLayer.connection?.videoOrientation = .portrait
+        if let videoDataOutputConnection = videoDataOutput.connection(with: .video) {
+            videoDataOutputConnection.videoOrientation = .portrait
+        }
 #endif
     }
@@ -744,4 +804,43 @@ class RealCamera: NSObject, CameraProtocol, AVCaptureMetadataOutputObjectsDelega
 
         // FIXME: Missing use of showResumeButton
     }
+
+    private func silentCapture(onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void,
+                               onError: @escaping (_ message: String) -> Void) {
+
+        guard let imageBuffer = self.imageBuffer,
+              let cvPixelBuffer = CMSampleBufferGetImageBuffer(imageBuffer) else {
+            DispatchQueue.main.async {
+                onError("Failed to get image buffer")
+            }
+            return
+        }
+
+        let ciImage = CIImage(cvPixelBuffer: cvPixelBuffer)
+        let ciImageContext = CIContext()
+
+        guard let cgImage = ciImageContext.createCGImage(ciImage, from: ciImage.extent) else {
+            DispatchQueue.main.async {
+                onError("Failed to create CGImage")
+            }
+            return
+        }
+
+        let orientation = self.imageOrientation(from: self.deviceOrientation)
+        let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
+
+        guard let imageData = image.jpegData(compressionQuality: 0.85) else {
+            DispatchQueue.main.async {
+                onError("Failed to convert image to data")
+            }
+            return
+        }
+
+        let thumbnailData = image.jpegData(compressionQuality: 0.5)
+        let dimensions = CMVideoDimensions(width: Int32(image.size.width), height: Int32(image.size.height))
+
+        DispatchQueue.main.async {
+            onSuccess(imageData, thumbnailData, dimensions)
+        }
+    }
 }
diff --git a/ios/ReactNativeCameraKit/SimulatorCamera.swift b/ios/ReactNativeCameraKit/SimulatorCamera.swift
index 854311553..2d8b3fc16 100644
--- a/ios/ReactNativeCameraKit/SimulatorCamera.swift
+++ b/ios/ReactNativeCameraKit/SimulatorCamera.swift
@@ -19,7 +19,9 @@ class SimulatorCamera: CameraProtocol {
     private var maxZoom: Double?
     private var resizeMode: ResizeMode = .contain
     private var barcodeFrameSize: CGSize?
+    private var shutterPhotoSound: Bool = true
 
+    private(set) var imageBuffer: CMSampleBuffer?
     var previewView: UIView { mockPreview }
 
     private var fakeFocusFinishedTimer: Timer?
@@ -178,9 +180,14 @@ class SimulatorCamera: CameraProtocol {
                     onBarcodeRead: ((_ barcode: String,_ codeFormat:CodeFormat) -> Void)?) {}
     func update(scannerFrameSize: CGRect?) {}
 
-    func capturePicture(onWillCapture: @escaping () -> Void,
+    func capturePicture(captureOptions: CaptureOptions,
+                        onWillCapture: @escaping () -> Void,
                         onSuccess: @escaping (_ imageData: Data, _ thumbnailData: Data?, _ dimensions: CMVideoDimensions) -> Void,
                         onError: @escaping (_ message: String) -> Void) {
+        /*
+         captureOptions.shutterPhotoSound is ignored here because
+         the simulator does not play a shutter sound.
+         */
        onWillCapture()
 
         DispatchQueue.main.async {
@@ -201,4 +208,8 @@ class SimulatorCamera: CameraProtocol {
     func update(barcodeFrameSize: CGSize?) {
         self.barcodeFrameSize = barcodeFrameSize
     }
+
+    func update(shutterPhotoSound: Bool) {
+        self.shutterPhotoSound = shutterPhotoSound
+    }
 }
diff --git a/ios/ReactNativeCameraKit/Types.swift b/ios/ReactNativeCameraKit/Types.swift
index d5de515c9..6ad2948f5 100644
--- a/ios/ReactNativeCameraKit/Types.swift
+++ b/ios/ReactNativeCameraKit/Types.swift
@@ -195,3 +195,11 @@ extension AVCaptureDevice.FocusMode: CustomStringConvertible {
         }
     }
 }
+
+public struct CaptureOptions {
+    public var shutterPhotoSound: Bool = true
+
+    public init(shutterPhotoSound: Bool = true) {
+        self.shutterPhotoSound = shutterPhotoSound
+    }
+}
\ No newline at end of file
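---

For reference, a minimal caller-side sketch of the new `capturePicture(captureOptions:onWillCapture:onSuccess:onError:)` signature introduced above; the `camera` instance and the `print` bodies are assumed for illustration and are not part of the diff:

```swift
// Assumes some `camera: CameraProtocol` has already been set up.
let options = CaptureOptions(shutterPhotoSound: false) // request the silent path

camera.capturePicture(captureOptions: options,
                      onWillCapture: {
                          // e.g. dim the preview as shutter feedback
                      },
                      onSuccess: { imageData, thumbnailData, dimensions in
                          print("captured \(dimensions.width)x\(dimensions.height), \(imageData.count) bytes")
                      },
                      onError: { message in
                          print("capture failed: \(message)")
                      })
```

With the default `CaptureOptions()` (or `shutterPhotoSound: true`), `RealCamera` keeps using `AVCapturePhotoOutput`, so flash mode and photo-quality prioritization still apply; the silent path bypasses both.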
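The silent path boils down to re-encoding the most recent `AVCaptureVideoDataOutput` frame instead of triggering the audible photo output. A self-contained sketch of that conversion, with the `CIContext` hoisted out so it can be reused across captures (the helper name and parameters are assumptions, not part of the diff):

```swift
import AVFoundation
import CoreImage
import UIKit

// A CIContext is comparatively expensive to create; reuse one across captures.
private let jpegConversionContext = CIContext()

/// CMSampleBuffer -> CVPixelBuffer -> CIImage -> CGImage -> JPEG Data.
func jpegData(from sampleBuffer: CMSampleBuffer,
              orientation: UIImage.Orientation,
              quality: CGFloat = 0.85) -> Data? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    guard let cgImage = jpegConversionContext.createCGImage(ciImage, from: ciImage.extent) else { return nil }
    return UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
        .jpegData(compressionQuality: quality)
}
```

Note the trade-off: a video frame is capped at the session's video resolution and skips `AVCapturePhotoOutput` processing, so silent captures will generally be lower quality than regular ones.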
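One design point worth watching: `imageBuffer` is written on `sessionQueue` (the sample-buffer delegate queue) but read from a global queue in `silentCapture`, so the latest-frame handoff is a potential race. A common alternative is a dedicated serial queue for frame callbacks; a sketch under that assumption (queue label and helper are illustrative, not from the diff):

```swift
import AVFoundation

// A dedicated serial delegate queue keeps frame callbacks off the session queue.
let videoDataQueue = DispatchQueue(label: "camerakit.video-data")

func addVideoDataOutput(to session: AVCaptureSession,
                        delegate: AVCaptureVideoDataOutputSampleBufferDelegate) -> Bool {
    let output = AVCaptureVideoDataOutput()
    guard session.canAddOutput(output) else { return false }
    session.addOutput(output)
    output.alwaysDiscardsLateVideoFrames = true // drop late frames instead of queueing them
    output.setSampleBufferDelegate(delegate, queue: videoDataQueue)
    output.connection(with: .video)?.videoOrientation = .portrait
    return true
}
```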