From b4c654910593946981e3f6e423aa5e31d7c68f3d Mon Sep 17 00:00:00 2001 From: Bill Booth Date: Fri, 13 Jun 2025 20:07:24 -0700 Subject: [PATCH 1/5] Large Refactor - first stage This moves files around and starts to use the new architecture. Tests need to be sorted out still. There's a bunch we still need to move out of the UI code, ideally. --- Localizable.xcstrings | 7 + README.md | 24 + SnapSafe.xcodeproj/project.pbxproj | 36 +- SnapSafe.xctestplan | 8 +- SnapSafe/AppStateCoordinator.swift | 47 +- SnapSafe/CamControl.swift | 200 ----- SnapSafe/{Models => Data}/PhotoFilter.swift | 12 +- .../RepositoryEnvironment.swift | 131 +++ SnapSafe/Models/AppearanceMode.swift | 25 +- SnapSafe/{ => Models}/CameraModel.swift | 845 +++++++++--------- SnapSafe/Models/DetectedFace.swift | 79 +- SnapSafe/Models/MaskMode.swift | 20 +- SnapSafe/Models/PhotoMetadata.swift | 46 + SnapSafe/Models/SecurePhoto.swift | 238 ----- SnapSafe/PhotoExportUseCase.swift | 66 ++ SnapSafe/{ => Repositories}/AuthManager.swift | 0 .../EncryptionManager.swift | 0 .../{ => Repositories}/FaceDetector.swift | 33 +- SnapSafe/{ => Repositories}/FileManager.swift | 22 +- SnapSafe/{ => Repositories}/KeyManager.swift | 0 .../{ => Repositories}/LocationManager.swift | 33 +- .../MemoryManager.swift | 2 +- SnapSafe/{ => Repositories}/PINManager.swift | 34 +- .../ScreenCaptureManager.swift | 125 +-- .../Repositories/SecurePhotoRepository.swift | 169 ++++ .../DataSources/CacheDataSource.swift | 69 ++ .../DataSources/EncryptionDataSource.swift | 33 + .../DataSources/FileSystemDataSource.swift | 63 ++ .../DataSources/MetadataDataSource.swift | 93 ++ .../Protocols/DataSourceProtocols.swift | 58 ++ .../Repository/SecureImageRepository.swift | 293 ++++++ SnapSafe/SnapSafeApp.swift | 4 +- SnapSafe/UseCases/PhotoExportUseCase.swift | 66 ++ SnapSafe/UseCases/PhotoImportUseCase.swift | 59 ++ SnapSafe/UseCases/PhotoLibraryUseCase.swift | 41 + .../AuthenticationOverlayView.swift | 18 +- .../CameraView.swift} | 591 ++---------- SnapSafe/Views/ContentView.swift | 346 +++++++ SnapSafe/Views/FocusIndicatorView.swift | 46 + SnapSafe/{ => Views}/PINSetupView.swift | 42 +- .../{ => Views}/PINVerificationView.swift | 42 +- SnapSafe/Views/PhotoCell.swift | 63 ++ .../PhotoDetail/Components/FaceBoxView.swift | 18 +- .../FaceDetectionControlsView.swift | 16 +- .../Components/FaceDetectionOverlay.swift | 16 +- .../Components/PhotoControlsView.swift | 10 +- .../Components/ZoomLevelIndicator.swift | 6 +- .../Components/ZoomableImageView.swift | 112 +-- .../PhotoDetail/EnhancedPhotoDetailView.swift | 32 +- .../Views/PhotoDetail/ImageInfoView.swift | 181 ++-- .../Modifiers/ZoomableModifier.swift | 20 +- SnapSafe/Views/PhotoDetail/PhotoDetail.swift | 2 +- .../Views/PhotoDetail/PhotoDetailView.swift | 42 +- .../PhotoDetail/PhotoDetailViewModel.swift | 208 ++--- .../{ => Views}/PhotoDetailViewImpl.swift | 6 +- .../PrivacyShieldView.swift} | 17 +- .../Views/ScreenRecordingBlockerView.swift | 62 ++ SnapSafe/Views/ScreenshotTakenView.swift | 34 + SnapSafe/{ => Views}/SecureGalleryView.swift | 462 +++++----- SnapSafe/{ => Views}/SettingsView.swift | 67 +- SnapSafe/localize.xcstrings | 7 + SnapSafeTests/CameraModelTests.swift | 296 +++--- SnapSafeTests/EditedPhotoTrackingTests.swift | 81 +- SnapSafeTests/FaceDetectorTests.swift | 201 +++-- SnapSafeTests/GalleryFilteringTests.swift | 228 ----- SnapSafeTests/LocationManagerTests.swift | 221 +++-- SnapSafeTests/PINManagerTests.swift | 236 +++-- SnapSafeTests/PhotoDetailViewModelTests.swift | 496 ---------- 
SnapSafeTests/PhotoFilterTests.swift | 9 +- .../PhotoMetadataFilteringTests.swift | 238 ----- SnapSafeTests/SecureFileManagerTests.swift | 231 +++-- SnapSafeTests/SecurePhotoTests.swift | 660 -------------- SnapSafeTests/SnapSafeTests.swift | 9 +- SnapSafeUITests/SnapSafeUITests.swift | 3 +- .../SnapSafeUITestsLaunchTests.swift | 3 +- 75 files changed, 3726 insertions(+), 4633 deletions(-) create mode 100644 Localizable.xcstrings delete mode 100644 SnapSafe/CamControl.swift rename SnapSafe/{Models => Data}/PhotoFilter.swift (74%) create mode 100644 SnapSafe/DependencyInjection/RepositoryEnvironment.swift rename SnapSafe/{ => Models}/CameraModel.swift (69%) create mode 100644 SnapSafe/Models/PhotoMetadata.swift delete mode 100644 SnapSafe/Models/SecurePhoto.swift create mode 100644 SnapSafe/PhotoExportUseCase.swift rename SnapSafe/{ => Repositories}/AuthManager.swift (100%) rename SnapSafe/{ => Repositories}/EncryptionManager.swift (100%) rename SnapSafe/{ => Repositories}/FaceDetector.swift (95%) rename SnapSafe/{ => Repositories}/FileManager.swift (97%) rename SnapSafe/{ => Repositories}/KeyManager.swift (100%) rename SnapSafe/{ => Repositories}/LocationManager.swift (82%) rename SnapSafe/{Models => Repositories}/MemoryManager.swift (99%) rename SnapSafe/{ => Repositories}/PINManager.swift (95%) rename SnapSafe/{ => Repositories}/ScreenCaptureManager.swift (55%) create mode 100644 SnapSafe/Repositories/SecurePhotoRepository.swift create mode 100644 SnapSafe/Repository/DataSources/CacheDataSource.swift create mode 100644 SnapSafe/Repository/DataSources/EncryptionDataSource.swift create mode 100644 SnapSafe/Repository/DataSources/FileSystemDataSource.swift create mode 100644 SnapSafe/Repository/DataSources/MetadataDataSource.swift create mode 100644 SnapSafe/Repository/Protocols/DataSourceProtocols.swift create mode 100644 SnapSafe/Repository/SecureImageRepository.swift create mode 100644 SnapSafe/UseCases/PhotoExportUseCase.swift create mode 100644 SnapSafe/UseCases/PhotoImportUseCase.swift create mode 100644 SnapSafe/UseCases/PhotoLibraryUseCase.swift rename SnapSafe/{ => Views}/AuthenticationOverlayView.swift (82%) rename SnapSafe/{ContentView.swift => Views/CameraView.swift} (50%) create mode 100644 SnapSafe/Views/ContentView.swift create mode 100644 SnapSafe/Views/FocusIndicatorView.swift rename SnapSafe/{ => Views}/PINSetupView.swift (89%) rename SnapSafe/{ => Views}/PINVerificationView.swift (87%) create mode 100644 SnapSafe/Views/PhotoCell.swift rename SnapSafe/{ => Views}/PhotoDetailViewImpl.swift (87%) rename SnapSafe/{PrivacyShield.swift => Views/PrivacyShieldView.swift} (92%) create mode 100644 SnapSafe/Views/ScreenRecordingBlockerView.swift create mode 100644 SnapSafe/Views/ScreenshotTakenView.swift rename SnapSafe/{ => Views}/SecureGalleryView.swift (67%) rename SnapSafe/{ => Views}/SettingsView.swift (93%) create mode 100644 SnapSafe/localize.xcstrings delete mode 100644 SnapSafeTests/GalleryFilteringTests.swift delete mode 100644 SnapSafeTests/PhotoDetailViewModelTests.swift delete mode 100644 SnapSafeTests/PhotoMetadataFilteringTests.swift delete mode 100644 SnapSafeTests/SecurePhotoTests.swift diff --git a/Localizable.xcstrings b/Localizable.xcstrings new file mode 100644 index 0000000..fa56d16 --- /dev/null +++ b/Localizable.xcstrings @@ -0,0 +1,7 @@ +{ + "sourceLanguage" : "en", + "strings" : { + + }, + "version" : "1.0" +} \ No newline at end of file diff --git a/README.md b/README.md index a14bc28..3b28632 100644 --- a/README.md +++ b/README.md @@ -4,3 +4,27 @@ The 
camera app that minds its own business. [![iOS build](https://github.com/SecureCamera/SecureCameraIos/actions/workflows/ios.yml/badge.svg)](https://github.com/SecureCamera/SecureCameraIos/actions/workflows/ios.yml) [![CodeQL Advanced](https://github.com/SecureCamera/SecureCameraIos/actions/workflows/codeql.yml/badge.svg)](https://github.com/SecureCamera/SecureCameraIos/actions/workflows/codeql.yml) + + +# Recommended iOS Settings + +Apple provides a number of security features that can be used to keep your device as secure as possible. This section outlines settings you can use to protect your device. + +## USB-Restricted Mode (iOS 18+) + +This option controls when data access over the USB port is deactivated. By default this should already be set to the most secure option: **disabled**. Keeping it disabled hardens the device against attacks delivered via the USB port. The port's behavior depends on the lock state of the device. + +| Condition | What the port will do | | ---------------------------------- | ---------------------------------------------------------------------------------------------------- | | Device has been unlocked < 1 h ago | Accept data from *known* accessories and hosts; prompt for “Trust This Computer” for new hosts | | Locked ≥ 1 h | New accessories are blocked until the user unlocks; previously-trusted ones still work for 30 days ➀ | | No accessory use for ≥ 3 d | The moment the device locks, *all* data connections are torn down; user must unlock to re-enable ➀ | + + +To check whether you have this setting disabled, go to: + +``` +Settings → Face ID & Passcode (or Touch ID & Passcode) → Allow Access When Locked → Accessories +``` + +Verify the setting is **disabled** (the default). diff --git a/SnapSafe.xcodeproj/project.pbxproj index ebec6eb..c25330c 100644 --- a/SnapSafe.xcodeproj/project.pbxproj +++ b/SnapSafe.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 56; + objectVersion = 77; objects = { /* Begin PBXBuildFile section */ @@ -11,7 +11,6 @@ A91DBC552DE58191001F42ED /* DetectedFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC262DE58191001F42ED /* DetectedFace.swift */; }; A91DBC562DE58191001F42ED /* MaskMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC272DE58191001F42ED /* MaskMode.swift */; }; A91DBC572DE58191001F42ED /* MemoryManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC282DE58191001F42ED /* MemoryManager.swift */; }; - A91DBC582DE58191001F42ED /* PhotoFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC292DE58191001F42ED /* PhotoFilter.swift */; }; A91DBC592DE58191001F42ED /* SecurePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC2A2DE58191001F42ED /* SecurePhoto.swift */; }; A91DBC5A2DE58191001F42ED /* FaceBoxView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC2E2DE58191001F42ED /* FaceBoxView.swift */; }; A91DBC5B2DE58191001F42ED /* FaceDetectionControlsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC2F2DE58191001F42ED /* FaceDetectionControlsView.swift */; }; @@ -47,6 +46,7 @@ A91DBC792DE58191001F42ED /* SnapSafeApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC522DE58191001F42ED /* SnapSafeApp.swift */; }; A91DBC7A2DE58191001F42ED /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A91DBC2C2DE58191001F42ED /* Preview Assets.xcassets */; }; A91DBC7B2DE58191001F42ED /* Assets.xcassets in Resources */ =
{isa = PBXBuildFile; fileRef = A91DBC3F2DE58191001F42ED /* Assets.xcassets */; }; + A9445CBB2DE7DD7B0038119B /* PhotoMetadata.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9445CBA2DE7DD7B0038119B /* PhotoMetadata.swift */; }; A9B3E0882DCF1D3C003F1ED3 /* Dependencies in Frameworks */ = {isa = PBXBuildFile; productRef = A9B3E0872DCF1D3C003F1ED3 /* Dependencies */; }; A9B3E08A2DCF1D3C003F1ED3 /* DependenciesTestSupport in Frameworks */ = {isa = PBXBuildFile; productRef = A9B3E0892DCF1D3C003F1ED3 /* DependenciesTestSupport */; }; /* End PBXBuildFile section */ @@ -74,7 +74,6 @@ A91DBC262DE58191001F42ED /* DetectedFace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DetectedFace.swift; sourceTree = ""; }; A91DBC272DE58191001F42ED /* MaskMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MaskMode.swift; sourceTree = ""; }; A91DBC282DE58191001F42ED /* MemoryManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MemoryManager.swift; sourceTree = ""; }; - A91DBC292DE58191001F42ED /* PhotoFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoFilter.swift; sourceTree = ""; }; A91DBC2A2DE58191001F42ED /* SecurePhoto.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecurePhoto.swift; sourceTree = ""; }; A91DBC2C2DE58191001F42ED /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; A91DBC2E2DE58191001F42ED /* FaceBoxView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceBoxView.swift; sourceTree = ""; }; @@ -110,23 +109,34 @@ A91DBC502DE58191001F42ED /* SecureGalleryView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecureGalleryView.swift; sourceTree = ""; }; A91DBC512DE58191001F42ED /* SettingsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsView.swift; sourceTree = ""; }; A91DBC522DE58191001F42ED /* SnapSafeApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SnapSafeApp.swift; sourceTree = ""; }; + A9445CBA2DE7DD7B0038119B /* PhotoMetadata.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoMetadata.swift; sourceTree = ""; }; A9DE37472DC5F34400679C2C /* SnapSafe.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SnapSafe.app; sourceTree = BUILT_PRODUCTS_DIR; }; A9DE37572DC5F34600679C2C /* SnapSafeTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SnapSafeTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; A9DE37612DC5F34600679C2C /* SnapSafeUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SnapSafeUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; /* End PBXFileReference section */ -/* Begin PBXGroup section */ +/* Begin PBXFileSystemSynchronizedRootGroup section */ + A9445CBC2DE820E30038119B /* Repositories */ = { + isa = PBXFileSystemSynchronizedRootGroup; + path = Repositories; + sourceTree = ""; + }; + A9445CBD2DE8210A0038119B /* Data */ = { + isa = PBXFileSystemSynchronizedRootGroup; + path = Data; + sourceTree = ""; + }; A9DE375A2DC5F34600679C2C /* SnapSafeTests */ = { - isa = PBXGroup; + isa = PBXFileSystemSynchronizedRootGroup; path = SnapSafeTests; sourceTree = ""; }; 
A9DE37642DC5F34600679C2C /* SnapSafeUITests */ = { - isa = PBXGroup; + isa = PBXFileSystemSynchronizedRootGroup; path = SnapSafeUITests; sourceTree = ""; }; -/* End PBXGroup section */ +/* End PBXFileSystemSynchronizedRootGroup section */ /* Begin PBXFrameworksBuildPhase section */ A9DE37442DC5F34400679C2C /* Frameworks */ = { @@ -161,8 +171,8 @@ A91DBC252DE58191001F42ED /* AppearanceMode.swift */, A91DBC262DE58191001F42ED /* DetectedFace.swift */, A91DBC272DE58191001F42ED /* MaskMode.swift */, + A91DBC432DE58191001F42ED /* CameraModel.swift */, A91DBC282DE58191001F42ED /* MemoryManager.swift */, - A91DBC292DE58191001F42ED /* PhotoFilter.swift */, A91DBC2A2DE58191001F42ED /* SecurePhoto.swift */, ); path = Models; @@ -222,6 +232,8 @@ A91DBC532DE58191001F42ED /* SnapSafe */ = { isa = PBXGroup; children = ( + A9445CBD2DE8210A0038119B /* Data */, + A9445CBC2DE820E30038119B /* Repositories */, A91DBC2B2DE58191001F42ED /* Models */, A91DBC2D2DE58191001F42ED /* Preview Content */, A91DBC3D2DE58191001F42ED /* Views */, @@ -230,7 +242,6 @@ A91DBC402DE58191001F42ED /* AuthenticationOverlayView.swift */, A91DBC412DE58191001F42ED /* AuthManager.swift */, A91DBC422DE58191001F42ED /* CamControl.swift */, - A91DBC432DE58191001F42ED /* CameraModel.swift */, A91DBC442DE58191001F42ED /* ContentView.swift */, A91DBC452DE58191001F42ED /* EncryptionManager.swift */, A91DBC462DE58191001F42ED /* FaceDetector.swift */, @@ -253,6 +264,7 @@ A9DE373E2DC5F34400679C2C = { isa = PBXGroup; children = ( + A9445CBA2DE7DD7B0038119B /* PhotoMetadata.swift */, A91DBB422DE41BAE001F42ED /* SnapSafe.xctestplan */, A91DBC532DE58191001F42ED /* SnapSafe */, A9DE375A2DC5F34600679C2C /* SnapSafeTests */, @@ -286,6 +298,10 @@ ); dependencies = ( ); + fileSystemSynchronizedGroups = ( + A9445CBC2DE820E30038119B /* Repositories */, + A9445CBD2DE8210A0038119B /* Data */, + ); name = SnapSafe; packageProductDependencies = ( A9B3E0872DCF1D3C003F1ED3 /* Dependencies */, @@ -423,7 +439,6 @@ A91DBC552DE58191001F42ED /* DetectedFace.swift in Sources */, A91DBC562DE58191001F42ED /* MaskMode.swift in Sources */, A91DBC572DE58191001F42ED /* MemoryManager.swift in Sources */, - A91DBC582DE58191001F42ED /* PhotoFilter.swift in Sources */, A91DBC592DE58191001F42ED /* SecurePhoto.swift in Sources */, A91DBC5A2DE58191001F42ED /* FaceBoxView.swift in Sources */, A91DBC5B2DE58191001F42ED /* FaceDetectionControlsView.swift in Sources */, @@ -433,6 +448,7 @@ A91DBC5F2DE58191001F42ED /* ZoomLevelIndicator.swift in Sources */, A91DBC602DE58191001F42ED /* ZoomableModifier.swift in Sources */, A91DBC612DE58191001F42ED /* EnhancedPhotoDetailView.swift in Sources */, + A9445CBB2DE7DD7B0038119B /* PhotoMetadata.swift in Sources */, A91DBC622DE58191001F42ED /* ImageInfoView.swift in Sources */, A91DBC632DE58191001F42ED /* PhotoDetail.swift in Sources */, A91DBC642DE58191001F42ED /* PhotoDetailView.swift in Sources */, diff --git a/SnapSafe.xctestplan b/SnapSafe.xctestplan index 6f8b07e..791b866 100644 --- a/SnapSafe.xctestplan +++ b/SnapSafe.xctestplan @@ -9,12 +9,18 @@ } ], "defaultOptions" : { + "addressSanitizer" : { + "detectStackUseAfterReturn" : true, + "enabled" : true + }, "targetForVariableExpansion" : { "containerPath" : "container:SnapSafe.xcodeproj", "identifier" : "A9DE37462DC5F34400679C2C", "name" : "SnapSafe" }, - "testExecutionOrdering" : "random" + "testExecutionOrdering" : "random", + "testRepetitionMode" : "retryOnFailure", + "undefinedBehaviorSanitizerEnabled" : true }, "testTargets" : [ { diff --git a/SnapSafe/AppStateCoordinator.swift 
b/SnapSafe/AppStateCoordinator.swift index be0af55..a9353db 100644 --- a/SnapSafe/AppStateCoordinator.swift +++ b/SnapSafe/AppStateCoordinator.swift @@ -5,86 +5,85 @@ // Created by Bill Booth on 5/22/25. // -import SwiftUI import Combine +import SwiftUI -/// Coordinator to manage app state transitions and handle authentication class AppStateCoordinator: ObservableObject { // Singleton instance static let shared = AppStateCoordinator() - + // Published properties @Published var needsAuthentication = false @Published var wasInBackground = false @Published var dismissAllSheets = false - + // Reference to PIN Manager private let pinManager = PINManager.shared - + // Subscriptions to manage cleanup private var cancellables = Set() - + private init() { // Listen for scene phase notifications via NotificationCenter as a backup mechanism // This ensures we catch transitions even in modal sheets NotificationCenter.default.publisher(for: UIApplication.didEnterBackgroundNotification) - .sink { [weak self] _ in + .sink { [weak self] _ in self?.handleDidEnterBackground() } .store(in: &cancellables) - + NotificationCenter.default.publisher(for: UIApplication.willEnterForegroundNotification) .sink { [weak self] _ in self?.handleWillEnterForeground() } .store(in: &cancellables) - + print("AppStateCoordinator initialized") } - - /// Handle when app enters background + + // Handle when app enters background func handleDidEnterBackground() { print("App entered background") wasInBackground = true } - - /// Handle when app will enter foreground + + // Handle when app will enter foreground func handleWillEnterForeground() { print("App will enter foreground, wasInBackground: \(wasInBackground)") - if wasInBackground && pinManager.isPINSet && pinManager.requirePINOnResume { + if wasInBackground, pinManager.isPINSet, pinManager.requirePINOnResume { // Need to dismiss any open sheets and show authentication print("Requiring authentication after background") dismissAllSheets = true - + // Slight delay to ensure sheets are dismissed first DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { self.needsAuthentication = true } } - + // Update last active time pinManager.updateLastActiveTime() } - - /// Reset authentication state + + // Reset authentication state func resetAuthenticationState() { needsAuthentication = false wasInBackground = false dismissAllSheets = false } - - /// Signal that authentication is complete + + // Signal that authentication is complete func authenticationComplete() { needsAuthentication = false wasInBackground = false } } -/// ViewModifier to handle app state transitions +// ViewModifier to handle app state transitions struct AppStateHandler: ViewModifier { @ObservedObject private var coordinator = AppStateCoordinator.shared @Binding var isPresented: Bool - + func body(content: Content) -> some View { content .onChange(of: coordinator.dismissAllSheets) { _, shouldDismiss in @@ -97,8 +96,8 @@ struct AppStateHandler: ViewModifier { } extension View { - /// Apply app state handling to modal sheets + // Apply app state handling to modal sheets func handleAppState(isPresented: Binding) -> some View { modifier(AppStateHandler(isPresented: isPresented)) } -} \ No newline at end of file +} diff --git a/SnapSafe/CamControl.swift b/SnapSafe/CamControl.swift deleted file mode 100644 index 4e922d2..0000000 --- a/SnapSafe/CamControl.swift +++ /dev/null @@ -1,200 +0,0 @@ -// -// CamControl.swift -// SnapSafe -// -// Created by Bill Booth on 5/3/25. 
-// - -import AVFoundation -import CoreGraphics -import CoreLocation -import ImageIO -import Photos -import UIKit - -class SecureCameraController: UIViewController, AVCapturePhotoCaptureDelegate { - private var captureSession: AVCaptureSession! - private var photoOutput: AVCapturePhotoOutput! - private var previewLayer: AVCaptureVideoPreviewLayer! -// private let encryptionManager = EncryptionManager() - - override func viewDidLoad() { - super.viewDidLoad() - setupCamera() - } - - private func setupCamera() { - captureSession = AVCaptureSession() - - guard let backCamera = AVCaptureDevice.default(for: .video) else { - // Handle camera unavailable - return - } - - do { - let input = try AVCaptureDeviceInput(device: backCamera) - captureSession.addInput(input) - - photoOutput = AVCapturePhotoOutput() - captureSession.addOutput(photoOutput) - - // Set quality prioritization to maximum quality over speed - photoOutput.maxPhotoQualityPrioritization = .quality - print("📸 Set photo quality prioritization to maximum quality") - - // Prepare for zero shutter lag - if photoOutput.isFastCapturePrioritizationSupported { - print("Fast capture prioritization is supported, preparing zero shutter lag pipeline") - let zslSettings = AVCapturePhotoSettings() - photoOutput.setPreparedPhotoSettingsArray([zslSettings]) - } - - previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) - previewLayer.frame = view.bounds - previewLayer.videoGravity = .resizeAspectFill - view.layer.addSublayer(previewLayer) - - // Configure camera device for optimal quality - try backCamera.lockForConfiguration() - - // Enable subject area change monitoring - backCamera.isSubjectAreaChangeMonitoringEnabled = true - print("Enabled subject area change monitoring") - - if backCamera.isExposureModeSupported(.continuousAutoExposure) { - // Use a faster shutter speed (1/500 sec) for sharper images - let fastShutter = CMTime(value: 1, timescale: 500) // 1/500 sec - // Set ISO to a reasonable value (or max if needed) - let iso = min(backCamera.activeFormat.maxISO, 400) - - // Only set custom exposure if we're in good lighting conditions - if backCamera.exposureDuration.seconds < 0.1 { // Current exposure is faster than 1/10s - print("Setting shutter-priority exposure: 1/500s, ISO: \(iso)") - backCamera.setExposureModeCustom(duration: fastShutter, iso: iso) { _ in - // After setting custom exposure, lock it to prevent auto changes - try? backCamera.lockForConfiguration() - backCamera.exposureMode = .locked - backCamera.unlockForConfiguration() - } - } - } - - backCamera.unlockForConfiguration() - - // Add observer for subject area changes - NotificationCenter.default.addObserver( - self, - selector: #selector(subjectAreaDidChange), - name: .AVCaptureDeviceSubjectAreaDidChange, - object: backCamera - ) - - captureSession.startRunning() - } catch { - // Handle camera setup error - } - } - - // Handle subject area changes by refocusing - @objc private func subjectAreaDidChange(notification: NSNotification) { - guard let device = notification.object as? 
AVCaptureDevice else { return } - - // Refocus to center or last known focus point - let focusPoint = CGPoint(x: 0.5, y: 0.5) // Default to center - - do { - try device.lockForConfiguration() - - // Set focus point and mode if supported - if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) { - device.focusPointOfInterest = focusPoint - device.focusMode = .autoFocus - print("📸 Refocusing after subject area change") - } - - // Set exposure point if supported - if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure) { - device.exposurePointOfInterest = focusPoint - device.exposureMode = .continuousAutoExposure - } - - device.unlockForConfiguration() - } catch { - print("Error refocusing: \(error.localizedDescription)") - } - } - - func capturePhoto() { - let settings: AVCapturePhotoSettings - settings = AVCapturePhotoSettings() - settings.photoQualityPrioritization = .quality - photoOutput.capturePhoto(with: settings, delegate: self) - } - - func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) { - guard error == nil else { - // Handle photo capture error - print("Error capturing photo: \(error!.localizedDescription)") - return - } - - // Extract and process EXIF data - if photo.fileDataRepresentation() != nil { -// processAndSecurePhoto(data) - } - } - - func photoOutput(_: AVCapturePhotoOutput, didFinishCapturingDeferredPhotoProxy proxy: AVCaptureDeferredPhotoProxy?, error: Error?) { - guard error == nil else { - print("Error with deferred photo: \(error!.localizedDescription)") - return - } - } - - private func processAndSecurePhoto(_ photoData: Data) { - // Extract EXIF data before encryption - if let image = UIImage(data: photoData), - let _ = image.cgImage, - let metadata = extractMetadata(from: photoData) - { - // Process EXIF data (location, timestamps, etc.) - let processedEXIF = processEXIFData(metadata) - - // Save the photo without encryption for now - do { - // In a real implementation, we would encrypt the data first - let secureFileManager = SecureFileManager() - _ = try secureFileManager.savePhoto(photoData, withMetadata: processedEXIF) - } catch { - // Handle save error - print("Error saving photo: \(error.localizedDescription)") - } - } - } - - private func extractMetadata(from imageData: Data) -> [String: Any]? { - guard let source = CGImageSourceCreateWithData(imageData as CFData, nil) else { - return nil - } - - guard let metadata = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? 
[String: Any] else { - return nil - } - - return metadata - } - - private func processEXIFData(_ metadata: [String: Any]) -> [String: Any] { - let processedMetadata = metadata - - // Extract GPS data if available - if metadata[String(kCGImagePropertyGPSDictionary)] is [String: Any] { - // Process GPS data as needed - // Store separate from image for security - } - - // Process other EXIF data as needed - - return processedMetadata - } -} diff --git a/SnapSafe/Models/PhotoFilter.swift b/SnapSafe/Data/PhotoFilter.swift similarity index 74% rename from SnapSafe/Models/PhotoFilter.swift rename to SnapSafe/Data/PhotoFilter.swift index 4805608..562929e 100644 --- a/SnapSafe/Models/PhotoFilter.swift +++ b/SnapSafe/Data/PhotoFilter.swift @@ -12,17 +12,17 @@ enum PhotoFilter: String, CaseIterable { case imported = "Imported Photos" case edited = "Edited Photos" case withLocation = "Photos with Location" - + var systemImage: String { switch self { case .all: - return "photo.stack" + "photo.stack" case .imported: - return "square.and.arrow.down" + "square.and.arrow.down" case .edited: - return "pencil.circle" + "pencil.circle" case .withLocation: - return "location.circle" + "location.circle" } } -} \ No newline at end of file +} diff --git a/SnapSafe/DependencyInjection/RepositoryEnvironment.swift b/SnapSafe/DependencyInjection/RepositoryEnvironment.swift new file mode 100644 index 0000000..3030b66 --- /dev/null +++ b/SnapSafe/DependencyInjection/RepositoryEnvironment.swift @@ -0,0 +1,131 @@ +// +// RepositoryEnvironment.swift +// SnapSafe +// +// Created by Claude on 5/28/25. +// + +import SwiftUI + +// MARK: - Environment Keys + +private struct SecureImageRepositoryKey: EnvironmentKey { + static let defaultValue: SecureImageRepositoryProtocol = { + do { + let keyManager = KeyManager() + let fileSystemDataSource = try FileSystemDataSource() + let encryptionDataSource = EncryptionDataSource(keyManager: keyManager) + let metadataDataSource = try MetadataDataSource() + let cacheDataSource = CacheDataSource() + + return SecureImageRepository( + fileSystemDataSource: fileSystemDataSource, + encryptionDataSource: encryptionDataSource, + metadataDataSource: metadataDataSource, + cacheDataSource: cacheDataSource + ) + } catch { + fatalError("Failed to initialize SecureImageRepository: \(error)") + } + }() +} + +private struct PhotoLibraryUseCaseKey: EnvironmentKey { + static let defaultValue: PhotoLibraryUseCase = .init(repository: SecureImageRepositoryKey.defaultValue) +} + +private struct PhotoImportUseCaseKey: EnvironmentKey { + static let defaultValue: PhotoImportUseCase = .init(repository: SecureImageRepositoryKey.defaultValue) +} + +private struct PhotoExportUseCaseKey: EnvironmentKey { + static let defaultValue: PhotoExportUseCase = .init(repository: SecureImageRepositoryKey.defaultValue) +} + +// MARK: - Environment Extensions + +extension EnvironmentValues { + var secureImageRepository: SecureImageRepositoryProtocol { + get { self[SecureImageRepositoryKey.self] } + set { self[SecureImageRepositoryKey.self] = newValue } + } + + var photoLibraryUseCase: PhotoLibraryUseCase { + get { self[PhotoLibraryUseCaseKey.self] } + set { self[PhotoLibraryUseCaseKey.self] = newValue } + } + + var photoImportUseCase: PhotoImportUseCase { + get { self[PhotoImportUseCaseKey.self] } + set { self[PhotoImportUseCaseKey.self] = newValue } + } + + var photoExportUseCase: PhotoExportUseCase { + get { self[PhotoExportUseCaseKey.self] } + set { self[PhotoExportUseCaseKey.self] = newValue } + } +} + +// MARK: - Dependency 
Container + +enum DependencyContainer { + static func setupLiveEnvironment() -> some View { + EmptyView() + .environment(\.secureImageRepository, createLiveRepository()) + .environment(\.photoLibraryUseCase, createLivePhotoLibraryUseCase()) + .environment(\.photoImportUseCase, createLivePhotoImportUseCase()) + .environment(\.photoExportUseCase, createLivePhotoExportUseCase()) + } + + static func setupTestEnvironment( + repository: SecureImageRepositoryProtocol? = nil, + photoLibraryUseCase: PhotoLibraryUseCase? = nil, + photoImportUseCase: PhotoImportUseCase? = nil, + photoExportUseCase: PhotoExportUseCase? = nil + ) -> some View { + let testRepository = repository ?? createTestRepository() + + return EmptyView() + .environment(\.secureImageRepository, testRepository) + .environment(\.photoLibraryUseCase, photoLibraryUseCase ?? PhotoLibraryUseCase(repository: testRepository)) + .environment(\.photoImportUseCase, photoImportUseCase ?? PhotoImportUseCase(repository: testRepository)) + .environment(\.photoExportUseCase, photoExportUseCase ?? PhotoExportUseCase(repository: testRepository)) + } + + private static func createLiveRepository() -> SecureImageRepositoryProtocol { + do { + let keyManager = KeyManager() + let fileSystemDataSource = try FileSystemDataSource() + let encryptionDataSource = EncryptionDataSource(keyManager: keyManager) + let metadataDataSource = try MetadataDataSource() + let cacheDataSource = CacheDataSource() + + return SecureImageRepository( + fileSystemDataSource: fileSystemDataSource, + encryptionDataSource: encryptionDataSource, + metadataDataSource: metadataDataSource, + cacheDataSource: cacheDataSource + ) + } catch { + fatalError("Failed to create live repository: \(error)") + } + } + + private static func createLivePhotoLibraryUseCase() -> PhotoLibraryUseCase { + PhotoLibraryUseCase(repository: createLiveRepository()) + } + + private static func createLivePhotoImportUseCase() -> PhotoImportUseCase { + PhotoImportUseCase(repository: createLiveRepository()) + } + + private static func createLivePhotoExportUseCase() -> PhotoExportUseCase { + PhotoExportUseCase(repository: createLiveRepository()) + } + + private static func createTestRepository() -> SecureImageRepositoryProtocol { + // For testing, return a mock repository + // Note: In actual tests, import the shared MockSecureImageRepository + fatalError("Use MockSecureImageRepository from test target") + } +} diff --git a/SnapSafe/Models/AppearanceMode.swift b/SnapSafe/Models/AppearanceMode.swift index c4bee7d..59bd04d 100644 --- a/SnapSafe/Models/AppearanceMode.swift +++ b/SnapSafe/Models/AppearanceMode.swift @@ -7,25 +7,24 @@ import SwiftUI -// Appearance mode enum for dark mode setting enum AppearanceMode: String, CaseIterable { - case system = "system" - case light = "light" - case dark = "dark" - + case system + case light + case dark + var displayName: String { switch self { - case .system: return "System" - case .light: return "Light" - case .dark: return "Dark" + case .system: "System" + case .light: "Light" + case .dark: "Dark" } } - + var colorScheme: ColorScheme? 
{ switch self { - case .system: return nil - case .light: return .light - case .dark: return .dark + case .system: nil + case .light: .light + case .dark: .dark } } -} \ No newline at end of file +} diff --git a/SnapSafe/CameraModel.swift b/SnapSafe/Models/CameraModel.swift similarity index 69% rename from SnapSafe/CameraModel.swift rename to SnapSafe/Models/CameraModel.swift index 14b3559..bd987e3 100644 --- a/SnapSafe/CameraModel.swift +++ b/SnapSafe/Models/CameraModel.swift @@ -9,22 +9,23 @@ import SwiftUI // Camera model that handles the AVFoundation functionality class CameraModel: NSObject, ObservableObject { - // MARK: - Debug/Simulator Detection + private var isRunningInSimulator: Bool { #if DEBUG && targetEnvironment(simulator) - return true + return true #else - return false + return false #endif } + @Published var isPermissionGranted = false @Published var session = AVCaptureSession() @Published var alert = false @Published var output = AVCapturePhotoOutput() @Published var preview: AVCaptureVideoPreviewLayer! @Published var recentImage: UIImage? - + // Zoom and lens configuration @Published var zoomFactor: CGFloat = 1.0 @Published var minZoom: CGFloat = 0.5 @@ -33,24 +34,25 @@ class CameraModel: NSObject, ObservableObject { private var currentDevice: AVCaptureDevice? private var wideAngleDevice: AVCaptureDevice? private var ultraWideDevice: AVCaptureDevice? - + enum CameraLensType { - case ultraWide // 0.5x zoom - case wideAngle // 1x zoom (standard) + case ultraWide // 0.5x zoom + case wideAngle // 1x zoom (standard) } + @Published var currentLensType: CameraLensType = .wideAngle - + // UI interaction properties var viewSize: CGSize = .zero @Published var focusIndicatorPoint: CGPoint? = nil @Published var showingFocusIndicator = false @Published var flashMode: AVCaptureDevice.FlashMode = .auto @Published var cameraPosition: AVCaptureDevice.Position = .back - + private var isConfiguring = false private var focusResetTimer: Timer? 
- private var lastFocusPoint: CGPoint = CGPoint(x: 0.5, y: 0.5) - + private var lastFocusPoint: CGPoint = .init(x: 0.5, y: 0.5) + // Setup subject area change monitoring for improved autofocus private func setupSubjectAreaChangeMonitoring(for device: AVCaptureDevice) { NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: device) @@ -61,47 +63,47 @@ class CameraModel: NSObject, ObservableObject { object: device ) } - - @objc private func subjectAreaDidChange(notification: Notification) { + + @objc private func subjectAreaDidChange(notification _: Notification) { refocusCamera() } - + // Refocus camera to last focus point when subject area changes private func refocusCamera() { guard let device = currentDevice else { return } - + if device.focusMode != .locked { do { try device.lockForConfiguration() - - if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) { + + if device.isFocusPointOfInterestSupported, device.isFocusModeSupported(.autoFocus) { device.focusPointOfInterest = lastFocusPoint device.focusMode = .autoFocus } - - if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) { + + if device.isExposurePointOfInterestSupported, device.isExposureModeSupported(.autoExpose) { device.exposurePointOfInterest = lastFocusPoint device.exposureMode = .autoExpose } - + device.unlockForConfiguration() focusResetTimer?.invalidate() focusResetTimer = Timer.scheduledTimer(withTimeInterval: 2.0, repeats: false) { [weak self] _ in self?.resetToAutoFocus() } - + } catch { print("Error refocusing: \(error.localizedDescription)") } } } - + private let secureFileManager = SecureFileManager() - + // Initialize camera with delayed permission check to prevent race conditions override init() { super.init() - + // Listen for app entering foreground to reset zoom level NotificationCenter.default.addObserver( self, @@ -109,40 +111,40 @@ class CameraModel: NSObject, ObservableObject { name: UIApplication.willEnterForegroundNotification, object: nil ) - + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { DispatchQueue.global(qos: .userInitiated).async { [weak self] in - guard let self = self else { return } - self.checkPermissions() + guard let self else { return } + checkPermissions() } } } - + deinit { if let device = currentDevice { NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: device) } NotificationCenter.default.removeObserver(self, name: UIApplication.willEnterForegroundNotification, object: nil) } - + @objc private func handleAppWillEnterForeground() { print("CameraModel: App entering foreground, resetting zoom level") resetZoomLevel() } - + func checkPermissions() { #if DEBUG && targetEnvironment(simulator) - if isRunningInSimulator { - DispatchQueue.main.async { - self.isPermissionGranted = true - } - DispatchQueue.global(qos: .userInitiated).asyncAfter(deadline: .now() + 0.2) { - self.setupCamera() + if isRunningInSimulator { + DispatchQueue.main.async { + self.isPermissionGranted = true + } + DispatchQueue.global(qos: .userInitiated).asyncAfter(deadline: .now() + 0.2) { + self.setupCamera() + } + return } - return - } #endif - + switch AVCaptureDevice.authorizationStatus(for: .video) { case .authorized: DispatchQueue.main.async { @@ -174,7 +176,7 @@ class CameraModel: NSObject, ObservableObject { } } } - + // Get camera devices with fallback for ultra-wide private func ultraWideCamera() -> AVCaptureDevice? 
{ if let ultraWide = AVCaptureDevice.default(.builtInUltraWideCamera, for: .video, position: .back) { @@ -182,34 +184,34 @@ class CameraModel: NSObject, ObservableObject { } return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) } - + private func wideAngleCamera(position: AVCaptureDevice.Position = .back) -> AVCaptureDevice? { - return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position) + AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position) } - + func setupCamera() { #if DEBUG && targetEnvironment(simulator) - if isRunningInSimulator { - setupSimulatorMockCamera() - return - } + if isRunningInSimulator { + setupSimulatorMockCamera() + return + } #endif - + session.sessionPreset = .photo session.automaticallyConfiguresApplicationAudioSession = false - + do { session.beginConfiguration() - + wideAngleDevice = wideAngleCamera(position: cameraPosition) - + if cameraPosition == .back { ultraWideDevice = ultraWideCamera() } - + var device: AVCaptureDevice? let shouldUseUltraWide = currentLensType == .ultraWide && ultraWideDevice != nil && cameraPosition == .back - + if shouldUseUltraWide { device = ultraWideDevice } else { @@ -218,267 +220,257 @@ class CameraModel: NSObject, ObservableObject { self.currentLensType = .wideAngle } } - - guard let device = device else { + + guard let device else { print("Failed to get camera device for position: \(cameraPosition)") return } - + currentDevice = device - + // Configure device with optimal camera settings try device.lockForConfiguration() - + let minZoomValue: CGFloat = 1.0 let maxZoomValue = min(device.activeFormat.videoMaxZoomFactor, 10.0) let defaultZoomValue: CGFloat = 1.0 - + device.videoZoomFactor = defaultZoomValue - + // Enable continuous auto modes with smooth transitions if device.isFocusModeSupported(.continuousAutoFocus) { device.focusMode = .continuousAutoFocus device.isSmoothAutoFocusEnabled = true - + if device.isAutoFocusRangeRestrictionSupported { device.autoFocusRangeRestriction = .none } } - + if device.isExposureModeSupported(.continuousAutoExposure) { device.exposureMode = .continuousAutoExposure } - + if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) { device.whiteBalanceMode = .continuousAutoWhiteBalance } - + device.isSubjectAreaChangeMonitoringEnabled = true - + device.unlockForConfiguration() - + let input = try AVCaptureDeviceInput(device: device) if session.canAddInput(input) { session.addInput(input) } - + if session.canAddOutput(output) { session.addOutput(output) configurePhotoOutputForMaxQuality() } - + session.commitConfiguration() - + DispatchQueue.main.async { self.minZoom = minZoomValue self.maxZoom = maxZoomValue self.zoomFactor = defaultZoomValue } - + setupSubjectAreaChangeMonitoring(for: device) startPeriodicFocusCheck() prepareZeroShutterLagCapture() - + } catch { print("Error setting up camera: \(error.localizedDescription)") } } - + #if DEBUG && targetEnvironment(simulator) - // MARK: - Simulator Mock Camera Setup - private func setupSimulatorMockCamera() { - print("Setting up mock camera for simulator") - - DispatchQueue.main.async { - self.minZoom = 0.5 - self.maxZoom = 10.0 - self.zoomFactor = 1.0 - } - - // Create mock photos for simulator - createMockPhotos() - } - - private func captureMockPhoto() { - print("Capturing mock photo in simulator") - - // Create a simple colored image for testing - let size = CGSize(width: 1080, height: 1920) - let colors: [UIColor] = [.systemBlue, .systemGreen, 
.systemOrange, .systemPurple, .systemRed] - let randomColor = colors.randomElement() ?? .systemBlue - - UIGraphicsBeginImageContextWithOptions(size, true, 1.0) - randomColor.setFill() - UIRectFill(CGRect(origin: .zero, size: size)) - - // Add some text to make it look like a photo - let text = "Mock Photo\n\(Date().formatted())\nCamera: \(cameraPosition == .back ? "Back" : "Front")" - let attributes: [NSAttributedString.Key: Any] = [ - .font: UIFont.systemFont(ofSize: 48, weight: .bold), - .foregroundColor: UIColor.white, - ] - - let textSize = text.size(withAttributes: attributes) - let textRect = CGRect( - x: (size.width - textSize.width) / 2, - y: (size.height - textSize.height) / 2, - width: textSize.width, - height: textSize.height - ) - - text.draw(in: textRect, withAttributes: attributes) - - let mockImage = UIGraphicsGetImageFromCurrentImageContext()! - UIGraphicsEndImageContext() - - // Convert to JPEG data - guard let imageData = mockImage.jpegData(compressionQuality: 0.8) else { - print("Failed to create mock image data") - return - } - - // Update recent image - DispatchQueue.main.async { - self.recentImage = mockImage + + // MARK: - Simulator Mock Camera Setup + + private func setupSimulatorMockCamera() { + print("Setting up mock camera for simulator") + + DispatchQueue.main.async { + self.minZoom = 0.5 + self.maxZoom = 10.0 + self.zoomFactor = 1.0 + } + + // Create mock photos for simulator + createMockPhotos() } - - // Save the mock photo - saveMockPhoto(imageData) - } - - private func createMockPhotos() { - DispatchQueue.global(qos: .background).async { - // Create a few sample photos for the gallery - let sampleTexts = [ - "Sample Photo 1\nLandscape", - "Sample Photo 2\nPortrait", - "Sample Photo 3\nSquare" + + private func captureMockPhoto() { + print("Capturing mock photo in simulator") + + // Create a simple colored image for testing + let size = CGSize(width: 1080, height: 1920) + let colors: [UIColor] = [.systemBlue, .systemGreen, .systemOrange, .systemPurple, .systemRed] + let randomColor = colors.randomElement() ?? .systemBlue + + UIGraphicsBeginImageContextWithOptions(size, true, 1.0) + randomColor.setFill() + UIRectFill(CGRect(origin: .zero, size: size)) + + // Add some text to make it look like a photo + let text = "Mock Photo\n\(Date().formatted())\nCamera: \(cameraPosition == .back ? "Back" : "Front")" + let attributes: [NSAttributedString.Key: Any] = [ + .font: UIFont.systemFont(ofSize: 48, weight: .bold), + .foregroundColor: UIColor.white, ] - - for (index, text) in sampleTexts.enumerated() { - let isLandscape = index == 0 - let size = isLandscape ? CGSize(width: 1920, height: 1080) : CGSize(width: 1080, height: 1920) - let color: UIColor = [.systemBlue, .systemGreen, .systemOrange][index] - - UIGraphicsBeginImageContextWithOptions(size, true, 1.0) - color.setFill() - UIRectFill(CGRect(origin: .zero, size: size)) - - let attributes: [NSAttributedString.Key: Any] = [ - .font: UIFont.systemFont(ofSize: 48, weight: .bold), - .foregroundColor: UIColor.white, + + let textSize = text.size(withAttributes: attributes) + let textRect = CGRect( + x: (size.width - textSize.width) / 2, + y: (size.height - textSize.height) / 2, + width: textSize.width, + height: textSize.height + ) + + text.draw(in: textRect, withAttributes: attributes) + + let mockImage = UIGraphicsGetImageFromCurrentImageContext()! 
+ UIGraphicsEndImageContext() + + // Convert to JPEG data + guard let imageData = mockImage.jpegData(compressionQuality: 0.8) else { + print("Failed to create mock image data") + return + } + + // Update recent image + DispatchQueue.main.async { + self.recentImage = mockImage + } + + // Save the mock photo + saveMockPhoto(imageData) + } + + private func createMockPhotos() { + DispatchQueue.global(qos: .background).async { + // Create a few sample photos for the gallery + let sampleTexts = [ + "Sample Photo 1\nLandscape", + "Sample Photo 2\nPortrait", + "Sample Photo 3\nSquare", ] - - let textSize = text.size(withAttributes: attributes) - let textRect = CGRect( - x: (size.width - textSize.width) / 2, - y: (size.height - textSize.height) / 2, - width: textSize.width, - height: textSize.height - ) - - text.draw(in: textRect, withAttributes: attributes) - - if let mockImage = UIGraphicsGetImageFromCurrentImageContext(), - let imageData = mockImage.jpegData(compressionQuality: 0.8) { - - let metadata: [String: Any] = [ - "creationDate": Date().timeIntervalSince1970 - Double(index * 3600), // Stagger by hours - "cameraPosition": "back", - "isLandscape": isLandscape, - "mockPhoto": true + + for (index, text) in sampleTexts.enumerated() { + let isLandscape = index == 0 + let size = isLandscape ? CGSize(width: 1920, height: 1080) : CGSize(width: 1080, height: 1920) + let color: UIColor = [.systemBlue, .systemGreen, .systemOrange][index] + + UIGraphicsBeginImageContextWithOptions(size, true, 1.0) + color.setFill() + UIRectFill(CGRect(origin: .zero, size: size)) + + let attributes: [NSAttributedString.Key: Any] = [ + .font: UIFont.systemFont(ofSize: 48, weight: .bold), + .foregroundColor: UIColor.white, ] - - do { - _ = try self.secureFileManager.savePhoto(imageData, withMetadata: metadata) - print("Created mock photo \(index + 1)") - } catch { - print("Error creating mock photo: \(error)") + + let textSize = text.size(withAttributes: attributes) + let textRect = CGRect( + x: (size.width - textSize.width) / 2, + y: (size.height - textSize.height) / 2, + width: textSize.width, + height: textSize.height + ) + + text.draw(in: textRect, withAttributes: attributes) + + if let mockImage = UIGraphicsGetImageFromCurrentImageContext(), + let imageData = mockImage.jpegData(compressionQuality: 0.8) + { + let metadata: [String: Any] = [ + "creationDate": Date().timeIntervalSince1970 - Double(index * 3600), // Stagger by hours + "cameraPosition": "back", + "isLandscape": isLandscape, + "mockPhoto": true, + ] + + do { + _ = try self.secureFileManager.savePhoto(imageData, withMetadata: metadata) + print("Created mock photo \(index + 1)") + } catch { + print("Error creating mock photo: \(error)") + } } + + UIGraphicsEndImageContext() } - - UIGraphicsEndImageContext() } } - } - - private func saveMockPhoto(_ imageData: Data) { - DispatchQueue.global(qos: .userInitiated).async { - let metadata: [String: Any] = [ - "creationDate": Date().timeIntervalSince1970, - "cameraPosition": self.cameraPosition == .front ? 
"front" : "back", - "isLandscape": false, // Mock photos are portrait by default - "mockPhoto": true - ] - - do { - let filename = try self.secureFileManager.savePhoto(imageData, withMetadata: metadata) - print("Mock photo saved successfully: \(filename)") - } catch { - print("Error saving mock photo: \(error.localizedDescription)") + + private func saveMockPhoto(_ imageData: Data) { + DispatchQueue.global(qos: .userInitiated).async { + let metadata: [String: Any] = [ + "creationDate": Date().timeIntervalSince1970, + "cameraPosition": self.cameraPosition == .front ? "front" : "back", + "isLandscape": false, // Mock photos are portrait by default + "mockPhoto": true, + ] + + do { + let filename = try self.secureFileManager.savePhoto(imageData, withMetadata: metadata) + print("Mock photo saved successfully: \(filename)") + } catch { + print("Error saving mock photo: \(error.localizedDescription)") + } } } - } #endif - + private func configurePhotoOutputForMaxQuality() { output.maxPhotoQualityPrioritization = .quality } - + private func prepareZeroShutterLagCapture() { // TODO/debug - return } - + private var focusCheckTimer: Timer? - + private func startPeriodicFocusCheck() { focusCheckTimer?.invalidate() focusCheckTimer = Timer.scheduledTimer(withTimeInterval: 3.0, repeats: true) { [weak self] _ in self?.checkAndOptimizeFocus() } } - + // Ensure continuous auto-focus remains active private func checkAndOptimizeFocus() { guard let device = currentDevice else { return } - + if device.focusMode != .locked { do { try device.lockForConfiguration() - - if device.focusMode != .continuousAutoFocus && device.isFocusModeSupported(.continuousAutoFocus) { + + if device.focusMode != .continuousAutoFocus, device.isFocusModeSupported(.continuousAutoFocus) { device.focusMode = .continuousAutoFocus } - + device.unlockForConfiguration() } catch { print("Error in focus check: \(error.localizedDescription)") } } } - - // Map device orientations to rotation angles for horizon-level capture - private func rotationAngle(for orientation: UIDeviceOrientation) -> Double { - switch orientation { - case .portrait: return 90 - case .portraitUpsideDown: return 270 - case .landscapeLeft: return 0 - case .landscapeRight: return 180 - default: return 0 - } - } - + func capturePhoto() { #if DEBUG && targetEnvironment(simulator) - if isRunningInSimulator { - captureMockPhoto() - return - } + if isRunningInSimulator { + captureMockPhoto() + return + } #endif - + let photoSettings = createAdvancedPhotoSettings() - + // Configure flash based on camera position if cameraPosition == .back { if output.supportedFlashModes.contains(AVCaptureDevice.FlashMode(rawValue: flashMode.rawValue)!) { @@ -487,56 +479,56 @@ class CameraModel: NSObject, ObservableObject { } else { photoSettings.flashMode = .off } - + // Set proper rotation using AVCaptureDevice.RotationCoordinator guard let connection = output.connection(with: .video) else { output.capturePhoto(with: photoSettings, delegate: self) return } - + guard let deviceInput = session.inputs - .compactMap({ $0 as? AVCaptureDeviceInput }) - .first(where: { $0.device.hasMediaType(.video) }) + .compactMap({ $0 as? 
AVCaptureDeviceInput }) + .first(where: { $0.device.hasMediaType(.video) }) else { output.capturePhoto(with: photoSettings, delegate: self) return } - + let rotationCoordinator = AVCaptureDevice.RotationCoordinator( device: deviceInput.device, previewLayer: preview ) - + connection.videoRotationAngle = rotationCoordinator.videoRotationAngleForHorizonLevelCapture - + output.capturePhoto(with: photoSettings, delegate: self) } - + private func createAdvancedPhotoSettings() -> AVCapturePhotoSettings { let settings = AVCapturePhotoSettings() settings.photoQualityPrioritization = .quality return settings } - + // Smooth zoom with lens-specific adjustments and auto mode restoration func zoom(factor: CGFloat) { guard let device = currentDevice else { return } - + do { try device.lockForConfiguration() - + // Restore auto modes during zoom - if device.isExposureModeSupported(.continuousAutoExposure) && device.exposureMode != .continuousAutoExposure { + if device.isExposureModeSupported(.continuousAutoExposure), device.exposureMode != .continuousAutoExposure { device.exposureMode = .continuousAutoExposure } - - if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) && device.whiteBalanceMode != .continuousAutoWhiteBalance { + + if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance), device.whiteBalanceMode != .continuousAutoWhiteBalance { device.whiteBalanceMode = .continuousAutoWhiteBalance } - + var newZoomFactor = factor - + if currentLensType == .ultraWide { // Map ultra-wide zoom range (0.5x user-facing to device zoom) newZoomFactor = max(0.5, min(newZoomFactor, maxZoom)) @@ -545,10 +537,10 @@ class CameraModel: NSObject, ObservableObject { let currentZoom = device.videoZoomFactor let interpolationFactor: CGFloat = 0.3 let smoothedZoom = currentZoom + (limitedDeviceZoom - currentZoom) * interpolationFactor - + device.videoZoomFactor = smoothedZoom let userFacingZoom = max(0.5, min(newZoomFactor, maxZoom)) - + DispatchQueue.main.async { self.zoomFactor = userFacingZoom } @@ -558,39 +550,39 @@ class CameraModel: NSObject, ObservableObject { let currentZoom = device.videoZoomFactor let interpolationFactor: CGFloat = 0.3 let smoothedZoom = currentZoom + (newZoomFactor - currentZoom) * interpolationFactor - + device.videoZoomFactor = smoothedZoom - + DispatchQueue.main.async { self.zoomFactor = smoothedZoom } } - + device.unlockForConfiguration() } catch { print("Error setting zoom: \(error.localizedDescription)") } } - + // Handle pinch gestures with automatic lens switching and smooth zoom func handlePinchGesture(scale: CGFloat, initialScale: CGFloat? 
= nil) { if initialScale != nil { initialZoom = zoomFactor } - + let zoomSensitivity: CGFloat = 0.5 let zoomDelta = pow(scale, zoomSensitivity) - 1.0 let newZoomFactor = initialZoom + (zoomDelta * (maxZoom - minZoom)) - + // Determine lens switching thresholds let shouldUseUltraWide = newZoomFactor <= 0.9 && cameraPosition == .back let shouldUseWideAngle = newZoomFactor > 0.9 || cameraPosition == .front - - if shouldUseUltraWide && currentLensType != .ultraWide && ultraWideDevice != nil { + + if shouldUseUltraWide, currentLensType != .ultraWide, ultraWideDevice != nil { if let device = currentDevice { do { try device.lockForConfiguration() - + // Prepare auto modes for smooth lens transition if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) { device.whiteBalanceMode = .continuousAutoWhiteBalance @@ -598,80 +590,80 @@ class CameraModel: NSObject, ObservableObject { if device.isExposureModeSupported(.continuousAutoExposure) { device.exposureMode = .continuousAutoExposure } - + device.unlockForConfiguration() } catch { print("📸 Error preparing auto modes before lens switch: \(error.localizedDescription)") } } - + switchLensType(to: .ultraWide) - } else if shouldUseWideAngle && currentLensType != .wideAngle && wideAngleDevice != nil { + } else if shouldUseWideAngle, currentLensType != .wideAngle, wideAngleDevice != nil { if let device = currentDevice { do { try device.lockForConfiguration() - + if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) { device.whiteBalanceMode = .continuousAutoWhiteBalance } if device.isExposureModeSupported(.continuousAutoExposure) { device.exposureMode = .continuousAutoExposure } - + device.unlockForConfiguration() } catch { print("📸 Error preparing auto modes before lens switch: \(error.localizedDescription)") } } - + switchLensType(to: .wideAngle) } else { // Apply zoom with auto mode restoration if let device = currentDevice { do { try device.lockForConfiguration() - - if device.isExposureModeSupported(.continuousAutoExposure) && device.exposureMode != .continuousAutoExposure { + + if device.isExposureModeSupported(.continuousAutoExposure), device.exposureMode != .continuousAutoExposure { device.exposureMode = .continuousAutoExposure } - - if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) && device.whiteBalanceMode != .continuousAutoWhiteBalance { + + if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance), device.whiteBalanceMode != .continuousAutoWhiteBalance { device.whiteBalanceMode = .continuousAutoWhiteBalance } - + device.unlockForConfiguration() } catch { // Ignore errors here, it's just optimization } } - + zoom(factor: newZoomFactor) } } - + // Tap-to-focus with optional white balance locking func adjustCameraSettings(at point: CGPoint, lockWhiteBalance: Bool = false) { guard let device = currentDevice else { return } lastFocusPoint = point focusResetTimer?.invalidate() - + do { try device.lockForConfiguration() // Set focus and exposure points - if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) { + if device.isFocusPointOfInterestSupported, device.isFocusModeSupported(.autoFocus) { device.focusPointOfInterest = point device.focusMode = .autoFocus - + if device.isSmoothAutoFocusSupported { device.isSmoothAutoFocusEnabled = true } } - - if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) { + + if device.isExposurePointOfInterestSupported, device.isExposureModeSupported(.autoExpose) { device.exposurePointOfInterest = 
point device.exposureMode = .continuousAutoExposure } - + // Handle white balance based on lock preference if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) { if lockWhiteBalance { @@ -682,9 +674,9 @@ class CameraModel: NSObject, ObservableObject { device.whiteBalanceMode = .continuousAutoWhiteBalance } } - + device.unlockForConfiguration() - + // Schedule auto-focus reset with appropriate delay let resetDelay = lockWhiteBalance ? 8.0 : 3.0 focusResetTimer = Timer.scheduledTimer(withTimeInterval: resetDelay, repeats: false) { [weak self] _ in @@ -694,32 +686,32 @@ class CameraModel: NSObject, ObservableObject { print("Error adjusting camera settings: \(error.localizedDescription)") } } - + // Return to continuous auto modes after manual adjustments private func resetToAutoFocus() { guard let device = currentDevice else { return } - + do { try device.lockForConfiguration() - + if device.isFocusModeSupported(.continuousAutoFocus) { device.focusMode = .continuousAutoFocus } - + if device.isExposureModeSupported(.continuousAutoExposure) { device.exposureMode = .continuousAutoExposure } - + if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) { device.whiteBalanceMode = .continuousAutoWhiteBalance } - + device.unlockForConfiguration() } catch { print("Error resetting focus: \(error.localizedDescription)") } } - + private func normalizeGains(_ gains: AVCaptureDevice.WhiteBalanceGains, for device: AVCaptureDevice) -> AVCaptureDevice.WhiteBalanceGains { var normalizedGains = gains normalizedGains.redGain = max(1.0, min(gains.redGain, device.maxWhiteBalanceGain)) @@ -727,29 +719,29 @@ class CameraModel: NSObject, ObservableObject { normalizedGains.blueGain = max(1.0, min(gains.blueGain, device.maxWhiteBalanceGain)) return normalizedGains } - + // Switch between ultra-wide and wide-angle cameras with white balance preservation func switchLensType(to lensType: CameraLensType) { guard !isConfiguring else { return } - + if lensType == currentLensType || cameraPosition == .front && lensType == .ultraWide { return } - + isConfiguring = true - + DispatchQueue.main.async { self.currentLensType = lensType } - + DispatchQueue.global(qos: .userInteractive).async { [weak self] in - guard let self = self else { return } - + guard let self else { return } + // Capture current white balance settings for smooth transition var previousWhiteBalanceGains: AVCaptureDevice.WhiteBalanceGains? var previousWhiteBalanceMode: AVCaptureDevice.WhiteBalanceMode = .continuousAutoWhiteBalance - - if let oldDevice = self.currentDevice { + + if let oldDevice = currentDevice { do { try oldDevice.lockForConfiguration() previousWhiteBalanceMode = oldDevice.whiteBalanceMode @@ -759,69 +751,68 @@ class CameraModel: NSObject, ObservableObject { print("📸 Could not capture white balance from previous device: \(error.localizedDescription)") } } - - self.session.beginConfiguration() - - if let oldDevice = self.currentDevice { + + session.beginConfiguration() + + if let oldDevice = currentDevice { NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: oldDevice) } - - if let inputs = self.session.inputs as? [AVCaptureDeviceInput] { + + if let inputs = session.inputs as? [AVCaptureDeviceInput] { for input in inputs { - self.session.removeInput(input) + session.removeInput(input) } } - + do { - var device: AVCaptureDevice? - - switch lensType { + var device: AVCaptureDevice? 
= switch lensType { case .ultraWide: - device = self.ultraWideDevice + ultraWideDevice case .wideAngle: - device = self.wideAngleDevice + wideAngleDevice } - + if device == nil { - if lensType == .ultraWide && self.wideAngleDevice != nil { - self.currentLensType = .wideAngle - device = self.wideAngleDevice + if lensType == .ultraWide, wideAngleDevice != nil { + currentLensType = .wideAngle + device = wideAngleDevice } else { - self.session.commitConfiguration() + session.commitConfiguration() return } } - - guard let device = device else { - self.session.commitConfiguration() + + guard let device else { + session.commitConfiguration() return } - - self.currentDevice = device - + + currentDevice = device + // Configure device with optimal settings and white balance preservation try device.lockForConfiguration() - + device.videoZoomFactor = 1.0 - + if device.isFocusModeSupported(.continuousAutoFocus) { device.focusMode = .continuousAutoFocus device.isSmoothAutoFocusEnabled = true - + if device.isAutoFocusRangeRestrictionSupported { device.autoFocusRangeRestriction = .none } } - + if device.isExposureModeSupported(.continuousAutoExposure) { device.exposureMode = .continuousAutoExposure } - + // Apply white balance transition for smooth lens switching if let previousGains = previousWhiteBalanceGains, - device.isWhiteBalanceModeSupported(.locked) { - let normalizedGains = self.normalizeGains(previousGains, for: device) - + device.isWhiteBalanceModeSupported(.locked) + { + let normalizedGains = normalizeGains(previousGains, for: device) + if previousWhiteBalanceMode == .locked { device.setWhiteBalanceModeLocked(with: normalizedGains) { _ in } } else { @@ -848,84 +839,84 @@ class CameraModel: NSObject, ObservableObject { device.whiteBalanceMode = .continuousAutoWhiteBalance } } - + device.isSubjectAreaChangeMonitoringEnabled = true device.unlockForConfiguration() - + let newInput = try AVCaptureDeviceInput(device: device) - if self.session.canAddInput(newInput) { - self.session.addInput(newInput) + if session.canAddInput(newInput) { + session.addInput(newInput) } - - self.session.commitConfiguration() - self.setupSubjectAreaChangeMonitoring(for: device) - self.configurePhotoOutputForMaxQuality() - self.prepareZeroShutterLagCapture() - - if !self.session.isRunning { + + session.commitConfiguration() + setupSubjectAreaChangeMonitoring(for: device) + configurePhotoOutputForMaxQuality() + prepareZeroShutterLagCapture() + + if !session.isRunning { DispatchQueue.global(qos: .userInteractive).async { self.session.startRunning() } } - - self.isConfiguring = false - + + isConfiguring = false + } catch { print("📸 Error switching lens type: \(error.localizedDescription)") - self.session.commitConfiguration() - self.isConfiguring = false + session.commitConfiguration() + isConfiguring = false } } } - + // Switch between front and back cameras with clean white balance reset func switchCamera(to position: AVCaptureDevice.Position) { guard !isConfiguring else { return } - - if position == cameraPosition && currentDevice != nil { + + if position == cameraPosition, currentDevice != nil { return } - + isConfiguring = true - + DispatchQueue.main.async { self.cameraPosition = position } - + let currentLensTypeSnapshot = currentLensType - - if position == .front && currentLensTypeSnapshot == .ultraWide { + + if position == .front, currentLensTypeSnapshot == .ultraWide { DispatchQueue.main.async { self.currentLensType = .wideAngle } } - + DispatchQueue.global(qos: .userInteractive).async { [weak self] in - guard let 
self = self else { return } - self.session.beginConfiguration() - - if let oldDevice = self.currentDevice { + guard let self else { return } + session.beginConfiguration() + + if let oldDevice = currentDevice { NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: oldDevice) } - - if let inputs = self.session.inputs as? [AVCaptureDeviceInput] { + + if let inputs = session.inputs as? [AVCaptureDeviceInput] { for input in inputs { - self.session.removeInput(input) + session.removeInput(input) } } - + do { // Update available devices for new position - self.wideAngleDevice = self.wideAngleCamera(position: position) - + wideAngleDevice = wideAngleCamera(position: position) + if position == .back { - self.ultraWideDevice = self.ultraWideCamera() + ultraWideDevice = ultraWideCamera() } else { - self.ultraWideDevice = nil + ultraWideDevice = nil } - + var device: AVCaptureDevice? - if position == .back && currentLensType == .ultraWide && ultraWideDevice != nil { + if position == .back, currentLensType == .ultraWide, ultraWideDevice != nil { device = ultraWideDevice } else { device = wideAngleDevice @@ -935,70 +926,70 @@ class CameraModel: NSObject, ObservableObject { } } } - - guard let device = device else { - self.session.commitConfiguration() + + guard let device else { + session.commitConfiguration() return } - - self.currentDevice = device - + + currentDevice = device + // Configure device with fresh auto white balance for new camera position try device.lockForConfiguration() - + device.videoZoomFactor = 1.0 - + if device.isFocusModeSupported(.continuousAutoFocus) { device.focusMode = .continuousAutoFocus device.isSmoothAutoFocusEnabled = true - + if device.isAutoFocusRangeRestrictionSupported { device.autoFocusRangeRestriction = .none } } - + if device.isExposureModeSupported(.continuousAutoExposure) { device.exposureMode = .continuousAutoExposure } - + // Use clean auto white balance for front/back switches if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) { device.whiteBalanceMode = .continuousAutoWhiteBalance } - + device.isSubjectAreaChangeMonitoringEnabled = true device.unlockForConfiguration() - + let newInput = try AVCaptureDeviceInput(device: device) - if self.session.canAddInput(newInput) { - self.session.addInput(newInput) + if session.canAddInput(newInput) { + session.addInput(newInput) } - - self.session.commitConfiguration() - self.setupSubjectAreaChangeMonitoring(for: device) - self.configurePhotoOutputForMaxQuality() - self.prepareZeroShutterLagCapture() - + + session.commitConfiguration() + setupSubjectAreaChangeMonitoring(for: device) + configurePhotoOutputForMaxQuality() + prepareZeroShutterLagCapture() + DispatchQueue.main.async { self.zoomFactor = 1.0 } - - if !self.session.isRunning { + + if !session.isRunning { DispatchQueue.global(qos: .userInteractive).async { self.session.startRunning() } } - - self.isConfiguring = false - + + isConfiguring = false + } catch { print("📸 Error switching camera: \(error.localizedDescription)") - self.session.commitConfiguration() - self.isConfiguring = false + session.commitConfiguration() + isConfiguring = false } } } - + // Convert device coordinates to view coordinates for UI display func showFocusIndicator(on viewPoint: CGPoint) { DispatchQueue.main.async { @@ -1009,26 +1000,26 @@ class CameraModel: NSObject, ObservableObject { } } } - + // Reset zoom level to 1.0 (called when app comes from background) func resetZoomLevel() { #if DEBUG && targetEnvironment(simulator) - 
if isRunningInSimulator { - DispatchQueue.main.async { - self.zoomFactor = 1.0 + if isRunningInSimulator { + DispatchQueue.main.async { + self.zoomFactor = 1.0 + } + return } - return - } #endif - + guard let device = currentDevice else { return } - + DispatchQueue.global(qos: .userInitiated).async { do { try device.lockForConfiguration() device.videoZoomFactor = 1.0 device.unlockForConfiguration() - + DispatchQueue.main.async { self.zoomFactor = 1.0 } @@ -1038,10 +1029,11 @@ class CameraModel: NSObject, ObservableObject { } } } - // Photo capture delegate with metadata preservation and secure storage - extension CameraModel: AVCapturePhotoCaptureDelegate { + +// Photo capture delegate with metadata preservation and secure storage +extension CameraModel: AVCapturePhotoCaptureDelegate { func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) { - if let error = error { + if let error { print("Error capturing photo: \(error.localizedDescription)") return } @@ -1059,20 +1051,20 @@ class CameraModel: NSObject, ObservableObject { } } } - + // Handle deferred photo processing with instant preview - func photoOutput(_ output: AVCapturePhotoOutput, didFinishCapturingDeferredPhotoProxy proxy: AVCaptureDeferredPhotoProxy?, error: Error?) { + func photoOutput(_: AVCapturePhotoOutput, didFinishCapturingDeferredPhotoProxy proxy: AVCaptureDeferredPhotoProxy?, error: Error?) { guard error == nil else { print("Error with deferred photo: \(error!.localizedDescription)") return } - + if let previewPixelBuffer = proxy?.previewPixelBuffer { let ciImage = CIImage(cvPixelBuffer: previewPixelBuffer) let context = CIContext() if let cgImage = context.createCGImage(ciImage, from: ciImage.extent) { let previewImage = UIImage(cgImage: cgImage) - + DispatchQueue.main.async { self.recentImage = previewImage } @@ -1080,51 +1072,38 @@ class CameraModel: NSObject, ObservableObject { } } - private func fixImageOrientation(_ image: UIImage) -> UIImage { - _ = image.imageOrientation - - if image.imageOrientation == .up { - return image - } - - UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale) - image.draw(in: CGRect(origin: .zero, size: image.size)) - let normalizedImage = UIGraphicsGetImageFromCurrentImageContext()! - UIGraphicsEndImageContext() - - return normalizedImage - } - // Save photo with metadata extraction and secure storage private func savePhoto(_ imageData: Data) { DispatchQueue.global(qos: .userInitiated).async { var metadata: [String: Any] = [:] if let source = CGImageSourceCreateWithData(imageData as CFData, nil), - let imageMetadata = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [String: Any] { + let imageMetadata = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [String: Any] + { metadata = imageMetadata - + // Extract and preserve orientation information - var exifOrientation: Int = 1 - + var exifOrientation = 1 + if let exifDict = metadata[String(kCGImagePropertyExifDictionary)] as? [String: Any], - let orientation = exifDict[String(kCGImagePropertyOrientation)] as? Int { + let orientation = exifDict[String(kCGImagePropertyOrientation)] as? Int + { exifOrientation = orientation - } - else if let tiffDict = metadata[String(kCGImagePropertyTIFFDictionary)] as? [String: Any], - let orientation = tiffDict[String(kCGImagePropertyTIFFOrientation)] as? Int { + } else if let tiffDict = metadata[String(kCGImagePropertyTIFFDictionary)] as? [String: Any], + let orientation = tiffDict[String(kCGImagePropertyTIFFOrientation)] as? 
Int + { exifOrientation = orientation } - + metadata["originalOrientation"] = exifOrientation metadata["cameraPosition"] = self.cameraPosition == .front ? "front" : "back" - + // Determine landscape orientation based on dimensions and rotation if let pixelWidth = metadata[String(kCGImagePropertyPixelWidth)] as? Int, - let pixelHeight = metadata[String(kCGImagePropertyPixelHeight)] as? Int { - + let pixelHeight = metadata[String(kCGImagePropertyPixelHeight)] as? Int + { let isRotated = (exifOrientation >= 5 && exifOrientation <= 8) - + if isRotated { metadata["isLandscape"] = pixelHeight > pixelWidth } else { @@ -1132,7 +1111,7 @@ class CameraModel: NSObject, ObservableObject { } } } - + do { let filename = try self.secureFileManager.savePhoto(imageData, withMetadata: metadata) print("Photo saved successfully with timestamp filename: \(filename)") diff --git a/SnapSafe/Models/DetectedFace.swift b/SnapSafe/Models/DetectedFace.swift index 732d078..cf135b2 100644 --- a/SnapSafe/Models/DetectedFace.swift +++ b/SnapSafe/Models/DetectedFace.swift @@ -7,73 +7,54 @@ import UIKit -// Define a type alias to this class to avoid conflicts -public typealias DetectedFace = _DetectedFace - -// Class to represent a detected face with selection state and resize capability -public class _DetectedFace: Identifiable { - public let id = UUID() - let bounds: CGRect +struct DetectedFace: Identifiable, Codable, Equatable { + let id = UUID() + let boundingBox: CGRect var isSelected: Bool = false - + // For manually created boxes var isUserCreated: Bool = false - - init(bounds: CGRect, isSelected: Bool = false, isUserCreated: Bool = false) { - self.bounds = bounds + + init(boundingBox: CGRect, isSelected: Bool = false, isUserCreated: Bool = false) { + self.boundingBox = boundingBox self.isSelected = isSelected self.isUserCreated = isUserCreated } - + + // Legacy compatibility + init(bounds: CGRect, isSelected: Bool = false, isUserCreated: Bool = false) { + self.init(boundingBox: bounds, isSelected: isSelected, isUserCreated: isUserCreated) + } + // For compatibility with the original struct - convenience init(rect: CGRect, isSelected: Bool = false) { - self.init(bounds: rect, isSelected: isSelected) + init(rect: CGRect, isSelected: Bool = false) { + self.init(boundingBox: rect, isSelected: isSelected) } - + + var bounds: CGRect { + boundingBox + } + // Calculate scaled rectangle for display in UI func scaledRect(originalSize: CGSize, displaySize: CGSize) -> CGRect { // Calculate scale factors for width and height let scaleX = displaySize.width / originalSize.width let scaleY = displaySize.height / originalSize.height - + // Use minimum scale to maintain aspect ratio let scale = min(scaleX, scaleY) - + // Calculate the new origin and size for the rectangle - let scaledWidth = bounds.width * scale - let scaledHeight = bounds.height * scale - + let scaledWidth = boundingBox.width * scale + let scaledHeight = boundingBox.height * scale + // Calculate offsets to center the image within the available space let offsetX = (displaySize.width - originalSize.width * scale) / 2 let offsetY = (displaySize.height - originalSize.height * scale) / 2 - - let scaledX = bounds.origin.x * scale + offsetX - let scaledY = bounds.origin.y * scale + offsetY - + + let scaledX = boundingBox.origin.x * scale + offsetX + let scaledY = boundingBox.origin.y * scale + offsetY + return CGRect(x: scaledX, y: scaledY, width: scaledWidth, height: scaledHeight) } - - // Function to adjust bounds when the box is resized - func resize(by scale: CGFloat) 
-> DetectedFace { - // Calculate the new center point - let centerX = bounds.midX - let centerY = bounds.midY - - // Calculate new width and height - let newWidth = bounds.width * scale - let newHeight = bounds.height * scale - - // Calculate new origin - let newX = centerX - newWidth / 2 - let newY = centerY - newHeight / 2 - - // Create new face with updated bounds - let newFace = DetectedFace( - bounds: CGRect(x: newX, y: newY, width: newWidth, height: newHeight), - isSelected: self.isSelected, - isUserCreated: self.isUserCreated - ) - - return newFace - } -} \ No newline at end of file +} diff --git a/SnapSafe/Models/MaskMode.swift b/SnapSafe/Models/MaskMode.swift index 203710f..ae50c56 100644 --- a/SnapSafe/Models/MaskMode.swift +++ b/SnapSafe/Models/MaskMode.swift @@ -8,9 +8,25 @@ import Foundation // Different masking modes for face obfuscation -enum MaskMode { +enum MaskMode: String, CaseIterable, Codable { + case none case blur case pixelate case blackout case noise -} \ No newline at end of file + + var displayName: String { + switch self { + case .none: + "None" + case .blur: + "Blur" + case .pixelate: + "Pixelate" + case .blackout: + "Blackout" + case .noise: + "Noise" + } + } +} diff --git a/SnapSafe/Models/PhotoMetadata.swift b/SnapSafe/Models/PhotoMetadata.swift new file mode 100644 index 0000000..8d65841 --- /dev/null +++ b/SnapSafe/Models/PhotoMetadata.swift @@ -0,0 +1,46 @@ +// +// PhotoMetadata.swift +// SnapSafe +// +// Created by Bill Booth on 5/28/25. +// + +import Foundation + +struct PhotoMetadata: Codable, Equatable { + let id: String + let creationDate: Date + let modificationDate: Date + let fileSize: Int + let faces: [DetectedFace] + let maskMode: MaskMode + let isDecoy: Bool + + init(id: String, creationDate: Date = Date(), modificationDate: Date = Date(), fileSize: Int, faces: [DetectedFace] = [], maskMode: MaskMode = .none, isDecoy: Bool = false) { + self.id = id + self.creationDate = creationDate + self.modificationDate = modificationDate + self.fileSize = fileSize + self.faces = faces + self.maskMode = maskMode + self.isDecoy = isDecoy + } +} + +struct PhotoPredicate { + let dateRange: ClosedRange? + let hasFaces: Bool? + let maskMode: MaskMode? + + init(dateRange: ClosedRange? = nil, hasFaces: Bool? = nil, maskMode: MaskMode? = nil) { + self.dateRange = dateRange + self.hasFaces = hasFaces + self.maskMode = maskMode + } +} + +enum ExportFormat { + case jpeg(quality: CGFloat) + case png + case heic +} diff --git a/SnapSafe/Models/SecurePhoto.swift b/SnapSafe/Models/SecurePhoto.swift deleted file mode 100644 index 52af61d..0000000 --- a/SnapSafe/Models/SecurePhoto.swift +++ /dev/null @@ -1,238 +0,0 @@ -// -// SecurePhoto.swift -// SnapSafe -// -// Created by Bill Booth on 5/20/25. -// - -import UIKit - -class SecurePhoto: Identifiable, Equatable { - let id = UUID() - let filename: String - var metadata: [String: Any] - let fileURL: URL - - // Memory tracking - var isVisible: Bool = false - private var lastAccessTime: Date = .init() - - // Use lazy loading for images to reduce memory usage - private var _thumbnail: UIImage? - private var _fullImage: UIImage? - - // Track if the photo is in landscape orientation (width > height) - private var _isLandscape: Bool? - - // Computed property to check if the photo is in landscape orientation - var isLandscape: Bool { - // Check if we have orientation info in metadata (always check metadata first) - if let isLandscape = metadata["isLandscape"] as? 
Bool { - return isLandscape - } - - // If we've already calculated the orientation from image dimensions, return cached value - if let cachedOrientation = _isLandscape { - return cachedOrientation - } - - // Check the orientation value - let orientation = originalOrientation.rawValue - - // Orientations 5-8 are 90/270 degree rotations (landscape) - // For these, we need to swap width/height for comparison - let isRotated = orientation >= 5 && orientation <= 8 - - // Otherwise, load the full image and determine orientation by dimensions - let image = fullImage - let isLandscape: Bool - - if isRotated { - // For rotated images, swap width/height for comparison - isLandscape = image.size.height > image.size.width - } else { - // For normal orientation, compare directly - isLandscape = image.size.width > image.size.height - } - - // Cache the result (only cache calculated values, not metadata values) - _isLandscape = isLandscape - - return isLandscape - } - - // Helper to get the correct dimensions for display based on orientation - func frameSizeForDisplay(cellSize: CGFloat = 100) -> (width: CGFloat, height: CGFloat) { - let orientation = originalOrientation.rawValue - let isRotated = orientation >= 5 && orientation <= 8 - - // For landscape photos or rotated portrait photos (which become landscape) - if (isLandscape && !isRotated) || (!isLandscape && isRotated) { - return (width: cellSize, height: cellSize * (thumbnail.size.height / thumbnail.size.width)) - } - // For portrait photos or rotated landscape photos (which become portrait) - else { - return (width: cellSize * (thumbnail.size.width / thumbnail.size.height), height: cellSize) - } - } - - // Original orientation of the image from EXIF data - var originalOrientation: UIImage.Orientation { - // First check for our stored orientation in metadata - if let orientationValue = metadata["originalOrientation"] as? Int { - // Convert EXIF orientation (1-8) to UIImage.Orientation - switch orientationValue { - case 1: return .up // Normal - case 2: return .upMirrored // Mirrored horizontally - case 3: return .down // Rotated 180° - case 4: return .downMirrored // Mirrored vertically - case 5: return .leftMirrored // Mirrored horizontally, then rotated 90° CCW - case 6: return .right // Rotated 90° CW - case 7: return .rightMirrored // Mirrored horizontally, then rotated 90° CW - case 8: return .left // Rotated 90° CCW - default: return .up // Default to up if invalid - } - } - - // Otherwise, inspect the image directly - if let image = _fullImage { - return image.imageOrientation - } - - // Default to up if we can't determine - return .up - } - - // Computed property to check if this photo is marked as a decoy - var isDecoy: Bool { - return metadata["isDecoy"] as? Bool ?? 
false - } - - // Function to mark/unmark as decoy - func setDecoyStatus(_ isDecoy: Bool) { - metadata["isDecoy"] = isDecoy - - // Save updated metadata back to disk - DispatchQueue.global(qos: .userInitiated).async { [weak self] in - guard let self = self else { return } - do { - let secureFileManager = SecureFileManager() - let metadataURL = try secureFileManager.getSecureDirectory().appendingPathComponent("\(filename).metadata") - let metadataData = try JSONSerialization.data(withJSONObject: metadata, options: []) - try metadataData.write(to: metadataURL) - print("Updated decoy status for photo: \(filename)") - } catch { - print("Error updating decoy status: \(error.localizedDescription)") - } - } - } - - // Thumbnail is loaded on demand and cached - var thumbnail: UIImage { - // Update last access time and mark as visible (always do this when thumbnail is accessed) - lastAccessTime = Date() - isVisible = true - - if let cachedThumbnail = _thumbnail { - return cachedThumbnail - } - - // Load thumbnail if needed - do { - if let thumb = try secureFileManager.loadPhotoThumbnail(from: fileURL) { - // Store the loaded thumbnail (with its original orientation) - _thumbnail = thumb - - // Return the thumbnail, respecting its orientation - // Note: We don't normalize the orientation here to preserve the original aspect ratio - return thumb - } - } catch { - print("Error loading thumbnail: \(error)") - } - - // Fallback to placeholder - return UIImage(systemName: "photo") ?? UIImage() - } - - // Full image is loaded on demand - var fullImage: UIImage { - // Update last access time and mark as visible (always do this when fullImage is accessed) - lastAccessTime = Date() - isVisible = true - - if let cachedFullImage = _fullImage { - return cachedFullImage - } - - // Load full image if needed - do { - let (data, _) = try secureFileManager.loadPhoto(filename: filename) - if let img = UIImage(data: data) { - // Store the image with its original orientation - _fullImage = img - - // When we load a full image, notify the memory manager - MemoryManager.shared.reportFullImageLoaded() - - // Return the image with its original orientation preserved - return img - } - } catch { - print("Error loading full image: \(error)") - } - - // Fallback to thumbnail - return thumbnail - } - - // Mark as no longer visible in the UI - func markAsInvisible() { - isVisible = false - } - - // Get the time since this photo was last accessed - var timeSinceLastAccess: TimeInterval { - return Date().timeIntervalSince(lastAccessTime) - } - - // Clear memory when no longer needed - func clearMemory(keepThumbnail: Bool = true) { - if _fullImage != nil { - _fullImage = nil - - // Notify memory manager when we free a full image - MemoryManager.shared.reportFullImageUnloaded() - } - - if !keepThumbnail && _thumbnail != nil { - _thumbnail = nil - - // Notify memory manager when we free a thumbnail - MemoryManager.shared.reportThumbnailUnloaded() - } - } - - init(filename: String, metadata: [String: Any], fileURL: URL, preloadedThumbnail: UIImage? 
= nil) { - self.filename = filename - self.metadata = metadata - self.fileURL = fileURL - _thumbnail = preloadedThumbnail - } - - // Legacy initializer for compatibility - convenience init(filename: String, thumbnail: UIImage, fullImage: UIImage, metadata: [String: Any]) { - self.init(filename: filename, metadata: metadata, fileURL: URL(fileURLWithPath: "")) - _thumbnail = thumbnail - _fullImage = fullImage - } - - // Implement Equatable - static func == (lhs: SecurePhoto, rhs: SecurePhoto) -> Bool { - // Compare by id and filename - return lhs.id == rhs.id && lhs.filename == rhs.filename - } - - // Shared file manager instance - private let secureFileManager = SecureFileManager() -} diff --git a/SnapSafe/PhotoExportUseCase.swift b/SnapSafe/PhotoExportUseCase.swift new file mode 100644 index 0000000..48911e5 --- /dev/null +++ b/SnapSafe/PhotoExportUseCase.swift @@ -0,0 +1,66 @@ +// +// PhotoExportUseCase.swift +// SnapSafe +// +// Created by Claude on 5/28/25. +// + +import Foundation +import UIKit + +final class PhotoExportUseCase { + private let repository: SecureImageRepositoryProtocol + + init(repository: SecureImageRepositoryProtocol) { + self.repository = repository + } + + func exportPhoto(_ photo: SecurePhoto, format: ExportFormat = .jpeg(quality: 0.9)) async throws -> Data { + try await repository.exportPhoto(photo, format: format) + } + + func exportPhotoToPhotoLibrary(_ photo: SecurePhoto) async throws { + let imageData = try await repository.exportPhoto(photo, format: .jpeg(quality: 0.9)) + + guard let image = UIImage(data: imageData) else { + throw PhotoExportError.exportFailed(reason: "Failed to create image from exported data") + } + + // Save to photo library (this would require PhotosFramework integration) + // For now, just validate the export worked + print("Successfully exported photo \(photo.id) to photo library") + } + + func exportMultiplePhotos(_ photos: [SecurePhoto], format: ExportFormat = .jpeg(quality: 0.9)) async throws -> [String: Data] { + var exportedPhotos: [String: Data] = [:] + + for photo in photos { + do { + let data = try await exportPhoto(photo, format: format) + exportedPhotos[photo.id] = data + } catch { + print("Failed to export photo \(photo.id): \(error)") + throw PhotoExportError.batchExportFailed(photoId: photo.id, error: error) + } + } + + return exportedPhotos + } +} + +enum PhotoExportError: Error, LocalizedError { + case exportFailed(reason: String) + case batchExportFailed(photoId: String, error: Error) + case photoLibraryAccessDenied + + var errorDescription: String? 
{ + switch self { + case let .exportFailed(reason): + "Export failed: \(reason)" + case let .batchExportFailed(photoId, error): + "Batch export failed for photo \(photoId): \(error.localizedDescription)" + case .photoLibraryAccessDenied: + "Access to photo library denied" + } + } +} diff --git a/SnapSafe/AuthManager.swift b/SnapSafe/Repositories/AuthManager.swift similarity index 100% rename from SnapSafe/AuthManager.swift rename to SnapSafe/Repositories/AuthManager.swift diff --git a/SnapSafe/EncryptionManager.swift b/SnapSafe/Repositories/EncryptionManager.swift similarity index 100% rename from SnapSafe/EncryptionManager.swift rename to SnapSafe/Repositories/EncryptionManager.swift diff --git a/SnapSafe/FaceDetector.swift b/SnapSafe/Repositories/FaceDetector.swift similarity index 95% rename from SnapSafe/FaceDetector.swift rename to SnapSafe/Repositories/FaceDetector.swift index c4038dc..3847fe6 100644 --- a/SnapSafe/FaceDetector.swift +++ b/SnapSafe/Repositories/FaceDetector.swift @@ -11,7 +11,6 @@ import Security import UIKit import Vision - class FaceDetector { // Detect faces and return as DetectedFace objects func detectFaces(in image: UIImage, completion: @escaping ([DetectedFace]) -> Void) { @@ -83,7 +82,7 @@ class FaceDetector { // Process faces with specified masking modes with memory optimizations func maskFaces(in image: UIImage, faces: [DetectedFace], modes: [MaskMode]) -> UIImage? { // Only process selected faces - let selectedFaces = faces.filter { $0.isSelected } + let selectedFaces = faces.filter(\.isSelected) if selectedFaces.isEmpty || modes.isEmpty { return image @@ -119,6 +118,12 @@ class FaceDetector { // Apply the appropriate masking effect switch primaryMode { + case .none: + // For none, don't apply any masking (just restore original) + if let faceCGImage = workingImage.cgImage?.cropping(to: safeRect) { + UIImage(cgImage: faceCGImage).draw(in: safeRect) + } + case .blackout: // For blackout, just fill with black context.setFillColor(UIColor.black.cgColor) @@ -216,27 +221,7 @@ class FaceDetector { // Blur faces with default blur mode func blurFaces(in image: UIImage, faces: [DetectedFace]) -> UIImage? { - return maskFaces(in: image, faces: faces, modes: [.blur]) - } - - // MARK: - Face Masking Implementations - - // Blackout a region of the image - private func blackout(image: UIImage, rect: CGRect) -> UIImage? 
{ - UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale) - defer { UIGraphicsEndImageContext() } - - image.draw(at: .zero) - - guard let context = UIGraphicsGetCurrentContext() else { - return nil - } - - // Fill the rect with black - context.setFillColor(UIColor.black.cgColor) - context.fill(rect) - - return UIGraphicsGetImageFromCurrentImageContext() + maskFaces(in: image, faces: faces, modes: [.blur]) } // Pixelate a region of the image @@ -265,7 +250,7 @@ class FaceDetector { let smallImage = UIGraphicsGetImageFromCurrentImageContext() UIGraphicsEndImageContext() - guard var smallImage = smallImage else { return nil } + guard var smallImage else { return nil } // Step 2: Optionally add noise to the small image if addNoise { diff --git a/SnapSafe/FileManager.swift b/SnapSafe/Repositories/FileManager.swift similarity index 97% rename from SnapSafe/FileManager.swift rename to SnapSafe/Repositories/FileManager.swift index a608091..d583ee5 100644 --- a/SnapSafe/FileManager.swift +++ b/SnapSafe/Repositories/FileManager.swift @@ -13,7 +13,8 @@ import SwiftUI class SecureFileManager { private let fileManager = FileManager.default - // Get a secure directory that's not backed up to iCloud + // Directory isn't backed up. Local only. + // mechanism: set the "do not backup" attribute func getSecureDirectory() throws -> URL { guard let documentsDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask).first else { throw NSError(domain: "com.securecamera", code: -1, userInfo: nil) @@ -24,7 +25,6 @@ class SecureFileManager { if !fileManager.fileExists(atPath: secureDirectory.path) { try fileManager.createDirectory(at: secureDirectory, withIntermediateDirectories: true, attributes: nil) - // Set the "do not backup" attribute var resourceValues = URLResourceValues() resourceValues.isExcludedFromBackup = true var secureDirectoryWithAttributes = secureDirectory @@ -37,14 +37,14 @@ class SecureFileManager { // Save photo with UTC timestamp filename for better chronological sorting func savePhoto(_ photoData: Data, withMetadata metadata: [String: Any] = [:], isEdited: Bool = false, originalFilename: String? 
= nil) throws -> String { let secureDirectory = try getSecureDirectory() - + // Generate UTC timestamp filename with microsecond precision + UUID suffix for uniqueness let dateFormatter = ISO8601DateFormatter() dateFormatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds] let utcTimestamp = dateFormatter.string(from: Date()) .replacingOccurrences(of: ":", with: "") .replacingOccurrences(of: "-", with: "") - + // Add short UUID suffix to guarantee uniqueness for rapid saves let uuidSuffix = UUID().uuidString.prefix(8) let filename = "\(utcTimestamp)_\(uuidSuffix)" @@ -59,13 +59,13 @@ class SecureFileManager { // Add creation date to metadata for sorting let now = Date() serializedMetadata["creationDate"] = now.timeIntervalSince1970 - + // Mark as edited if specified if isEdited { serializedMetadata["isEdited"] = true - + // Link to original photo if provided - if let originalFilename = originalFilename { + if let originalFilename { serializedMetadata["originalFilename"] = originalFilename } } @@ -84,19 +84,19 @@ class SecureFileManager { return filename } - + // Creates a temporary file for sharing with a UUID filename func preparePhotoForSharing(imageData: Data) throws -> URL { // Get temporary directory let tempDirectory = FileManager.default.temporaryDirectory - + // Create UUID filename for sharing let uuid = UUID().uuidString let tempFileURL = tempDirectory.appendingPathComponent("\(uuid).jpg") - + // Write the data to the temporary file try imageData.write(to: tempFileURL) - + return tempFileURL } diff --git a/SnapSafe/KeyManager.swift b/SnapSafe/Repositories/KeyManager.swift similarity index 100% rename from SnapSafe/KeyManager.swift rename to SnapSafe/Repositories/KeyManager.swift diff --git a/SnapSafe/LocationManager.swift b/SnapSafe/Repositories/LocationManager.swift similarity index 82% rename from SnapSafe/LocationManager.swift rename to SnapSafe/Repositories/LocationManager.swift index 4e36060..73425be 100644 --- a/SnapSafe/LocationManager.swift +++ b/SnapSafe/Repositories/LocationManager.swift @@ -28,7 +28,8 @@ class LocationManager: NSObject, ObservableObject { locationManager.desiredAccuracy = kCLLocationAccuracyBest // Load saved user preference for location data inclusion - shouldIncludeLocationData = UserDefaults.standard.bool(forKey: "shouldIncludeLocationData") + shouldIncludeLocationData = UserDefaults.standard.bool( + forKey: "shouldIncludeLocationData") // Get the current authorization status authorizationStatus = locationManager.authorizationStatus @@ -42,7 +43,9 @@ class LocationManager: NSObject, ObservableObject { // Function to start location updates if we have permission func startUpdatingLocation() { // Only start updates if we have permission and the user wants location data - if authorizationStatus == .authorizedWhenInUse && shouldIncludeLocationData { + if authorizationStatus == .authorizedWhenInUse, + shouldIncludeLocationData + { locationManager.startUpdatingLocation() } } @@ -55,7 +58,10 @@ class LocationManager: NSObject, ObservableObject { // Function to get the current location metadata for a photo func getCurrentLocationMetadata() -> [String: Any]? 
{ // If the user doesn't want location data or we don't have permission, return nil - if !shouldIncludeLocationData || (authorizationStatus != .authorizedWhenInUse && authorizationStatus != .authorizedAlways) { + if !shouldIncludeLocationData + || (authorizationStatus != .authorizedWhenInUse + && authorizationStatus != .authorizedAlways) + { return nil } @@ -78,14 +84,17 @@ class LocationManager: NSObject, ObservableObject { // Altitude if location.verticalAccuracy > 0 { - gpsDict[String(kCGImagePropertyGPSAltitudeRef)] = location.altitude < 0 ? 1 : 0 - gpsDict[String(kCGImagePropertyGPSAltitude)] = abs(location.altitude) + gpsDict[String(kCGImagePropertyGPSAltitudeRef)] = + location.altitude < 0 ? 1 : 0 + gpsDict[String(kCGImagePropertyGPSAltitude)] = abs( + location.altitude) } // Timestamp let dateFormatter = DateFormatter() dateFormatter.dateFormat = "yyyy:MM:dd HH:mm:ss" - gpsDict[String(kCGImagePropertyGPSDateStamp)] = dateFormatter.string(from: location.timestamp) + gpsDict[String(kCGImagePropertyGPSDateStamp)] = + dateFormatter.string(from: location.timestamp) // Create the GPS metadata dictionary return [String(kCGImagePropertyGPSDictionary): gpsDict] @@ -131,7 +140,10 @@ extension LocationManager: CLLocationManagerDelegate { authorizationStatus = manager.authorizationStatus // Start or stop location updates based on new authorization - if shouldIncludeLocationData && (authorizationStatus == .authorizedWhenInUse || authorizationStatus == .authorizedAlways) { + if shouldIncludeLocationData, + authorizationStatus == .authorizedWhenInUse + || authorizationStatus == .authorizedAlways + { startUpdatingLocation() } else { stopUpdatingLocation() @@ -139,7 +151,9 @@ extension LocationManager: CLLocationManagerDelegate { } // Called when a new location is available - func locationManager(_: CLLocationManager, didUpdateLocations locations: [CLLocation]) { + func locationManager( + _: CLLocationManager, didUpdateLocations locations: [CLLocation] + ) { guard let location = locations.last else { return } // Use the most recent location @@ -148,6 +162,7 @@ extension LocationManager: CLLocationManagerDelegate { // Called when there's an error getting location func locationManager(_: CLLocationManager, didFailWithError error: Error) { - print("Location Manager failed with error: \(error.localizedDescription)") + print( + "Location Manager failed with error: \(error.localizedDescription)") } } diff --git a/SnapSafe/Models/MemoryManager.swift b/SnapSafe/Repositories/MemoryManager.swift similarity index 99% rename from SnapSafe/Models/MemoryManager.swift rename to SnapSafe/Repositories/MemoryManager.swift index 8726372..abd2e4c 100644 --- a/SnapSafe/Models/MemoryManager.swift +++ b/SnapSafe/Repositories/MemoryManager.swift @@ -110,4 +110,4 @@ class MemoryManager { loadedFullImages = 0 loadedThumbnails = 0 } -} \ No newline at end of file +} diff --git a/SnapSafe/PINManager.swift b/SnapSafe/Repositories/PINManager.swift similarity index 95% rename from SnapSafe/PINManager.swift rename to SnapSafe/Repositories/PINManager.swift index 2bb2c7c..1f4b5e3 100644 --- a/SnapSafe/PINManager.swift +++ b/SnapSafe/Repositories/PINManager.swift @@ -5,85 +5,85 @@ // Created by Bill Booth on 5/22/25. 
// +import Combine import Foundation import SwiftUI -import Combine class PINManager: ObservableObject { // Singleton instance static let shared = PINManager() - + // Published properties for observers @Published var isPINSet: Bool = false @Published var requirePINOnResume: Bool = true - @Published var lastActiveTime: Date = Date() - + @Published var lastActiveTime: Date = .init() + // Keys for UserDefaults private let pinKey = "snapSafe.userPIN" private let pinSetKey = "snapSafe.isPINSet" private let requirePINOnResumeKey = "snapSafe.requirePINOnResume" - + // Computed property to check if PIN is set private var userDefaults = UserDefaults.standard - + private init() { // Load initial values from UserDefaults isPINSet = userDefaults.bool(forKey: pinSetKey) requirePINOnResume = userDefaults.bool(forKey: requirePINOnResumeKey, defaultValue: true) - + print("PINManager initialized - PIN is set: \(isPINSet), require PIN on resume: \(requirePINOnResume)") - + // Update last active time updateLastActiveTime() } - + // Set the PIN func setPIN(_ pin: String) { // Store PIN (not encrypted for now as requested) userDefaults.setValue(pin, forKey: pinKey) userDefaults.setValue(true, forKey: pinSetKey) print("PIN has been set, isPINSet flag set to true") - + // Update published property DispatchQueue.main.async { self.isPINSet = true } } - + // Verify the PIN func verifyPIN(_ pin: String) -> Bool { guard let storedPIN = userDefaults.string(forKey: pinKey) else { print("No stored PIN found for verification") return false } - + // Simple comparison for now let isMatch = pin == storedPIN print("PIN verification: \(isMatch ? "successful" : "failed")") return isMatch } - + // Set the requirePINOnResume flag func setRequirePINOnResume(_ require: Bool) { userDefaults.setValue(require, forKey: requirePINOnResumeKey) print("Set requirePINOnResume to: \(require)") - + // Update published property DispatchQueue.main.async { self.requirePINOnResume = require } } - + // Update the last active time func updateLastActiveTime() { lastActiveTime = Date() } - + // Clear the PIN (for testing or reset) func clearPIN() { userDefaults.removeObject(forKey: pinKey) userDefaults.setValue(false, forKey: pinSetKey) - + // Update published property DispatchQueue.main.async { self.isPINSet = false diff --git a/SnapSafe/ScreenCaptureManager.swift b/SnapSafe/Repositories/ScreenCaptureManager.swift similarity index 55% rename from SnapSafe/ScreenCaptureManager.swift rename to SnapSafe/Repositories/ScreenCaptureManager.swift index e39e243..a345a76 100644 --- a/SnapSafe/ScreenCaptureManager.swift +++ b/SnapSafe/Repositories/ScreenCaptureManager.swift @@ -5,28 +5,27 @@ // Created by Bill Booth on 5/22/25. // -import SwiftUI import Combine +import SwiftUI -/// Manager class to handle screen recording and screenshot detection class ScreenCaptureManager: ObservableObject { // Singleton instance static let shared = ScreenCaptureManager() - + // Published properties for observers @Published var isScreenBeingRecorded = false @Published var screenshotTaken = false - + // Timer to reset screenshot taken flag private var screenshotResetTimer: Timer? 
- + // Private initializer for singleton private init() { startCaptureMonitoring() startScreenshotMonitoring() } - - /// Start monitoring for screen recording + + // Start monitoring for screen recording func startCaptureMonitoring() { // Add observer for screen recording status changes NotificationCenter.default.addObserver( @@ -36,26 +35,26 @@ class ScreenCaptureManager: ObservableObject { ) { [weak self] _ in self?.handleCaptureChange() } - + // Check initial state handleCaptureChange() } - - /// Handle changes in screen recording status + + // Handle changes in screen recording status private func handleCaptureChange() { // Update the published property on the main thread DispatchQueue.main.async { [weak self] in self?.isScreenBeingRecorded = UIScreen.main.isCaptured - + if UIScreen.main.isCaptured { - print("🔴 Screen recording detected!") + print("Screen recording detected!") } else { - print("✅ Screen recording stopped") + print("Screen recording stopped") } } } - - /// Start monitoring for screenshots + + // Start monitoring for screenshots func startScreenshotMonitoring() { // Add observer for screenshot notifications NotificationCenter.default.addObserver( @@ -66,28 +65,25 @@ class ScreenCaptureManager: ObservableObject { self?.handleScreenshotTaken() } } - - /// Handle screenshot taken event + + // Handle screenshot taken event private func handleScreenshotTaken() { - print("📸 Screenshot taken!") - + print("Screenshot taken!") + // Reset any existing timer screenshotResetTimer?.invalidate() - + // Update the flag to trigger UI updates DispatchQueue.main.async { [weak self] in self?.screenshotTaken = true - + // Reset the flag after a delay self?.screenshotResetTimer = Timer.scheduledTimer(withTimeInterval: 3.0, repeats: false) { _ in self?.screenshotTaken = false } } - - // Here you could also log the security event, show a warning, etc. } - - /// Stop monitoring when the manager is deallocated + deinit { NotificationCenter.default.removeObserver(self) } @@ -96,19 +92,19 @@ class ScreenCaptureManager: ObservableObject { // ViewModifier to apply screen recording protection struct ScreenRecordingProtection: ViewModifier { @ObservedObject private var captureManager = ScreenCaptureManager.shared - + func body(content: Content) -> some View { ZStack { // Original content content .opacity(captureManager.isScreenBeingRecorded ? 
0 : 1) - + // Show blocking view if screen is being recorded if captureManager.isScreenBeingRecorded { ScreenRecordingBlockerView() .transition(.opacity) } - + // Show screenshot notification if screenshot was taken if captureManager.screenshotTaken { ScreenshotTakenView() @@ -116,72 +112,7 @@ struct ScreenRecordingProtection: ViewModifier { .zIndex(100) // Make sure it appears on top } } - .animation(.easeInOut(duration: 0.2), value: captureManager.isScreenBeingRecorded) - .animation(.easeInOut(duration: 0.3), value: captureManager.screenshotTaken) - } -} - -// View shown when screen recording is detected -struct ScreenRecordingBlockerView: View { - var body: some View { - ZStack { - // Background - Color.black - .edgesIgnoringSafeArea(.all) - - VStack(spacing: 30) { - // Warning icon - Image(systemName: "record.circle") - .font(.system(size: 80)) - .foregroundColor(.red) - .padding(.top, 60) - - // Warning message - Text("Screen Recording Detected") - .font(.system(size: 24, weight: .bold)) - .foregroundColor(.white) - - Text("For privacy and security reasons, screen recording is not allowed in SnapSafe.") - .font(.system(size: 16)) - .foregroundColor(.gray) - .multilineTextAlignment(.center) - .padding(.horizontal, 40) - - Text("Please stop recording to continue using the app.") - .font(.system(size: 16, weight: .semibold)) - .foregroundColor(.white) - .padding(.top, 20) - - Spacer() - } - .frame(maxWidth: .infinity, maxHeight: .infinity) - } - } -} - -// View shown when a screenshot is taken -struct ScreenshotTakenView: View { - var body: some View { - VStack { - HStack(spacing: 15) { - Image(systemName: "exclamationmark.triangle.fill") - .foregroundColor(.yellow) - .font(.system(size: 24)) - - Text("Screenshot Captured") - .font(.system(size: 16, weight: .semibold)) - .foregroundColor(.white) - - Spacer() - } - .padding() - .background(Color.black.opacity(0.8)) - .cornerRadius(10) - .padding(.horizontal) - .padding(.top, 10) - - Spacer() - } + // NO ANIMATIONS - data can leak during the transition between views! } } @@ -199,12 +130,12 @@ extension View { VStack { Text("Sensitive Content") .font(.largeTitle) - + Image(systemName: "person.crop.square") .font(.system(size: 100)) } - + // Preview with screen recording blocker ScreenRecordingBlockerView() } -} \ No newline at end of file +} diff --git a/SnapSafe/Repositories/SecurePhotoRepository.swift b/SnapSafe/Repositories/SecurePhotoRepository.swift new file mode 100644 index 0000000..5a66a4d --- /dev/null +++ b/SnapSafe/Repositories/SecurePhotoRepository.swift @@ -0,0 +1,169 @@ +// +// SecurePhotoRepository.swift +// SnapSafe +// +// Created by Bill Booth on 5/20/25. +// + +import CryptoKit +import Foundation +import UIKit + +class SecurePhoto: Identifiable, Equatable { + let id: String + let encryptedData: Data + let metadata: PhotoMetadata + + // Memory tracking + var isVisible: Bool = false + private var lastAccessTime: Date = .init() + + // Use lazy loading for images to reduce memory usage + private var _thumbnail: UIImage? + private var _fullImage: UIImage? + + // Cache for decrypted images + var cachedImage: UIImage? + var cachedThumbnail: UIImage? 
+ + // Thumbnail is loaded on demand and cached + var thumbnail: UIImage { + // Update last access time and mark as visible + lastAccessTime = Date() + isVisible = true + + // Check for cached thumbnail first + if let cachedThumbnail { + return cachedThumbnail + } + + if let legacyThumbnail = _thumbnail { + return legacyThumbnail + } + + // Fallback to placeholder + return UIImage(systemName: "photo") ?? UIImage() + } + + // Method to get thumbnail using decrypted data +// func thumbnail(from decryptedData: Data) -> UIImage? { +// // Check cache first +// if let cached = cachedThumbnail { +// return cached +// } +// +// // Generate thumbnail from decrypted data +// guard let fullImage = UIImage(data: decryptedData) else { +// return nil +// } +// +// // Generate thumbnail +// let thumbnailSize = CGSize(width: 200, height: 200) +// let renderer = UIGraphicsImageRenderer(size: thumbnailSize) +// let thumbnail = renderer.image { _ in +// fullImage.draw(in: CGRect(origin: .zero, size: thumbnailSize)) +// } +// +// // Cache the thumbnail +// cachedThumbnail = thumbnail +// return thumbnail +// } + + // Store decrypted image in cache +// func cacheDecryptedImage(_ decryptedData: Data) -> UIImage? { +// // Update last access time and mark as visible +// lastAccessTime = Date() +// isVisible = true +// +// // Check cache first +// if let cached = cachedImage { +// return cached +// } +// +// // Create image from decrypted data +// guard let image = UIImage(data: decryptedData) else { +// return nil +// } +// +// // Cache the image +// cachedImage = image +// +// // Notify memory manager +// MemoryManager.shared.reportFullImageLoaded() +// +// return image +// } + + // Mark as no longer visible in the UI + func markAsInvisible() { + isVisible = false + } + + // Get the time since this photo was last accessed + var timeSinceLastAccess: TimeInterval { + Date().timeIntervalSince(lastAccessTime) + } + + // Clear memory when no longer needed + func clearMemory(keepThumbnail: Bool = true) { + if cachedImage != nil { + cachedImage = nil + MemoryManager.shared.reportFullImageUnloaded() + } + + if _fullImage != nil { + _fullImage = nil + MemoryManager.shared.reportFullImageUnloaded() + } + + if !keepThumbnail { + if cachedThumbnail != nil { + cachedThumbnail = nil + MemoryManager.shared.reportThumbnailUnloaded() + } + + if _thumbnail != nil { + _thumbnail = nil + MemoryManager.shared.reportThumbnailUnloaded() + } + } + } + + // Computed property for backward compatibility + var isDecoy: Bool { + metadata.isDecoy + } + + // Computed property for backward compatibility - returns cached image if available + var fullImage: UIImage { + cachedImage ?? thumbnail + } + + // New repository-compatible initializer + init(id: String, encryptedData: Data, metadata: PhotoMetadata, cachedImage: UIImage? = nil, cachedThumbnail: UIImage? = nil) { + self.id = id + self.encryptedData = encryptedData + self.metadata = metadata + self.cachedImage = cachedImage + self.cachedThumbnail = cachedThumbnail + } + + // Implement Equatable + static func == (lhs: SecurePhoto, rhs: SecurePhoto) -> Bool { + lhs.id == rhs.id + } +} + +//enum SecurePhotoError: Error, LocalizedError { +// case invalidImageData +// case decryptionFailed +// +// var errorDescription: String? 
{ +// switch self { +// case .invalidImageData: +// "Invalid image data" +// case .decryptionFailed: +// "Failed to decrypt image" +// } +// } +//} diff --git a/SnapSafe/Repository/DataSources/CacheDataSource.swift b/SnapSafe/Repository/DataSources/CacheDataSource.swift new file mode 100644 index 0000000..333a2f5 --- /dev/null +++ b/SnapSafe/Repository/DataSources/CacheDataSource.swift @@ -0,0 +1,69 @@ +// +// CacheDataSource.swift +// SnapSafe +// +// Created by Claude on 5/28/25. +// + +import Foundation +import UIKit + +final class CacheDataSource: CacheDataSourceProtocol { + private let imageCache = NSCache() + private let thumbnailCache = NSCache() + private let preloadQueue = DispatchQueue(label: "com.snapsafe.cache.preload", qos: .background) + + init() { + setupCaches() + } + + func cacheImage(_ image: UIImage, forId id: String) { + imageCache.setObject(image, forKey: NSString(string: id)) + } + + func getCachedImage(forId id: String) -> UIImage? { + imageCache.object(forKey: NSString(string: id)) + } + + func cacheThumbnail(_ thumbnail: UIImage, forId id: String) { + thumbnailCache.setObject(thumbnail, forKey: NSString(string: "\(id)_thumb")) + } + + func getCachedThumbnail(forId id: String) -> UIImage? { + thumbnailCache.object(forKey: NSString(string: "\(id)_thumb")) + } + + func clearCache() { + imageCache.removeAllObjects() + thumbnailCache.removeAllObjects() + } + + func clearCacheForId(_ id: String) { + imageCache.removeObject(forKey: NSString(string: id)) + thumbnailCache.removeObject(forKey: NSString(string: "\(id)_thumb")) + } + + func preloadImages(ids: [String], priority: CachePriority) { + let qos: DispatchQoS = switch priority { + case .high: .userInitiated + case .normal: .default + case .low: .background + } + + DispatchQueue.global(qos: qos.qosClass).async { + for id in ids { + // Preloading would be handled by the repository + // This is a placeholder for preload coordination + } + } + } + + private func setupCaches() { + // Configure cache limits + imageCache.countLimit = 20 // Store up to 20 full-size images + imageCache.totalCostLimit = 100 * 1024 * 1024 // 100MB for full images + + thumbnailCache.countLimit = 100 // Store up to 100 thumbnails + thumbnailCache.totalCostLimit = 20 * 1024 * 1024 // 20MB for thumbnails + } +} diff --git a/SnapSafe/Repository/DataSources/EncryptionDataSource.swift b/SnapSafe/Repository/DataSources/EncryptionDataSource.swift new file mode 100644 index 0000000..bf0a508 --- /dev/null +++ b/SnapSafe/Repository/DataSources/EncryptionDataSource.swift @@ -0,0 +1,33 @@ +// +// EncryptionDataSource.swift +// SnapSafe +// +// Created by Claude on 5/28/25. +// + +import CryptoKit +import Foundation + +final class EncryptionDataSource: EncryptionDataSourceProtocol { + private let keyManager: KeyManager + + init(keyManager: KeyManager) { + self.keyManager = keyManager + } + + func encryptImageData(_ data: Data) async throws -> Data { + let key = try await keyManager.getOrCreateEncryptionKey() + let sealedBox = try AES.GCM.seal(data, using: key) + return sealedBox.combined! 
+ } + + func decryptImageData(_ encryptedData: Data) async throws -> Data { + let key = try await keyManager.getOrCreateEncryptionKey() + let sealedBox = try AES.GCM.SealedBox(combined: encryptedData) + return try AES.GCM.open(sealedBox, using: key) + } + + func generateSecureKey() async throws -> SymmetricKey { + SymmetricKey(size: .bits256) + } +} diff --git a/SnapSafe/Repository/DataSources/FileSystemDataSource.swift b/SnapSafe/Repository/DataSources/FileSystemDataSource.swift new file mode 100644 index 0000000..783fa23 --- /dev/null +++ b/SnapSafe/Repository/DataSources/FileSystemDataSource.swift @@ -0,0 +1,63 @@ +// +// FileSystemDataSource.swift +// SnapSafe +// +// Created by Claude on 5/28/25. +// + +import Foundation + +final class FileSystemDataSource: FileSystemDataSourceProtocol { + private let documentsDirectory: URL + private let photosDirectory: URL + + init() throws { + documentsDirectory = try FileManager.default.url( + for: .documentDirectory, + in: .userDomainMask, + appropriateFor: nil, + create: true + ) + photosDirectory = documentsDirectory.appendingPathComponent("SecurePhotos") + try createPhotosDirectoryIfNeeded() + } + + func saveImageData(_ data: Data, withId id: String) async throws -> URL { + let fileURL = photosDirectory.appendingPathComponent("\(id).enc") + try data.write(to: fileURL) + return fileURL + } + + func loadImageData(withId id: String) async throws -> Data { + let fileURL = photosDirectory.appendingPathComponent("\(id).enc") + return try Data(contentsOf: fileURL) + } + + func deleteImageData(withId id: String) async throws { + let fileURL = photosDirectory.appendingPathComponent("\(id).enc") + try FileManager.default.removeItem(at: fileURL) + } + + func getAllImageIds() async throws -> [String] { + let contents = try FileManager.default.contentsOfDirectory(at: photosDirectory, includingPropertiesForKeys: nil) + return contents + .filter { $0.pathExtension == "enc" } + .map { $0.deletingPathExtension().lastPathComponent } + } + + func getImageURL(withId id: String) -> URL? { + let fileURL = photosDirectory.appendingPathComponent("\(id).enc") + return FileManager.default.fileExists(atPath: fileURL.path) ? fileURL : nil + } + + func imageExists(withId id: String) async -> Bool { + let fileURL = photosDirectory.appendingPathComponent("\(id).enc") + return FileManager.default.fileExists(atPath: fileURL.path) + } + + private func createPhotosDirectoryIfNeeded() throws { + if !FileManager.default.fileExists(atPath: photosDirectory.path) { + try FileManager.default.createDirectory(at: photosDirectory, withIntermediateDirectories: true) + } + } +} diff --git a/SnapSafe/Repository/DataSources/MetadataDataSource.swift b/SnapSafe/Repository/DataSources/MetadataDataSource.swift new file mode 100644 index 0000000..e1a0c30 --- /dev/null +++ b/SnapSafe/Repository/DataSources/MetadataDataSource.swift @@ -0,0 +1,93 @@ +// +// MetadataDataSource.swift +// SnapSafe +// +// Created by Claude on 5/28/25. 
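For orientation, a minimal sketch of how the two data sources above are meant to compose on the save path: encrypt the raw image bytes first, then persist the ciphertext under the photo id. The helper name and parameter labels here are illustrative, not part of the patch.

import Foundation

// Illustrative helper (assumed, not in the patch): encrypt raw JPEG bytes,
// then write the ciphertext to disk keyed by the photo's id.
func saveEncryptedImage(
    _ imageData: Data,
    id: String,
    encryption: EncryptionDataSourceProtocol,
    files: FileSystemDataSourceProtocol
) async throws -> URL {
    let ciphertext = try await encryption.encryptImageData(imageData)
    return try await files.saveImageData(ciphertext, withId: id)
}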
+// + +import Foundation + +final class MetadataDataSource: MetadataDataSourceProtocol { + private let documentsDirectory: URL + private let metadataDirectory: URL + + init() throws { + documentsDirectory = try FileManager.default.url( + for: .documentDirectory, + in: .userDomainMask, + appropriateFor: nil, + create: true + ) + metadataDirectory = documentsDirectory.appendingPathComponent("PhotoMetadata") + try createMetadataDirectoryIfNeeded() + } + + func saveMetadata(_ metadata: PhotoMetadata) async throws { + let fileURL = metadataDirectory.appendingPathComponent("\(metadata.id).json") + let data = try JSONEncoder().encode(metadata) + try data.write(to: fileURL) + } + + func loadMetadata(withId id: String) async throws -> PhotoMetadata? { + let fileURL = metadataDirectory.appendingPathComponent("\(id).json") + guard FileManager.default.fileExists(atPath: fileURL.path) else { return nil } + let data = try Data(contentsOf: fileURL) + return try JSONDecoder().decode(PhotoMetadata.self, from: data) + } + + func loadAllMetadata() async throws -> [PhotoMetadata] { + let contents = try FileManager.default.contentsOfDirectory(at: metadataDirectory, includingPropertiesForKeys: nil) + var allMetadata: [PhotoMetadata] = [] + + for fileURL in contents.filter({ $0.pathExtension == "json" }) { + let data = try Data(contentsOf: fileURL) + let metadata = try JSONDecoder().decode(PhotoMetadata.self, from: data) + allMetadata.append(metadata) + } + + return allMetadata.sorted { $0.creationDate > $1.creationDate } + } + + func deleteMetadata(withId id: String) async throws { + let fileURL = metadataDirectory.appendingPathComponent("\(id).json") + try FileManager.default.removeItem(at: fileURL) + } + + func updateMetadata(_ metadata: PhotoMetadata) async throws { + let updatedMetadata = PhotoMetadata( + id: metadata.id, + creationDate: metadata.creationDate, + modificationDate: Date(), + fileSize: metadata.fileSize, + faces: metadata.faces, + maskMode: metadata.maskMode + ) + try await saveMetadata(updatedMetadata) + } + + func findMetadata(matching predicate: PhotoPredicate) async throws -> [PhotoMetadata] { + let allMetadata = try await loadAllMetadata() + + return allMetadata.filter { metadata in + if let dateRange = predicate.dateRange { + guard dateRange.contains(metadata.creationDate) else { return false } + } + + if let hasFaces = predicate.hasFaces { + guard (metadata.faces.count > 0) == hasFaces else { return false } + } + + if let maskMode = predicate.maskMode { + guard metadata.maskMode == maskMode else { return false } + } + + return true + } + } + + private func createMetadataDirectoryIfNeeded() throws { + if !FileManager.default.fileExists(atPath: metadataDirectory.path) { + try FileManager.default.createDirectory(at: metadataDirectory, withIntermediateDirectories: true) + } + } +} diff --git a/SnapSafe/Repository/Protocols/DataSourceProtocols.swift b/SnapSafe/Repository/Protocols/DataSourceProtocols.swift new file mode 100644 index 0000000..e9aa6dd --- /dev/null +++ b/SnapSafe/Repository/Protocols/DataSourceProtocols.swift @@ -0,0 +1,58 @@ +// +// DataSourceProtocols.swift +// SnapSafe +// +// Created by Claude on 5/28/25. 
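`findMetadata(matching:)` above treats every nil field of the predicate as "don't care", so a predicate that only sets `hasFaces` matches any date and any mask mode. A usage sketch, assuming `PhotoPredicate` exposes the memberwise initializer used later in this patch (`dateRange:hasFaces:maskMode:`), that `metadataDataSource` is any `MetadataDataSourceProtocol` instance, and that this runs inside an async context:

    // Photos from the last seven days that contain at least one detected face.
    let now = Date()
    let lastWeek = now.addingTimeInterval(-7 * 24 * 60 * 60) ... now
    let predicate = PhotoPredicate(dateRange: lastWeek, hasFaces: true, maskMode: nil)
    let recentFacePhotos = try await metadataDataSource.findMetadata(matching: predicate)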
+// + +import CryptoKit +import Foundation +import UIKit + +// MARK: - FileSystem Data Source Protocol + +protocol FileSystemDataSourceProtocol { + func saveImageData(_ data: Data, withId id: String) async throws -> URL + func loadImageData(withId id: String) async throws -> Data + func deleteImageData(withId id: String) async throws + func getAllImageIds() async throws -> [String] + func getImageURL(withId id: String) -> URL? + func imageExists(withId id: String) async -> Bool +} + +// MARK: - Encryption Data Source Protocol + +protocol EncryptionDataSourceProtocol { + func encryptImageData(_ data: Data) async throws -> Data + func decryptImageData(_ encryptedData: Data) async throws -> Data + func generateSecureKey() async throws -> SymmetricKey +} + +// MARK: - Metadata Data Source Protocol + +protocol MetadataDataSourceProtocol { + func saveMetadata(_ metadata: PhotoMetadata) async throws + func loadMetadata(withId id: String) async throws -> PhotoMetadata? + func loadAllMetadata() async throws -> [PhotoMetadata] + func deleteMetadata(withId id: String) async throws + func updateMetadata(_ metadata: PhotoMetadata) async throws + func findMetadata(matching predicate: PhotoPredicate) async throws -> [PhotoMetadata] +} + +// MARK: - Cache Data Source Protocol + +enum CachePriority { + case high + case normal + case low +} + +protocol CacheDataSourceProtocol { + func cacheImage(_ image: UIImage, forId id: String) + func getCachedImage(forId id: String) -> UIImage? + func cacheThumbnail(_ thumbnail: UIImage, forId id: String) + func getCachedThumbnail(forId id: String) -> UIImage? + func clearCache() + func clearCacheForId(_ id: String) + func preloadImages(ids: [String], priority: CachePriority) +} diff --git a/SnapSafe/Repository/SecureImageRepository.swift b/SnapSafe/Repository/SecureImageRepository.swift new file mode 100644 index 0000000..84016e4 --- /dev/null +++ b/SnapSafe/Repository/SecureImageRepository.swift @@ -0,0 +1,293 @@ +// +// SecureImageRepository.swift +// SnapSafe +// +// Created by Claude on 5/28/25. 
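The four protocols above are the seams the repository below is assembled from, so tests can swap any one of them for a stub. A composition sketch under stated assumptions (`KeyManager()` is assumed to be constructible directly; the app may prefer to inject one instead):

    func makeSecureImageRepository() throws -> SecureImageRepository {
        try SecureImageRepository(
            fileSystemDataSource: FileSystemDataSource(),   // throws if the Documents directory is unavailable
            encryptionDataSource: EncryptionDataSource(keyManager: KeyManager()), // KeyManager() is an assumption
            metadataDataSource: MetadataDataSource(),       // throws for the same reason
            cacheDataSource: CacheDataSource()
        )
    }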
+// + +import Foundation +import UIKit + +protocol SecureImageRepositoryProtocol { + // Core CRUD operations + func savePhoto(_ imageData: Data, metadata: PhotoMetadata) async throws -> SecurePhoto + func loadPhoto(withId id: String) async throws -> SecurePhoto + func loadAllPhotos() async throws -> [SecurePhoto] + func deletePhoto(withId id: String) async throws + + // Batch operations + func loadPhotosWithPredicate(_ predicate: PhotoPredicate) async throws -> [SecurePhoto] + func preloadAdjacentPhotos(currentId: String, adjacentCount: Int) async + + // Import/Export + func importFromCamera(_ imageData: Data) async throws -> SecurePhoto + func importFromLibrary(_ imageData: Data) async throws -> SecurePhoto + func exportPhoto(_ photo: SecurePhoto, format: ExportFormat) async throws -> Data + + // Face detection integration + func updateFaceDetectionResults(_ photoId: String, faces: [DetectedFace]) async throws -> SecurePhoto + + // Cache management + func preloadThumbnails(for photoIds: [String]) async + func clearCache() +} + +final class SecureImageRepository: SecureImageRepositoryProtocol { + private let fileSystemDataSource: FileSystemDataSourceProtocol + private let encryptionDataSource: EncryptionDataSourceProtocol + private let metadataDataSource: MetadataDataSourceProtocol + private let cacheDataSource: CacheDataSourceProtocol + + init( + fileSystemDataSource: FileSystemDataSourceProtocol, + encryptionDataSource: EncryptionDataSourceProtocol, + metadataDataSource: MetadataDataSourceProtocol, + cacheDataSource: CacheDataSourceProtocol + ) { + self.fileSystemDataSource = fileSystemDataSource + self.encryptionDataSource = encryptionDataSource + self.metadataDataSource = metadataDataSource + self.cacheDataSource = cacheDataSource + } + + func savePhoto(_ imageData: Data, metadata: PhotoMetadata) async throws -> SecurePhoto { + // 1. Encrypt the image data + let encryptedData = try await encryptionDataSource.encryptImageData(imageData) + + // 2. Save encrypted data to file system + let fileURL = try await fileSystemDataSource.saveImageData(encryptedData, withId: metadata.id) + + // 3. Save metadata + try await metadataDataSource.saveMetadata(metadata) + + // 4. Create and cache thumbnail + let image = UIImage(data: imageData)! + let thumbnail = generateThumbnail(from: image) + cacheDataSource.cacheThumbnail(thumbnail, forId: metadata.id) + + // 5. Create SecurePhoto object + return SecurePhoto( + id: metadata.id, + encryptedData: encryptedData, + metadata: metadata, + cachedThumbnail: thumbnail + ) + } + + func loadPhoto(withId id: String) async throws -> SecurePhoto { + // 1. Load metadata + guard let metadata = try await metadataDataSource.loadMetadata(withId: id) else { + throw SecureImageRepositoryError.photoNotFound(id: id) + } + + // 2. Check cache first + if let cachedImage = cacheDataSource.getCachedImage(forId: id) { + let thumbnail = cacheDataSource.getCachedThumbnail(forId: id) ?? generateThumbnail(from: cachedImage) + return SecurePhoto( + id: id, + encryptedData: Data(), // Don't need encrypted data if we have cached image + metadata: metadata, + cachedImage: cachedImage, + cachedThumbnail: thumbnail + ) + } + + // 3. Load encrypted data from file system + let encryptedData = try await fileSystemDataSource.loadImageData(withId: id) + + // 4. 
Check for cached thumbnail + let cachedThumbnail = cacheDataSource.getCachedThumbnail(forId: id) + + return SecurePhoto( + id: id, + encryptedData: encryptedData, + metadata: metadata, + cachedThumbnail: cachedThumbnail + ) + } + + func loadAllPhotos() async throws -> [SecurePhoto] { + let allMetadata = try await metadataDataSource.loadAllMetadata() + var photos: [SecurePhoto] = [] + + for metadata in allMetadata { + do { + let photo = try await loadPhoto(withId: metadata.id) + photos.append(photo) + } catch { + // Log error but continue loading other photos + print("Error loading photo \(metadata.id): \(error)") + continue + } + } + + return photos + } + + func deletePhoto(withId id: String) async throws { + // 1. Delete from file system + try await fileSystemDataSource.deleteImageData(withId: id) + + // 2. Delete metadata + try await metadataDataSource.deleteMetadata(withId: id) + + // 3. Clear from cache + cacheDataSource.clearCacheForId(id) + } + + func loadPhotosWithPredicate(_ predicate: PhotoPredicate) async throws -> [SecurePhoto] { + let matchingMetadata = try await metadataDataSource.findMetadata(matching: predicate) + var photos: [SecurePhoto] = [] + + for metadata in matchingMetadata { + do { + let photo = try await loadPhoto(withId: metadata.id) + photos.append(photo) + } catch { + print("Error loading photo \(metadata.id): \(error)") + continue + } + } + + return photos + } + + func preloadAdjacentPhotos(currentId: String, adjacentCount: Int = 2) async { + do { + let allMetadata = try await metadataDataSource.loadAllMetadata() + guard let currentIndex = allMetadata.firstIndex(where: { $0.id == currentId }) else { return } + + // Determine adjacent photo IDs + var adjacentIds: [String] = [] + for offset in 1 ... adjacentCount { + if currentIndex - offset >= 0 { + adjacentIds.append(allMetadata[currentIndex - offset].id) + } + if currentIndex + offset < allMetadata.count { + adjacentIds.append(allMetadata[currentIndex + offset].id) + } + } + + // Preload adjacent photos in background + Task { + for id in adjacentIds { + do { + let photo = try await loadPhoto(withId: id) + if let image = try? await photo.decryptedImage(using: encryptionDataSource) { + cacheDataSource.cacheImage(image, forId: id) + } + } catch { + print("Error preloading photo \(id): \(error)") + } + } + } + } catch { + print("Error in preloadAdjacentPhotos: \(error)") + } + } + + func importFromCamera(_ imageData: Data) async throws -> SecurePhoto { + let id = UUID().uuidString + let metadata = PhotoMetadata( + id: id, + fileSize: imageData.count + ) + return try await savePhoto(imageData, metadata: metadata) + } + + func importFromLibrary(_ imageData: Data) async throws -> SecurePhoto { + let id = UUID().uuidString + let metadata = PhotoMetadata( + id: id, + fileSize: imageData.count + ) + return try await savePhoto(imageData, metadata: metadata) + } + + func exportPhoto(_ photo: SecurePhoto, format: ExportFormat) async throws -> Data { + // 1. Get decrypted image + let image = try await photo.decryptedImage(using: encryptionDataSource) + + // 2. 
Convert to requested format + switch format { + case let .jpeg(quality): + guard let data = image.jpegData(compressionQuality: quality) else { + throw SecureImageRepositoryError.exportFailed(reason: "Failed to convert to JPEG") + } + return data + + case .png: + guard let data = image.pngData() else { + throw SecureImageRepositoryError.exportFailed(reason: "Failed to convert to PNG") + } + return data + + case .heic: + // HEIC export would require additional implementation + throw SecureImageRepositoryError.exportFailed(reason: "HEIC export not yet implemented") + } + } + + func updateFaceDetectionResults(_ photoId: String, faces: [DetectedFace]) async throws -> SecurePhoto { + guard var metadata = try await metadataDataSource.loadMetadata(withId: photoId) else { + throw SecureImageRepositoryError.photoNotFound(id: photoId) + } + + // Update metadata with new faces + metadata = PhotoMetadata( + id: metadata.id, + creationDate: metadata.creationDate, + modificationDate: Date(), + fileSize: metadata.fileSize, + faces: faces, + maskMode: metadata.maskMode + ) + + try await metadataDataSource.updateMetadata(metadata) + return try await loadPhoto(withId: photoId) + } + + func preloadThumbnails(for photoIds: [String]) async { + for photoId in photoIds { + do { + let photo = try await loadPhoto(withId: photoId) + if let thumbnail = try? await photo.thumbnail(using: encryptionDataSource) { + cacheDataSource.cacheThumbnail(thumbnail, forId: photoId) + } + } catch { + print("Error preloading thumbnail for \(photoId): \(error)") + } + } + } + + func clearCache() { + cacheDataSource.clearCache() + } + + private func generateThumbnail(from image: UIImage, size: CGSize = CGSize(width: 200, height: 200)) -> UIImage { + let renderer = UIGraphicsImageRenderer(size: size) + return renderer.image { _ in + image.draw(in: CGRect(origin: .zero, size: size)) + } + } +} + +enum SecureImageRepositoryError: Error, LocalizedError { + case photoNotFound(id: String) + case exportFailed(reason: String) + case encryptionFailed(reason: String) + case fileSystemError(reason: String) + + var errorDescription: String? { + switch self { + case let .photoNotFound(id): + "Photo not found with ID: \(id)" + case let .exportFailed(reason): + "Export failed: \(reason)" + case let .encryptionFailed(reason): + "Encryption failed: \(reason)" + case let .fileSystemError(reason): + "File system error: \(reason)" + } + } +} diff --git a/SnapSafe/SnapSafeApp.swift b/SnapSafe/SnapSafeApp.swift index 55681d4..9f7a8ba 100644 --- a/SnapSafe/SnapSafeApp.swift +++ b/SnapSafe/SnapSafeApp.swift @@ -1,5 +1,5 @@ // -// Snap_SafeApp.swift +// SnapSafeApp.swift // SnapSafe // // Created by Bill Booth on 5/2/25. @@ -10,7 +10,7 @@ import SwiftUI @main struct SnapSafeApp: App { @AppStorage("appearanceMode") private var appearanceMode: AppearanceMode = .system - + var body: some Scene { WindowGroup { ContentView() diff --git a/SnapSafe/UseCases/PhotoExportUseCase.swift b/SnapSafe/UseCases/PhotoExportUseCase.swift new file mode 100644 index 0000000..48911e5 --- /dev/null +++ b/SnapSafe/UseCases/PhotoExportUseCase.swift @@ -0,0 +1,66 @@ +// +// PhotoExportUseCase.swift +// SnapSafe +// +// Created by Claude on 5/28/25. 
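`ExportFormat` is not declared anywhere in this patch; from the switch in `exportPhoto(_:format:)` above and the `.jpeg(quality: 0.9)` defaults used below, it presumably looks roughly like this (a sketch of the assumed shape, not the real declaration):

    import CoreGraphics

    enum ExportFormat {
        case jpeg(quality: CGFloat) // forwarded to UIImage.jpegData(compressionQuality:)
        case png
        case heic                   // currently rejected by the repository as not yet implemented
    }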
+// + +import Foundation +import UIKit + +final class PhotoExportUseCase { + private let repository: SecureImageRepositoryProtocol + + init(repository: SecureImageRepositoryProtocol) { + self.repository = repository + } + + func exportPhoto(_ photo: SecurePhoto, format: ExportFormat = .jpeg(quality: 0.9)) async throws -> Data { + try await repository.exportPhoto(photo, format: format) + } + + func exportPhotoToPhotoLibrary(_ photo: SecurePhoto) async throws { + let imageData = try await repository.exportPhoto(photo, format: .jpeg(quality: 0.9)) + + guard let image = UIImage(data: imageData) else { + throw PhotoExportError.exportFailed(reason: "Failed to create image from exported data") + } + + // Save to photo library (this would require PhotosFramework integration) + // For now, just validate the export worked + print("Successfully exported photo \(photo.id) to photo library") + } + + func exportMultiplePhotos(_ photos: [SecurePhoto], format: ExportFormat = .jpeg(quality: 0.9)) async throws -> [String: Data] { + var exportedPhotos: [String: Data] = [:] + + for photo in photos { + do { + let data = try await exportPhoto(photo, format: format) + exportedPhotos[photo.id] = data + } catch { + print("Failed to export photo \(photo.id): \(error)") + throw PhotoExportError.batchExportFailed(photoId: photo.id, error: error) + } + } + + return exportedPhotos + } +} + +enum PhotoExportError: Error, LocalizedError { + case exportFailed(reason: String) + case batchExportFailed(photoId: String, error: Error) + case photoLibraryAccessDenied + + var errorDescription: String? { + switch self { + case let .exportFailed(reason): + "Export failed: \(reason)" + case let .batchExportFailed(photoId, error): + "Batch export failed for photo \(photoId): \(error.localizedDescription)" + case .photoLibraryAccessDenied: + "Access to photo library denied" + } + } +} diff --git a/SnapSafe/UseCases/PhotoImportUseCase.swift b/SnapSafe/UseCases/PhotoImportUseCase.swift new file mode 100644 index 0000000..37b4218 --- /dev/null +++ b/SnapSafe/UseCases/PhotoImportUseCase.swift @@ -0,0 +1,59 @@ +// +// PhotoImportUseCase.swift +// SnapSafe +// +// Created by Claude on 5/28/25. +// + +import Foundation +import UIKit + +final class PhotoImportUseCase { + private let repository: SecureImageRepositoryProtocol + + init(repository: SecureImageRepositoryProtocol) { + self.repository = repository + } + + func importFromCamera(_ image: UIImage) async throws -> SecurePhoto { + guard let imageData = image.jpegData(compressionQuality: 0.9) else { + throw PhotoImportError.invalidImageData + } + return try await repository.importFromCamera(imageData) + } + + func importFromLibrary(_ image: UIImage) async throws -> SecurePhoto { + guard let imageData = image.jpegData(compressionQuality: 0.9) else { + throw PhotoImportError.invalidImageData + } + return try await repository.importFromLibrary(imageData) + } + + func importImageData(_ data: Data, source: ImportSource) async throws -> SecurePhoto { + switch source { + case .camera: + try await repository.importFromCamera(data) + case .photoLibrary: + try await repository.importFromLibrary(data) + } + } +} + +enum ImportSource { + case camera + case photoLibrary +} + +enum PhotoImportError: Error, LocalizedError { + case invalidImageData + case importFailed(reason: String) + + var errorDescription: String? 
{ + switch self { + case .invalidImageData: + "Invalid image data provided" + case let .importFailed(reason): + "Import failed: \(reason)" + } + } +} diff --git a/SnapSafe/UseCases/PhotoLibraryUseCase.swift b/SnapSafe/UseCases/PhotoLibraryUseCase.swift new file mode 100644 index 0000000..24e133f --- /dev/null +++ b/SnapSafe/UseCases/PhotoLibraryUseCase.swift @@ -0,0 +1,41 @@ +// +// PhotoLibraryUseCase.swift +// SnapSafe +// +// Created by Claude on 5/28/25. +// + +import Foundation + +final class PhotoLibraryUseCase { + private let repository: SecureImageRepositoryProtocol + + init(repository: SecureImageRepositoryProtocol) { + self.repository = repository + } + + func loadAllPhotos() async throws -> [SecurePhoto] { + try await repository.loadAllPhotos() + } + + func loadPhoto(withId id: String) async throws -> SecurePhoto { + try await repository.loadPhoto(withId: id) + } + + func deletePhoto(withId id: String) async throws { + try await repository.deletePhoto(withId: id) + } + + func searchPhotos(dateRange: ClosedRange? = nil, hasFaces: Bool? = nil, maskMode: MaskMode? = nil) async throws -> [SecurePhoto] { + let predicate = PhotoPredicate(dateRange: dateRange, hasFaces: hasFaces, maskMode: maskMode) + return try await repository.loadPhotosWithPredicate(predicate) + } + + func preloadAdjacentPhotos(currentId: String, adjacentCount: Int = 2) async { + await repository.preloadAdjacentPhotos(currentId: currentId, adjacentCount: adjacentCount) + } + + func updateFaceDetectionResults(photoId: String, faces: [DetectedFace]) async throws -> SecurePhoto { + try await repository.updateFaceDetectionResults(photoId, faces: faces) + } +} diff --git a/SnapSafe/AuthenticationOverlayView.swift b/SnapSafe/Views/AuthenticationOverlayView.swift similarity index 82% rename from SnapSafe/AuthenticationOverlayView.swift rename to SnapSafe/Views/AuthenticationOverlayView.swift index a620d13..8ef8947 100644 --- a/SnapSafe/AuthenticationOverlayView.swift +++ b/SnapSafe/Views/AuthenticationOverlayView.swift @@ -7,18 +7,18 @@ import SwiftUI -/// A fullscreen overlay that forces PIN authentication +// A fullscreen overlay that forces PIN authentication struct AuthenticationOverlayView: View { @ObservedObject private var appStateCoordinator = AppStateCoordinator.shared @State private var isAuthenticated = false - + var body: some View { ZStack { // Full screen cover with dark background Color.black .opacity(0.98) .edgesIgnoringSafeArea(.all) - + // PIN verification view PINVerificationView(isAuthenticated: $isAuthenticated) .onChange(of: isAuthenticated) { _, authenticated in @@ -32,16 +32,14 @@ struct AuthenticationOverlayView: View { } } -/// ViewModifier to add authentication overlay when needed +// ViewModifier to add authentication overlay when needed struct AuthenticationOverlay: ViewModifier { @ObservedObject private var appStateCoordinator = AppStateCoordinator.shared - + func body(content: Content) -> some View { ZStack { - // Main content content - - // Authentication overlay when needed + if appStateCoordinator.needsAuthentication { AuthenticationOverlayView() } @@ -51,8 +49,8 @@ struct AuthenticationOverlay: ViewModifier { // Extension to make the modifier easier to use extension View { - /// Add authentication overlay that will appear when authentication is required + // Add authentication overlay that will appear when authentication is required func withAuthenticationOverlay() -> some View { modifier(AuthenticationOverlay()) } -} \ No newline at end of file +} diff --git a/SnapSafe/ContentView.swift 
b/SnapSafe/Views/CameraView.swift similarity index 50% rename from SnapSafe/ContentView.swift rename to SnapSafe/Views/CameraView.swift index d75ca36..aafec05 100644 --- a/SnapSafe/ContentView.swift +++ b/SnapSafe/Views/CameraView.swift @@ -1,317 +1,19 @@ // -// ContentView.swift +// CameraView.swift // SnapSafe // -// Created by Bill Booth on 5/2/25. +// Created by Bill Booth on 6/10/25. // import AVFoundation -import CoreGraphics -import ImageIO import PhotosUI import SwiftUI - -struct ContentView: View { - @StateObject private var cameraModel = CameraModel() - @StateObject private var locationManager = LocationManager.shared - @ObservedObject private var pinManager = PINManager.shared - @ObservedObject private var appStateCoordinator = AppStateCoordinator.shared - @State private var isShowingSettings = false - @State private var isShowingGallery = false - @State private var isAuthenticated = false - @State private var isPINSetupComplete = false - @State private var isShutterAnimating = false - @Environment(\.scenePhase) private var scenePhase - @ObservedObject private var screenCaptureManager = ScreenCaptureManager.shared - - // Track device orientation changes - @State private var deviceOrientation = UIDevice.current.orientation - - var body: some View { - ZStack { - if !pinManager.isPINSet { - // First time setup - show PIN setup screen - PINSetupView(isPINSetupComplete: $isPINSetupComplete) - } else if !isAuthenticated || appStateCoordinator.needsAuthentication { - // PIN verification screen - PINVerificationView(isAuthenticated: $isAuthenticated) - .onChange(of: isAuthenticated) { _, authenticated in - if authenticated { - // Reset the coordinator's auth state when authenticated - appStateCoordinator.authenticationComplete() - } - } - } else { - // Camera view - now contains both the camera preview and focus indicator - CameraView(cameraModel: cameraModel) - .edgesIgnoringSafeArea(.all) - - // Shutter animation overlay - if isShutterAnimating { - Color.black - .opacity(0.8) - .edgesIgnoringSafeArea(.all) - .transition(.opacity) - } - - // Camera controls overlay - VStack { - // Top control bar with flash toggle and camera switch - HStack { - // Camera switch button - Button(action: { - toggleCameraPosition() - }) { - Image(systemName: "arrow.triangle.2.circlepath.camera") - .font(.system(size: 20)) - .foregroundColor(.white) - .padding(12) - .background(Color.black.opacity(0.6)) - .clipShape(Circle()) - } - .padding(.top, 16) - .padding(.leading, 16) - - Spacer() - - // Flash control button - disabled for front camera - Button(action: { - toggleFlashMode() - }) { - Image(systemName: flashIcon(for: cameraModel.flashMode)) - .font(.system(size: 20)) - .foregroundColor(cameraModel.cameraPosition == .front ? .gray : .white) - .padding(12) - .background(Color.black.opacity(0.6)) - .clipShape(Circle()) - } - .disabled(cameraModel.cameraPosition == .front) - .padding(.top, 16) - .padding(.trailing, 16) - } - - Spacer() - - // Zoom level indicator - ZStack { - Capsule() - .fill(Color.black.opacity(0.6)) - .frame(width: 80, height: 30) - - Text(String(format: "%.1fx", cameraModel.zoomFactor)) - .font(.system(size: 14, weight: .bold)) - .foregroundColor(.white) - } - // Show for all zoom levels (including 0.5x for wide angle) - .opacity(cameraModel.zoomFactor != 1.0 ? 
1.0 : 0.0) - .animation(.easeInOut, value: cameraModel.zoomFactor) - .padding(.bottom, 10) - // Rotate the zoom indicator based on device orientation - .rotationEffect(getRotationAngle()) - // Separate animation for rotation to ensure it responds to device orientation - // changes independent of zoom changes - .animation(.easeInOut, value: deviceOrientation) - - HStack { - Button(action: { - isShowingGallery = true - }) { - Image(systemName: "photo.on.rectangle") - .font(.system(size: 24)) - .foregroundColor(.white) - .padding() - .background(Color.black.opacity(0.6)) - .clipShape(Circle()) - } - .padding() - - Spacer() - - // Capture button - Button(action: { - triggerShutterEffect() - cameraModel.capturePhoto() - }) { - Circle() - .strokeBorder(Color.white, lineWidth: 4) - .frame(width: 80, height: 80) - .background(Circle().fill(Color.white)) - .padding() - } - - Spacer() - Button(action: { - isShowingSettings = true - }) { - Image(systemName: "gear") - .font(.system(size: 24)) - .foregroundColor(.white) - .padding() - .background(Color.black.opacity(0.6)) - .clipShape(Circle()) - } - .padding() - } - .padding(.bottom) - } - } - } - .animation(.easeInOut(duration: 0.1), value: isShutterAnimating) - .sheet(isPresented: $isShowingSettings) { - SettingsView() - .obscuredWhenInactive() - .screenCaptureProtected() - .handleAppState(isPresented: $isShowingSettings) - .withAuthenticationOverlay() - } - .fullScreenCover(isPresented: $isShowingGallery) { - NavigationView { - SecureGalleryView(onDismiss: { - isShowingGallery = false - }) - .obscuredWhenInactive() - .screenCaptureProtected() - .handleAppState(isPresented: $isShowingGallery) - .withAuthenticationOverlay() - } - } - // Apply privacy shield when app is inactive (task switcher, control center, etc.) 
- .obscuredWhenInactive() - // Protect against screen recording and screenshots - .screenCaptureProtected() - // Monitor PIN setup completion - .onChange(of: isPINSetupComplete) { _, completed in - if completed { - print("PIN setup complete, authenticating user") - isAuthenticated = true - // Reset flag to avoid issues on subsequent launches - DispatchQueue.main.asyncAfter(deadline: .now() + 1) { - isPINSetupComplete = false - } - } - } - .onAppear { - print("ContentView appeared - PIN is set: \(pinManager.isPINSet), require PIN on resume: \(pinManager.requirePINOnResume)") - - // Check if PIN is set, and only auto-authenticate if PIN check is not required - if pinManager.isPINSet { - // Only auto-authenticate if PIN verification is not required - isAuthenticated = !pinManager.requirePINOnResume - print("PIN is set, auto-authentication set to: \(isAuthenticated)") - } else { - print("PIN is not set, showing PIN setup screen") - } - - // Start monitoring orientation changes - UIDevice.current.beginGeneratingDeviceOrientationNotifications() - NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, - object: nil, - queue: .main) { _ in - self.deviceOrientation = UIDevice.current.orientation - } - } - .onDisappear { - // Stop monitoring orientation changes - NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: nil) - UIDevice.current.endGeneratingDeviceOrientationNotifications() - } - // Scene phase monitoring for background/foreground transitions - .onChange(of: scenePhase) { _, newPhase in - print("ContentView scene phase changed to: \(newPhase)") - - if newPhase == .active { - // App is becoming active - let coordinator handle this - appStateCoordinator.handleWillEnterForeground() - } else if newPhase == .background { - // App is going to background - let coordinator handle this - appStateCoordinator.handleDidEnterBackground() - } else if newPhase == .inactive { - // Transitional state - print("App becoming inactive") - } - } - // Monitor authentication state from coordinator - .onChange(of: appStateCoordinator.needsAuthentication) { _, needsAuth in - if needsAuth { - // Force re-authentication - isAuthenticated = false - } - } - // Monitor dismiss all sheets signal - .onChange(of: appStateCoordinator.dismissAllSheets) { _, shouldDismiss in - if shouldDismiss { - // Dismiss all sheets - isShowingSettings = false - isShowingGallery = false - - // Reset flag after a short delay - DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { - appStateCoordinator.resetAuthenticationState() - } - } - } - } - - private func triggerShutterEffect() { - isShutterAnimating = true - DispatchQueue.main.asyncAfter(deadline: .now() + 0.15) { - isShutterAnimating = false - } - } - - private func toggleFlashMode() { - switch cameraModel.flashMode { - case .auto: - cameraModel.flashMode = .on - case .on: - cameraModel.flashMode = .off - case .off: - cameraModel.flashMode = .auto - @unknown default: - cameraModel.flashMode = .auto - } - } - - // Toggle between front and back cameras - private func toggleCameraPosition() { - // Toggle between front and back cameras - let newPosition: AVCaptureDevice.Position = (cameraModel.cameraPosition == .back) ? 
.front : .back - cameraModel.switchCamera(to: newPosition) - } - - private func flashIcon(for mode: AVCaptureDevice.FlashMode) -> String { - switch mode { - case .auto: - return "bolt.badge.a" - case .on: - return "bolt" - case .off: - return "bolt.slash" - @unknown default: - return "bolt.badge.a" - } - } - - // Get rotation angle for the zoom indicator based on device orientation - private func getRotationAngle() -> Angle { - switch UIDevice.current.orientation { - case .landscapeLeft: - return Angle(degrees: 90) - case .landscapeRight: - return Angle(degrees: -90) - case .portraitUpsideDown: - return Angle(degrees: 180) - default: - return Angle(degrees: 0) // Default to portrait - } - } -} - +import UIKit // SwiftUI wrapper for the camera preview struct CameraView: View { @ObservedObject var cameraModel: CameraModel - + // Add a slightly darker background to emphasize the capture area let backgroundOpacity: Double = 0.2 @@ -320,11 +22,11 @@ struct CameraView: View { ZStack { // Background color to emphasize the capture area Color.black - .edgesIgnoringSafeArea(.all) - + .ignoresSafeArea() + // Camera preview represented by UIViewRepresentable CameraPreviewView(cameraModel: cameraModel, viewSize: geometry.size) - .edgesIgnoringSafeArea(.all) + .ignoresSafeArea() // Focus indicator overlay with proper coordinates if cameraModel.showingFocusIndicator, let point = cameraModel.focusIndicatorPoint { @@ -341,53 +43,15 @@ struct CameraView: View { } } -// Focus square indicator -struct FocusIndicatorView: View { - // Animation state - @State private var isAnimating = false - - var body: some View { - ZStack { - // Outer square with animation - RoundedRectangle(cornerRadius: 8) - .stroke(Color.yellow, lineWidth: 2) - .frame(width: isAnimating ? 70 : 80, height: isAnimating ? 70 : 80) - .animation(Animation.easeInOut(duration: 0.5).repeatForever(autoreverses: true), value: isAnimating) - - // Inner square - RoundedRectangle(cornerRadius: 6) - .stroke(Color.white, lineWidth: 1.5) - .frame(width: 50, height: 50) - - // Center crosshair - ZStack { - // Horizontal line - Rectangle() - .fill(Color.yellow) - .frame(width: 20, height: 1) - - // Vertical line - Rectangle() - .fill(Color.yellow) - .frame(width: 1, height: 20) - } - } - .shadow(color: Color.black.opacity(0.5), radius: 2, x: 1, y: 1) - .onAppear { - isAnimating = true - } - } -} - // UIViewRepresentable for camera preview struct CameraPreviewView: UIViewRepresentable { @ObservedObject var cameraModel: CameraModel var viewSize: CGSize // Store the parent view's size for coordinate conversion - + // Standard photo aspect ratio is 4:3 // This is the ratio of most iPhone photos in portrait mode (3:4 actually, as width:height) private let photoAspectRatio: CGFloat = 3.0 / 4.0 // width/height in portrait mode - + // Store the view reference to help with coordinate mapping class CameraPreviewHolder { weak var view: UIView? 
@@ -401,83 +65,83 @@ struct CameraPreviewView: UIViewRepresentable { func makeUIView(context: Context) -> UIView { // Create a view with the exact size passed from parent let view = UIView(frame: CGRect(origin: .zero, size: viewSize)) - print("📐 Creating camera preview with size: \(viewSize.width)x\(viewSize.height)") + print("Creating camera preview with size: \(viewSize.width)x\(viewSize.height)") // Store the view reference viewHolder.view = view - + // Calculate the container size to match photo aspect ratio let containerSize = calculatePreviewContainerSize(for: viewSize) let containerOrigin = CGPoint( x: (viewSize.width - containerSize.width) / 2, y: (viewSize.height - containerSize.height) / 2 ) - + // Create the container view with proper aspect ratio let containerView = UIView(frame: CGRect(origin: containerOrigin, size: containerSize)) containerView.backgroundColor = .clear containerView.clipsToBounds = true view.addSubview(containerView) viewHolder.previewContainer = containerView - + // Add visual guides for the capture area - + // 1. Add a border to visualize the capture area let borderLayer = CALayer() borderLayer.frame = containerView.bounds borderLayer.borderColor = UIColor.white.withAlphaComponent(0.7).cgColor borderLayer.borderWidth = 2.0 containerView.layer.addSublayer(borderLayer) - + // 2. Add corner brackets for a more camera-like appearance let cornerSize: CGFloat = 20.0 let cornerThickness: CGFloat = 3.0 let cornerColor = UIColor.white.withAlphaComponent(0.8).cgColor - + // Top-left corner let topLeftCornerH = CALayer() topLeftCornerH.frame = CGRect(x: 0, y: 0, width: cornerSize, height: cornerThickness) topLeftCornerH.backgroundColor = cornerColor containerView.layer.addSublayer(topLeftCornerH) - + let topLeftCornerV = CALayer() topLeftCornerV.frame = CGRect(x: 0, y: 0, width: cornerThickness, height: cornerSize) topLeftCornerV.backgroundColor = cornerColor containerView.layer.addSublayer(topLeftCornerV) - + // Top-right corner let topRightCornerH = CALayer() topRightCornerH.frame = CGRect(x: containerSize.width - cornerSize, y: 0, width: cornerSize, height: cornerThickness) topRightCornerH.backgroundColor = cornerColor containerView.layer.addSublayer(topRightCornerH) - + let topRightCornerV = CALayer() topRightCornerV.frame = CGRect(x: containerSize.width - cornerThickness, y: 0, width: cornerThickness, height: cornerSize) topRightCornerV.backgroundColor = cornerColor containerView.layer.addSublayer(topRightCornerV) - + // Bottom-left corner let bottomLeftCornerH = CALayer() bottomLeftCornerH.frame = CGRect(x: 0, y: containerSize.height - cornerThickness, width: cornerSize, height: cornerThickness) bottomLeftCornerH.backgroundColor = cornerColor containerView.layer.addSublayer(bottomLeftCornerH) - + let bottomLeftCornerV = CALayer() bottomLeftCornerV.frame = CGRect(x: 0, y: containerSize.height - cornerSize, width: cornerThickness, height: cornerSize) bottomLeftCornerV.backgroundColor = cornerColor containerView.layer.addSublayer(bottomLeftCornerV) - + // Bottom-right corner let bottomRightCornerH = CALayer() bottomRightCornerH.frame = CGRect(x: containerSize.width - cornerSize, y: containerSize.height - cornerThickness, width: cornerSize, height: cornerThickness) bottomRightCornerH.backgroundColor = cornerColor containerView.layer.addSublayer(bottomRightCornerH) - + let bottomRightCornerV = CALayer() bottomRightCornerV.frame = CGRect(x: containerSize.width - cornerThickness, y: containerSize.height - cornerSize, width: cornerThickness, height: cornerSize) 
bottomRightCornerV.backgroundColor = cornerColor containerView.layer.addSublayer(bottomRightCornerV) - + // Add a label to indicate that this is the capture area let captureLabel = UILabel() captureLabel.text = "CAPTURE AREA" @@ -491,7 +155,7 @@ struct CameraPreviewView: UIViewRepresentable { height: captureLabel.frame.height ) containerView.addSubview(captureLabel) - + // Create and configure the preview layer let previewLayer = AVCaptureVideoPreviewLayer() previewLayer.session = cameraModel.session @@ -542,7 +206,7 @@ struct CameraPreviewView: UIViewRepresentable { // We prioritize fitting the width to match the device's screen width let width = size.width let height = width / photoAspectRatio - + // If height exceeds the available space, adjust both dimensions if height > size.height { // Use the available height @@ -553,53 +217,53 @@ struct CameraPreviewView: UIViewRepresentable { return CGSize(width: width, height: height) } } - + func updateUIView(_ uiView: UIView, context _: Context) { // Update the preview layer frame when the view updates DispatchQueue.main.async { // Update frame with the latest size uiView.frame = CGRect(origin: .zero, size: viewSize) - + // Calculate the container size to match photo aspect ratio let containerSize = calculatePreviewContainerSize(for: viewSize) let containerOrigin = CGPoint( x: (viewSize.width - containerSize.width) / 2, y: (viewSize.height - containerSize.height) / 2 ) - + // Update the container view frame if let containerView = viewHolder.previewContainer { containerView.frame = CGRect(origin: containerOrigin, size: containerSize) - + // Update the preview layer frame to match container if let layer = viewHolder.previewLayer { layer.frame = containerView.bounds - + // Ensure we're using the correct layer in the camera model // Only update if necessary to avoid excessive property changes if cameraModel.preview !== layer { cameraModel.preview = layer } } - + // Update all visual indicators if containerView.layer.sublayers?.count ?? 0 > 0 { // Update border if let borderLayer = containerView.layer.sublayers?.first(where: { $0.borderWidth > 0 }) { borderLayer.frame = containerView.bounds } - + // Update corner guides let cornerSize: CGFloat = 20.0 let cornerThickness: CGFloat = 3.0 - + // Find corner guides by their size and position for layer in containerView.layer.sublayers ?? 
[] { // Skip the border layer if layer.borderWidth > 0 { continue } - + // Update corner layers based on their position - if layer.frame.origin.x == 0 && layer.frame.origin.y == 0 { + if layer.frame.origin.x == 0, layer.frame.origin.y == 0 { // Top-left horizontal if layer.frame.height == cornerThickness { layer.frame = CGRect(x: 0, y: 0, width: cornerSize, height: cornerThickness) @@ -608,8 +272,7 @@ struct CameraPreviewView: UIViewRepresentable { else if layer.frame.width == cornerThickness { layer.frame = CGRect(x: 0, y: 0, width: cornerThickness, height: cornerSize) } - } - else if layer.frame.origin.y == 0 && layer.frame.origin.x > 0 { + } else if layer.frame.origin.y == 0, layer.frame.origin.x > 0 { // Top-right horizontal if layer.frame.height == cornerThickness { layer.frame = CGRect(x: containerSize.width - cornerSize, y: 0, width: cornerSize, height: cornerThickness) @@ -618,8 +281,7 @@ struct CameraPreviewView: UIViewRepresentable { else if layer.frame.width == cornerThickness { layer.frame = CGRect(x: containerSize.width - cornerThickness, y: 0, width: cornerThickness, height: cornerSize) } - } - else if layer.frame.origin.x == 0 && layer.frame.origin.y > 0 { + } else if layer.frame.origin.x == 0, layer.frame.origin.y > 0 { // Bottom-left horizontal if layer.frame.height == cornerThickness { layer.frame = CGRect(x: 0, y: containerSize.height - cornerThickness, width: cornerSize, height: cornerThickness) @@ -628,8 +290,7 @@ struct CameraPreviewView: UIViewRepresentable { else if layer.frame.width == cornerThickness { layer.frame = CGRect(x: 0, y: containerSize.height - cornerSize, width: cornerThickness, height: cornerSize) } - } - else if layer.frame.origin.x > 0 && layer.frame.origin.y > 0 { + } else if layer.frame.origin.x > 0, layer.frame.origin.y > 0 { // Bottom-right horizontal if layer.frame.height == cornerThickness { layer.frame = CGRect(x: containerSize.width - cornerSize, y: containerSize.height - cornerThickness, width: cornerSize, height: cornerThickness) @@ -640,7 +301,7 @@ struct CameraPreviewView: UIViewRepresentable { } } } - + // Update the capture area label position for subview in containerView.subviews { if let label = subview as? 
UILabel, label.text == "CAPTURE AREA" { @@ -657,30 +318,30 @@ struct CameraPreviewView: UIViewRepresentable { // Update the size in the model cameraModel.viewSize = containerSize // Store the actual photo preview size - //print("📐 Updated camera preview to size: \(containerSize.width)x\(containerSize.height)") + // print("📐 Updated camera preview to size: \(containerSize.width)x\(containerSize.height)") } } - + // This method is called once after makeUIView func makeCoordinator() -> Coordinator { // Create coordinator first - this shouldn't trigger camera operations let coordinator = Coordinator(self) - + // Capture cameraModel to avoid potential reference issues let capturedCameraModel = cameraModel - + // Give a slight delay before starting the camera session // This ensures all UI setup is complete and configuration has been committed DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { // Start camera on background thread after delay DispatchQueue.global(qos: .userInitiated).async { if !capturedCameraModel.session.isRunning { - print("📸 Starting camera session from makeCoordinator after delay") + print("Starting camera session from makeCoordinator after delay") capturedCameraModel.session.startRunning() } } } - + return coordinator } @@ -720,17 +381,16 @@ struct CameraPreviewView: UIViewRepresentable { guard let view = gesture.view else { return } let location = gesture.location(in: view) print("Double tap detected at \(location.x), \(location.y)") - + // Get the container view for proper coordinate conversion guard let containerView = parent.viewHolder.previewContainer else { return } - + // Check if the tap is within the container bounds let locationInContainer = view.convert(location, to: containerView) if !containerView.bounds.contains(locationInContainer) { print("Tap outside of capture area, ignoring") return } - // Convert touch point to camera coordinate if let layer = parent.viewHolder.previewLayer { @@ -738,7 +398,6 @@ struct CameraPreviewView: UIViewRepresentable { let pointInPreviewLayer = layer.captureDevicePointConverted(fromLayerPoint: locationInContainer) let devicePoint = layer.devicePoint(from: location) print("Converted to device coordinates (2x tap): \(devicePoint.x), \(devicePoint.y)") - // print("Converted to camera coordinates (2x tap): \(pointInPreviewLayer.x), \(pointInPreviewLayer.y)") @@ -754,10 +413,10 @@ struct CameraPreviewView: UIViewRepresentable { guard let view = gesture.view else { return } let location = gesture.location(in: view) print("Single tap detected at \(location.x), \(location.y)") - + // Get the container view for proper coordinate conversion guard let containerView = parent.viewHolder.previewContainer else { return } - + // Check if the tap is within the container bounds let locationInContainer = view.convert(location, to: containerView) if !containerView.bounds.contains(locationInContainer) { @@ -780,157 +439,13 @@ struct CameraPreviewView: UIViewRepresentable { } // MARK: - Conversion helpers + extension AVCaptureVideoPreviewLayer { func devicePoint(from viewPoint: CGPoint) -> CGPoint { - return self.captureDevicePointConverted(fromLayerPoint: viewPoint) + captureDevicePointConverted(fromLayerPoint: viewPoint) } func viewPoint(from devicePoint: CGPoint) -> CGPoint { - return self.layerPointConverted(fromCaptureDevicePoint: devicePoint) - } -} - -// Authentication view for the initial screen -struct AuthenticationView: View { - @Binding var isAuthenticated: Bool - @State private var pin = "" -// private let authManager = 
AuthenticationManager() - - var body: some View { - EmptyView() -// VStack(spacing: 20) { -// Image(systemName: "lock.shield") -// .font(.system(size: 70)) -// .foregroundColor(.blue) -// .padding(.bottom, 30) -// -// Text("Secure Camera") -// .font(.largeTitle) -// .bold() -// -// Text("Enter your device PIN to continue") -// .foregroundColor(.secondary) -// -// // Simulated PIN entry UI -// // In a real app, we'd use the device authentication -// SecureField("PIN", text: $pin) -// .keyboardType(.numberPad) -// .padding() -// .background(RoundedRectangle(cornerRadius: 8).stroke(Color.gray, lineWidth: 1)) -// .padding(.horizontal, 50) -// -// Button(action: { -// // Authenticate with device PIN -// authManager.authenticate(withMethod: .devicePIN) { success in -// if success { -// isAuthenticated = true -// } else { -// // Handle failed authentication -// pin = "" -// } -// } -// }) { -// Text("Unlock") -// .foregroundColor(.white) -// .padding() -// .frame(width: 200) -// .background(Color.blue) -// .cornerRadius(10) -// } -// .padding(.top, 30) -// } -// .padding() - } -} - -// Settings view with sharing, location, and security sections - -// Photo cell view for gallery items -struct PhotoCell: View { - let photo: SecurePhoto - let isSelected: Bool - let isSelecting: Bool - let onTap: () -> Void - let onDelete: () -> Void - - // Track whether this cell is visible in the viewport - @State private var isVisible: Bool = false - - // Cell size - private let cellSize: CGFloat = 100 - - var body: some View { - ZStack(alignment: .topTrailing) { - // Photo image that fills the entire cell - Image(uiImage: photo.thumbnail) - .resizable() - .aspectRatio(contentMode: .fill) // Use .fill to cover the entire cell - .frame(width: cellSize, height: cellSize) - .clipped() // Clip any overflow - .cornerRadius(10) - .onTapGesture(perform: onTap) - .overlay( - RoundedRectangle(cornerRadius: 10) - .stroke(isSelected ? 
Color.blue : Color.clear, lineWidth: 3) - ) - // Track appearance/disappearance for memory management - .onAppear { - // This cell is now visible - isVisible = true - photo.isVisible = true - MemoryManager.shared.reportThumbnailLoaded() - } - .onDisappear { - // This cell is no longer visible - isVisible = false - photo.markAsInvisible() - // Let the memory manager know it can clean up if needed - MemoryManager.shared.checkMemoryUsage() - } - - // Selection checkmark when in selection mode and selected - if isSelecting && isSelected { - Image(systemName: "checkmark.circle.fill") - .font(.system(size: 24)) - .foregroundColor(.blue) - .background(Circle().fill(Color.white)) - .padding(5) - } - } - } -} - -extension UIDeviceOrientation { - func getRotationAngle() -> Double { - switch self { - case .portrait: - return 90 // device upright → rotate 90° CW - case .portraitUpsideDown: - return 270 // device upside down → rotate 270° CW - case .landscapeLeft: - return 0 // device rotated left (home button right) → 0° rotation (natural) - case .landscapeRight: - return 180 // device rotated right (home button left) → 180° rotation - default: - return 90 // Default to portrait rotation if unknown - } - } -} - -// Extension for UIImage to get an image with the correct orientation applied -extension UIImage { - func imageWithProperOrientation() -> UIImage { - // If already in correct orientation, return self - if self.imageOrientation == .up { - return self - } - - // Create a proper oriented image - UIGraphicsBeginImageContextWithOptions(self.size, false, self.scale) - self.draw(in: CGRect(origin: .zero, size: self.size)) - let normalizedImage = UIGraphicsGetImageFromCurrentImageContext()! - UIGraphicsEndImageContext() - - return normalizedImage + layerPointConverted(fromCaptureDevicePoint: devicePoint) } } diff --git a/SnapSafe/Views/ContentView.swift b/SnapSafe/Views/ContentView.swift new file mode 100644 index 0000000..570be9a --- /dev/null +++ b/SnapSafe/Views/ContentView.swift @@ -0,0 +1,346 @@ +// +// ContentView.swift +// SnapSafe +// +// Created by Bill Booth on 5/2/25. 
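`ContentView` leans on two custom modifiers, `.obscuredWhenInactive()` and `.screenCaptureProtected()`, whose implementations live in the privacy-shield helpers and `ScreenCaptureManager` rather than in this file. The first is, in spirit, a scene-phase-driven cover; a rough sketch of that pattern (illustrative only, the modifier shipped in this repo may differ):

    import SwiftUI

    struct ObscuredWhenInactiveSketch: ViewModifier {
        @Environment(\.scenePhase) private var scenePhase

        func body(content: Content) -> some View {
            content.overlay {
                // Hide the UI in the app switcher or whenever the scene is not active.
                if scenePhase != .active {
                    Rectangle()
                        .fill(.ultraThinMaterial)
                        .ignoresSafeArea()
                }
            }
        }
    }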
+// + +import AVFoundation +import CoreGraphics +import ImageIO +import PhotosUI +import SwiftUI + +struct ContentView: View { + @StateObject private var cameraModel = CameraModel() + @StateObject private var locationManager = LocationManager.shared + @ObservedObject private var pinManager = PINManager.shared + @ObservedObject private var appStateCoordinator = AppStateCoordinator.shared + @State private var isShowingSettings = false + @State private var isShowingGallery = false + @State private var isAuthenticated = false + @State private var isPINSetupComplete = false + @State private var isShutterAnimating = false + @Environment(\.scenePhase) private var scenePhase + @ObservedObject private var screenCaptureManager = ScreenCaptureManager.shared + + // Track device orientation changes + @State private var deviceOrientation = UIDevice.current.orientation + + var body: some View { + ZStack { + if !pinManager.isPINSet { + // First time setup - show PIN setup screen + PINSetupView(isPINSetupComplete: $isPINSetupComplete) + } else if !isAuthenticated || appStateCoordinator.needsAuthentication { + // PIN verification screen + PINVerificationView(isAuthenticated: $isAuthenticated) + .onChange(of: isAuthenticated) { _, authenticated in + if authenticated { + // Reset the coordinator's auth state when authenticated + appStateCoordinator.authenticationComplete() + } + } + } else { + // Camera view - now contains both the camera preview and focus indicator + CameraView(cameraModel: cameraModel) + .ignoresSafeArea() + + // Shutter animation overlay + if isShutterAnimating { + Color.black + .opacity(0.8) + .ignoresSafeArea() + .transition(.opacity) + } + + // Camera controls overlay + VStack { + // Top control bar with flash toggle and camera switch + HStack { + // Camera switch button + Button(action: { + toggleCameraPosition() + }) { + Image(systemName: "arrow.triangle.2.circlepath.camera") + .font(.system(size: 20)) + .foregroundColor(.white) + .padding(12) + .background(Color.black.opacity(0.6)) + .clipShape(Circle()) + } + .padding(.top, 16) + .padding(.leading, 16) + + Spacer() + + // Flash control button - disabled for front camera + Button(action: { + toggleFlashMode() + }) { + Image(systemName: flashIcon(for: cameraModel.flashMode)) + .font(.system(size: 20)) + .foregroundColor(cameraModel.cameraPosition == .front ? .gray : .white) + .padding(12) + .background(Color.black.opacity(0.6)) + .clipShape(Circle()) + } + .disabled(cameraModel.cameraPosition == .front) + .padding(.top, 16) + .padding(.trailing, 16) + } + + Spacer() + + // Zoom level indicator + ZStack { + Capsule() + .fill(Color.black.opacity(0.6)) + .frame(width: 80, height: 30) + + Text(String(format: "%.1fx", cameraModel.zoomFactor)) + .font(.system(size: 14, weight: .bold)) + .foregroundColor(.white) + } + // Show for all zoom levels (including 0.5x for wide angle) + .opacity(cameraModel.zoomFactor != 1.0 ? 
1.0 : 0.0) + .animation(.easeInOut, value: cameraModel.zoomFactor) + .padding(.bottom, 10) + // Rotate the zoom indicator based on device orientation + .rotationEffect(getRotationAngle()) + // Separate animation for rotation to ensure it responds to device orientation + // changes independent of zoom changes + .animation(.easeInOut, value: deviceOrientation) + + HStack { + Button(action: { + isShowingGallery = true + }) { + Image(systemName: "photo.on.rectangle") + .font(.system(size: 24)) + .foregroundColor(.white) + .padding() + .background(Color.black.opacity(0.6)) + .clipShape(Circle()) + } + .padding() + + Spacer() + + // Capture button + Button(action: { + triggerShutterEffect() + cameraModel.capturePhoto() + }) { + Circle() + .strokeBorder(Color.white, lineWidth: 4) + .frame(width: 80, height: 80) + .background(Circle().fill(Color.white)) + .padding() + } + + Spacer() + Button(action: { + isShowingSettings = true + }) { + Image(systemName: "gear") + .font(.system(size: 24)) + .foregroundColor(.white) + .padding() + .background(Color.black.opacity(0.6)) + .clipShape(Circle()) + } + .padding() + } + .padding(.bottom) + } + } + } + .animation(.easeInOut(duration: 0.1), value: isShutterAnimating) + .sheet(isPresented: $isShowingSettings) { + SettingsView() + .obscuredWhenInactive() + .screenCaptureProtected() + .handleAppState(isPresented: $isShowingSettings) + .withAuthenticationOverlay() + } + .fullScreenCover(isPresented: $isShowingGallery) { + NavigationView { + SecureGalleryView(onDismiss: { + isShowingGallery = false + }) + .obscuredWhenInactive() + .screenCaptureProtected() + .handleAppState(isPresented: $isShowingGallery) + .withAuthenticationOverlay() + } + } + // Apply privacy shield when app is inactive (task switcher, control center, etc.) 
+ .obscuredWhenInactive() + // Protect against screen recording and screenshots + .screenCaptureProtected() + // Monitor PIN setup completion + .onChange(of: isPINSetupComplete) { _, completed in + if completed { + print("PIN setup complete, authenticating user") + isAuthenticated = true + // Reset flag to avoid issues on subsequent launches + DispatchQueue.main.asyncAfter(deadline: .now() + 1) { + isPINSetupComplete = false + } + } + } + .onAppear { + print("ContentView appeared - PIN is set: \(pinManager.isPINSet), require PIN on resume: \(pinManager.requirePINOnResume)") + + // Check if PIN is set, and only auto-authenticate if PIN check is not required + if pinManager.isPINSet { + // Only auto-authenticate if PIN verification is not required + isAuthenticated = !pinManager.requirePINOnResume + print("PIN is set, auto-authentication set to: \(isAuthenticated)") + } else { + print("PIN is not set, showing PIN setup screen") + } + + // Start monitoring orientation changes + UIDevice.current.beginGeneratingDeviceOrientationNotifications() + NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification, + object: nil, + queue: .main) + { _ in + deviceOrientation = UIDevice.current.orientation + } + } + .onDisappear { + // Stop monitoring orientation changes + NotificationCenter.default.removeObserver(self, name: UIDevice.orientationDidChangeNotification, object: nil) + UIDevice.current.endGeneratingDeviceOrientationNotifications() + } + // Scene phase monitoring for background/foreground transitions + .onChange(of: scenePhase) { _, newPhase in + print("ContentView scene phase changed to: \(newPhase)") + + if newPhase == .active { + // App is becoming active - let coordinator handle this + appStateCoordinator.handleWillEnterForeground() + } else if newPhase == .background { + // App is going to background - let coordinator handle this + appStateCoordinator.handleDidEnterBackground() + } else if newPhase == .inactive { + // Transitional state + print("App becoming inactive") + } + } + // Monitor authentication state from coordinator + .onChange(of: appStateCoordinator.needsAuthentication) { _, needsAuth in + if needsAuth { + // Force re-authentication + isAuthenticated = false + } + } + // Monitor dismiss all sheets signal + .onChange(of: appStateCoordinator.dismissAllSheets) { _, shouldDismiss in + if shouldDismiss { + // Dismiss all sheets + isShowingSettings = false + isShowingGallery = false + + // Reset flag after a short delay + DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { + appStateCoordinator.resetAuthenticationState() + } + } + } + } + + private func triggerShutterEffect() { + isShutterAnimating = true + DispatchQueue.main.asyncAfter(deadline: .now() + 0.15) { + isShutterAnimating = false + } + } + + private func toggleFlashMode() { + switch cameraModel.flashMode { + case .auto: + cameraModel.flashMode = .on + case .on: + cameraModel.flashMode = .off + case .off: + cameraModel.flashMode = .auto + @unknown default: + cameraModel.flashMode = .auto + } + } + + // Toggle between front and back cameras + private func toggleCameraPosition() { + // Toggle between front and back cameras + let newPosition: AVCaptureDevice.Position = (cameraModel.cameraPosition == .back) ? 
.front : .back + cameraModel.switchCamera(to: newPosition) + } + + private func flashIcon(for mode: AVCaptureDevice.FlashMode) -> String { + switch mode { + case .auto: + return "bolt.badge.a" + case .on: + return "bolt" + case .off: + return "bolt.slash" + @unknown default: + return "bolt.badge.a" + } + } + + // Get rotation angle for the zoom indicator based on device orientation + private func getRotationAngle() -> Angle { + switch UIDevice.current.orientation { + case .landscapeLeft: + Angle(degrees: 90) + case .landscapeRight: + Angle(degrees: -90) + case .portraitUpsideDown: + Angle(degrees: 180) + default: + Angle(degrees: 0) // Default to portrait + } + } +} + +// Settings view with sharing, location, and security sections + +extension UIDeviceOrientation { + func getRotationAngle() -> Double { + switch self { + case .portrait: + 90 // device upright → rotate 90° CW + case .portraitUpsideDown: + 270 // device upside down → rotate 270° CW + case .landscapeLeft: + 0 // device rotated left (home button right) → 0° rotation (natural) + case .landscapeRight: + 180 // device rotated right (home button left) → 180° rotation + default: + 90 // Default to portrait rotation if unknown + } + } +} + +// Extension for UIImage to get an image with the correct orientation applied +extension UIImage { + func imageWithProperOrientation() -> UIImage { + // If already in correct orientation, return self + if imageOrientation == .up { + return self + } + + // Create a proper oriented image + UIGraphicsBeginImageContextWithOptions(size, false, scale) + draw(in: CGRect(origin: .zero, size: size)) + let normalizedImage = UIGraphicsGetImageFromCurrentImageContext()! + UIGraphicsEndImageContext() + + return normalizedImage + } +} diff --git a/SnapSafe/Views/FocusIndicatorView.swift b/SnapSafe/Views/FocusIndicatorView.swift new file mode 100644 index 0000000..e01a88e --- /dev/null +++ b/SnapSafe/Views/FocusIndicatorView.swift @@ -0,0 +1,46 @@ +// +// FocusIndicatorView.swift +// SnapSafe +// +// Created by Bill Booth on 6/10/25. +// + +import SwiftUI + +// Focus square indicator +struct FocusIndicatorView: View { + // Animation state + @State private var isAnimating = false + + var body: some View { + ZStack { + // Outer square with animation + RoundedRectangle(cornerRadius: 8) + .stroke(Color.yellow, lineWidth: 2) + .frame(width: isAnimating ? 70 : 80, height: isAnimating ? 
70 : 80) + .animation(Animation.easeInOut(duration: 0.5).repeatForever(autoreverses: true), value: isAnimating) + + // Inner square + RoundedRectangle(cornerRadius: 6) + .stroke(Color.white, lineWidth: 1.5) + .frame(width: 50, height: 50) + + // Center crosshair + ZStack { + // Horizontal line + Rectangle() + .fill(Color.yellow) + .frame(width: 20, height: 1) + + // Vertical line + Rectangle() + .fill(Color.yellow) + .frame(width: 1, height: 20) + } + } + .shadow(color: Color.black.opacity(0.5), radius: 2, x: 1, y: 1) + .onAppear { + isAnimating = true + } + } +} diff --git a/SnapSafe/PINSetupView.swift b/SnapSafe/Views/PINSetupView.swift similarity index 89% rename from SnapSafe/PINSetupView.swift rename to SnapSafe/Views/PINSetupView.swift index 76e9cc5..b4a05be 100644 --- a/SnapSafe/PINSetupView.swift +++ b/SnapSafe/Views/PINSetupView.swift @@ -14,7 +14,7 @@ struct PINSetupView: View { @State private var showError = false @State private var errorMessage = "" @Binding var isPINSetupComplete: Bool - + var body: some View { NavigationView { VStack(spacing: 30) { @@ -22,16 +22,16 @@ struct PINSetupView: View { .font(.system(size: 70)) .foregroundColor(.blue) .padding(.top, 50) - + Text("Set Up Security PIN") .font(.largeTitle) .bold() - + Text("Please create a 4-digit PIN to secure your photos") .foregroundColor(.secondary) .multilineTextAlignment(.center) .padding(.horizontal) - + VStack(spacing: 20) { SecureField("Enter 4-digit PIN", text: $pin) .keyboardType(.numberPad) @@ -45,13 +45,13 @@ struct PINSetupView: View { if newValue.count > 4 { pin = String(newValue.prefix(4)) } - + // Only allow numbers - if !newValue.allSatisfy({ $0.isNumber }) { - pin = newValue.filter { $0.isNumber } + if !newValue.allSatisfy(\.isNumber) { + pin = newValue.filter(\.isNumber) } } - + SecureField("Confirm PIN", text: $confirmPin) .keyboardType(.numberPad) .textContentType(.oneTimeCode) @@ -64,21 +64,21 @@ struct PINSetupView: View { if newValue.count > 4 { confirmPin = String(newValue.prefix(4)) } - + // Only allow numbers - if !newValue.allSatisfy({ $0.isNumber }) { - confirmPin = newValue.filter { $0.isNumber } + if !newValue.allSatisfy(\.isNumber) { + confirmPin = newValue.filter(\.isNumber) } } } - + if showError { Text(errorMessage) .foregroundColor(.red) .font(.callout) .padding(.top, 5) } - + Button(action: { savePIN() }) { @@ -87,16 +87,16 @@ struct PINSetupView: View { .padding() .frame(width: 200) .background( - (pin.count == 4 && confirmPin.count == 4) ? + (pin.count == 4 && confirmPin.count == 4) ? 
Color.blue : Color.gray ) .cornerRadius(10) } .disabled(pin.count != 4 || confirmPin.count != 4) .padding(.top, 20) - + Spacer() - + Text("Your PIN will be required when opening the app and when it returns from background.") .font(.caption) .foregroundColor(.secondary) @@ -110,7 +110,7 @@ struct PINSetupView: View { .screenCaptureProtected() } } - + private func savePIN() { // Validate PIN if pin.count != 4 { @@ -118,17 +118,17 @@ struct PINSetupView: View { errorMessage = "PIN must be 4 digits" return } - + // Check if PINs match if pin != confirmPin { showError = true errorMessage = "PINs do not match" return } - + // Save PIN pinManager.setPIN(pin) - + // Signal completion isPINSetupComplete = true } @@ -136,4 +136,4 @@ struct PINSetupView: View { #Preview { PINSetupView(isPINSetupComplete: .constant(false)) -} \ No newline at end of file +} diff --git a/SnapSafe/PINVerificationView.swift b/SnapSafe/Views/PINVerificationView.swift similarity index 87% rename from SnapSafe/PINVerificationView.swift rename to SnapSafe/Views/PINVerificationView.swift index 0630da9..f8f39f3 100644 --- a/SnapSafe/PINVerificationView.swift +++ b/SnapSafe/Views/PINVerificationView.swift @@ -13,21 +13,21 @@ struct PINVerificationView: View { @State private var showError = false @State private var attempts = 0 @Binding var isAuthenticated: Bool - + var body: some View { VStack(spacing: 30) { Image(systemName: "lock.shield") .font(.system(size: 70)) .foregroundColor(.blue) .padding(.top, 50) - + Text("SnapSafe") .font(.largeTitle) .bold() - + Text("Enter your PIN to continue") .foregroundColor(.secondary) - + SecureField("PIN", text: $pin) .keyboardType(.numberPad) .textContentType(.oneTimeCode) @@ -40,25 +40,25 @@ struct PINVerificationView: View { if newValue.count > 4 { pin = String(newValue.prefix(4)) } - + // Only allow numbers - if !newValue.allSatisfy({ $0.isNumber }) { - pin = newValue.filter { $0.isNumber } + if !newValue.allSatisfy(\.isNumber) { + pin = newValue.filter(\.isNumber) } - + // Auto-verify when 4 digits are entered if newValue.count == 4 { verifyPIN() } } - + if showError { Text("Invalid PIN. 
Please try again.") .foregroundColor(.red) .font(.callout) .padding(.top, 5) } - + Button(action: { verifyPIN() }) { @@ -71,7 +71,7 @@ struct PINVerificationView: View { } .disabled(pin.count != 4) .padding(.top, 20) - + Spacer() } .onAppear { @@ -81,23 +81,23 @@ struct PINVerificationView: View { .obscuredWhenInactive() .screenCaptureProtected() } - + private func verifyPIN() { if pinManager.verifyPIN(pin) { // PIN is correct - prioritize setting authentication flag // to trigger immediate transition print("PIN correct, transitioning immediately") - + // Update authentication state with high priority DispatchQueue.main.async(qos: .userInteractive) { - self.isAuthenticated = true - self.showError = false - + isAuthenticated = true + showError = false + // Update last active time after transition has started - self.pinManager.updateLastActiveTime() - + pinManager.updateLastActiveTime() + // Clear the PIN field for next time - self.pin = "" + pin = "" } } else { // PIN is incorrect @@ -105,7 +105,7 @@ struct PINVerificationView: View { showError = true attempts += 1 pin = "" - + // Could add more sophisticated security measures here, like // temporary lockout after multiple failed attempts } @@ -114,4 +114,4 @@ struct PINVerificationView: View { #Preview { PINVerificationView(isAuthenticated: .constant(false)) -} \ No newline at end of file +} diff --git a/SnapSafe/Views/PhotoCell.swift b/SnapSafe/Views/PhotoCell.swift new file mode 100644 index 0000000..9b03d59 --- /dev/null +++ b/SnapSafe/Views/PhotoCell.swift @@ -0,0 +1,63 @@ +// +// PhotoCell.swift +// SnapSafe +// +// Created by Bill Booth on 6/10/25. +// + +import SwiftUI + +// Photo cell view for gallery items +struct PhotoCell: View { + let photo: SecurePhoto + let isSelected: Bool + let isSelecting: Bool + let onTap: () -> Void + let onDelete: () -> Void + + // Track whether this cell is visible in the viewport + @State private var isVisible: Bool = false + + // Cell size + private let cellSize: CGFloat = 100 + + var body: some View { + ZStack(alignment: .topTrailing) { + // Photo image that fills the entire cell + Image(uiImage: photo.thumbnail) + .resizable() + .aspectRatio(contentMode: .fill) // Use .fill to cover the entire cell + .frame(width: cellSize, height: cellSize) + .clipped() // Clip any overflow + .cornerRadius(10) + .onTapGesture(perform: onTap) + .overlay( + RoundedRectangle(cornerRadius: 10) + .stroke(isSelected ? Color.blue : Color.clear, lineWidth: 3) + ) + // Track appearance/disappearance for memory management + .onAppear { + // This cell is now visible + isVisible = true + photo.isVisible = true + MemoryManager.shared.reportThumbnailLoaded() + } + .onDisappear { + // This cell is no longer visible + isVisible = false + photo.markAsInvisible() + // Let the memory manager know it can clean up if needed + MemoryManager.shared.checkMemoryUsage() + } + + // Selection checkmark when in selection mode and selected + if isSelecting, isSelected { + Image(systemName: "checkmark.circle.fill") + .font(.system(size: 24)) + .foregroundColor(.blue) + .background(Circle().fill(Color.white)) + .padding(5) + } + } + } +} diff --git a/SnapSafe/Views/PhotoDetail/Components/FaceBoxView.swift b/SnapSafe/Views/PhotoDetail/Components/FaceBoxView.swift index c2caa64..4db8ca8 100644 --- a/SnapSafe/Views/PhotoDetail/Components/FaceBoxView.swift +++ b/SnapSafe/Views/PhotoDetail/Components/FaceBoxView.swift @@ -5,8 +5,8 @@ // Created by Bill Booth on 5/20/25. 
// -import SwiftUI import Foundation +import SwiftUI import UIKit struct FaceBoxView: View { @@ -14,12 +14,12 @@ struct FaceBoxView: View { let originalSize: CGSize let displaySize: CGSize var onTap: () -> Void - + // Get the scaled rectangle based on the display size private var scaledRect: CGRect { face.scaledRect(originalSize: originalSize, displaySize: displaySize) } - + var body: some View { ZStack { // Draw the rectangle border with color based on selection state @@ -29,7 +29,7 @@ struct FaceBoxView: View { width: scaledRect.width, height: scaledRect.height ) - + // Show resize handles for selected faces if face.isSelected { Group { @@ -38,17 +38,17 @@ struct FaceBoxView: View { .fill(Color.white) .frame(width: 12, height: 12) .position(x: scaledRect.minX, y: scaledRect.minY) - + Circle() .fill(Color.white) .frame(width: 12, height: 12) .position(x: scaledRect.maxX, y: scaledRect.minY) - + Circle() .fill(Color.white) .frame(width: 12, height: 12) .position(x: scaledRect.minX, y: scaledRect.maxY) - + Circle() .fill(Color.white) .frame(width: 12, height: 12) @@ -77,13 +77,13 @@ struct FaceBoxView_Previews: PreviewProvider { rect: CGRect(x: 50, y: 50, width: 100, height: 100), isSelected: true ) - + return ZStack { Color.gray Image(systemName: "person.fill") .resizable() .frame(width: 200, height: 200) - + FaceBoxView( face: face, originalSize: CGSize(width: 400, height: 400), diff --git a/SnapSafe/Views/PhotoDetail/Components/FaceDetectionControlsView.swift b/SnapSafe/Views/PhotoDetail/Components/FaceDetectionControlsView.swift index 4f91a98..2356f39 100644 --- a/SnapSafe/Views/PhotoDetail/Components/FaceDetectionControlsView.swift +++ b/SnapSafe/Views/PhotoDetail/Components/FaceDetectionControlsView.swift @@ -15,7 +15,7 @@ struct FaceDetectionControlsView: View { var hasFacesSelected: Bool var faceCount: Int var selectedCount: Int - + var body: some View { VStack(spacing: 8) { HStack { @@ -26,9 +26,9 @@ struct FaceDetectionControlsView: View { .background(Color.gray) .cornerRadius(8) } - + Spacer() - + Button(action: onAddBox) { Label("Add Box", systemImage: "plus.rectangle") .foregroundColor(.white) @@ -36,9 +36,9 @@ struct FaceDetectionControlsView: View { .background(isAddingBox ? Color.green : Color.blue) .cornerRadius(8) } - + Spacer() - + Button(action: onMask) { Label("Mask Faces", systemImage: "eye.slash") .foregroundColor(.white) @@ -49,7 +49,7 @@ struct FaceDetectionControlsView: View { .disabled(!hasFacesSelected) } .padding(.horizontal) - + if isAddingBox { Text("Tap anywhere on the image to add a custom box") .font(.caption) @@ -61,7 +61,7 @@ struct FaceDetectionControlsView: View { .foregroundColor(.secondary) .padding(.horizontal) } - + if faceCount == 0 { Text("No faces detected") .font(.callout) @@ -91,4 +91,4 @@ struct FaceDetectionControlsView_Previews: PreviewProvider { ) } } -} \ No newline at end of file +} diff --git a/SnapSafe/Views/PhotoDetail/Components/FaceDetectionOverlay.swift b/SnapSafe/Views/PhotoDetail/Components/FaceDetectionOverlay.swift index 1ab0d39..fc497e6 100644 --- a/SnapSafe/Views/PhotoDetail/Components/FaceDetectionOverlay.swift +++ b/SnapSafe/Views/PhotoDetail/Components/FaceDetectionOverlay.swift @@ -5,8 +5,8 @@ // Created by Bill Booth on 5/20/25. 
// -import SwiftUI import Foundation +import SwiftUI import UIKit struct FaceDetectionOverlay: View { @@ -17,12 +17,12 @@ struct FaceDetectionOverlay: View { var onTap: (DetectedFace) -> Void var onCreateBox: (CGPoint) -> Void var onResize: (DetectedFace, CGFloat) -> Void - + // State for face resizing @State private var isResizingBox = false @State private var selectedFaceForResize: DetectedFace? = nil @State private var currentResizeScale: CGFloat = 1.0 - + var body: some View { ZStack { // Add a gesture overlay to capture exact tap locations for adding boxes @@ -37,7 +37,7 @@ struct FaceDetectionOverlay: View { } ) } - + // Overlay each detected face with a rectangle ForEach(faces) { face in FaceBoxView( @@ -45,7 +45,7 @@ struct FaceDetectionOverlay: View { originalSize: originalSize, displaySize: displaySize, onTap: { - if !isAddingBox && !isResizingBox { + if !isAddingBox, !isResizingBox { onTap(face) } } @@ -59,7 +59,7 @@ struct FaceDetectionOverlay: View { selectedFaceForResize = face currentResizeScale = 1.0 } - + // Only resize if this is the selected face if let selectedFace = selectedFaceForResize, selectedFace.id == face.id { let delta = value / currentResizeScale @@ -83,9 +83,9 @@ struct FaceDetectionOverlay_Previews: PreviewProvider { static var previews: some View { let faces = [ DetectedFace(rect: CGRect(x: 50, y: 50, width: 100, height: 100)), - DetectedFace(rect: CGRect(x: 200, y: 150, width: 120, height: 120), isSelected: true) + DetectedFace(rect: CGRect(x: 200, y: 150, width: 120, height: 120), isSelected: true), ] - + return ZStack { Color.gray FaceDetectionOverlay( diff --git a/SnapSafe/Views/PhotoDetail/Components/PhotoControlsView.swift b/SnapSafe/Views/PhotoDetail/Components/PhotoControlsView.swift index fb0128f..60c6fce 100644 --- a/SnapSafe/Views/PhotoDetail/Components/PhotoControlsView.swift +++ b/SnapSafe/Views/PhotoDetail/Components/PhotoControlsView.swift @@ -13,7 +13,7 @@ struct PhotoControlsView: View { var onShare: () -> Void var onDelete: () -> Void var isZoomed: Bool - + var body: some View { HStack(spacing: 30) { // Info button @@ -26,7 +26,7 @@ struct PhotoControlsView: View { } .foregroundColor(.blue) } - + // Obfuscate faces button Button(action: onObfuscate) { VStack { @@ -37,7 +37,7 @@ struct PhotoControlsView: View { } .foregroundColor(.blue) } - + // Share button Button(action: onShare) { VStack { @@ -48,7 +48,7 @@ struct PhotoControlsView: View { } .foregroundColor(.blue) } - + // Delete button Button(action: onDelete) { VStack { @@ -82,4 +82,4 @@ struct PhotoControlsView_Previews: PreviewProvider { ) } } -} \ No newline at end of file +} diff --git a/SnapSafe/Views/PhotoDetail/Components/ZoomLevelIndicator.swift b/SnapSafe/Views/PhotoDetail/Components/ZoomLevelIndicator.swift index 09e99bb..f6db91b 100644 --- a/SnapSafe/Views/PhotoDetail/Components/ZoomLevelIndicator.swift +++ b/SnapSafe/Views/PhotoDetail/Components/ZoomLevelIndicator.swift @@ -10,13 +10,13 @@ import SwiftUI struct ZoomLevelIndicator: View { let scale: CGFloat let isVisible: Bool - + var body: some View { ZStack { Capsule() .fill(Color.black.opacity(0.7)) .frame(width: 70, height: 30) - + Text(String(format: "%.1fx", scale)) .font(.system(size: 14, weight: .bold)) .foregroundColor(.white) @@ -34,4 +34,4 @@ struct ZoomLevelIndicator_Previews: PreviewProvider { ZoomLevelIndicator(scale: 2.5, isVisible: true) } } -} \ No newline at end of file +} diff --git a/SnapSafe/Views/PhotoDetail/Components/ZoomableImageView.swift 
b/SnapSafe/Views/PhotoDetail/Components/ZoomableImageView.swift index 233edd3..1f27dde 100644 --- a/SnapSafe/Views/PhotoDetail/Components/ZoomableImageView.swift +++ b/SnapSafe/Views/PhotoDetail/Components/ZoomableImageView.swift @@ -17,26 +17,29 @@ struct ImageSizePreferenceKey: PreferenceKey { struct ZoomableImageView: View { // MARK: – Inputs + let image: UIImage let geometrySize: CGSize let canGoToPrevious: Bool - let canGoToNext: Bool + let canGoToNext: Bool let onNavigatePrevious: () -> Void - let onNavigateNext: () -> Void - let onDismiss: () -> Void - let imageRotation: Double + let onNavigateNext: () -> Void + let onDismiss: () -> Void + let imageRotation: Double let isFaceDetectionActive: Bool @ViewBuilder var overlay: () -> Overlay // MARK: – Zoom / pan state + @State private var scale: CGFloat = 1 @State private var lastScale: CGFloat = 1 - @State private var panOffset = CGSize.zero // when zoomed - @State private var accumulatedPan = CGSize.zero // keeps panning between drags + @State private var panOffset = CGSize.zero // when zoomed + @State private var accumulatedPan = CGSize.zero // keeps panning between drags // MARK: – Temporary drag state (non-zoomed) - @State private var swipeOffset: CGFloat = 0 // horizontal swipe - @State private var verticalDrag: CGFloat = 0 // pull-down + + @State private var swipeOffset: CGFloat = 0 // horizontal swipe + @State private var verticalDrag: CGFloat = 0 // pull-down var body: some View { GeometryReader { g in @@ -51,7 +54,6 @@ struct ZoomableImageView: View { .clipped() .overlay(overlay()) .ignoresSafeArea() - // ---------- Pinch to zoom ---------- .gesture( MagnificationGesture() @@ -70,7 +72,7 @@ struct ZoomableImageView: View { .simultaneousGesture( DragGesture() .onChanged { value in - if scale > 1 { // PANNING + if scale > 1 { // PANNING panOffset = value.translation return } @@ -79,35 +81,35 @@ struct ZoomableImageView: View { let dx = value.translation.width let dy = value.translation.height - if abs(dx) > abs(dy) { // HORIZONTAL SWIPE - swipeOffset = dx // live follow - } else if dy > 0 { // VERTICAL PULL-DOWN - verticalDrag = dy * 0.7 // some resistance + if abs(dx) > abs(dy) { // HORIZONTAL SWIPE + swipeOffset = dx // live follow + } else if dy > 0 { // VERTICAL PULL-DOWN + verticalDrag = dy * 0.7 // some resistance } } .onEnded { value in - if scale > 1 { // finish panning + if scale > 1 { // finish panning accumulatedPan.width += panOffset.width accumulatedPan.height += panOffset.height panOffset = .zero return } - guard !isFaceDetectionActive else { resetNonZoom() ; return } + guard !isFaceDetectionActive else { resetNonZoom(); return } let dx = value.translation.width let dy = value.translation.height - if abs(dx) > abs(dy) { // ------------ PAGE ------------ + if abs(dx) > abs(dy) { // ------------ PAGE ------------ let threshold = geometrySize.width / 4 let quick = abs(value.velocity.width) > 500 let quickTh = geometrySize.width / 8 - if (dx > threshold || (quick && dx > quickTh)) && canGoToPrevious { + if dx > threshold || (quick && dx > quickTh), canGoToPrevious { onNavigatePrevious() - } else if (dx < -threshold || (quick && dx < -quickTh)) && canGoToNext { + } else if dx < -threshold || (quick && dx < -quickTh), canGoToNext { onNavigateNext() } - } else if dy > 0 { // ----------- DISMISS ---------- + } else if dy > 0 { // ----------- DISMISS ---------- let threshold = geometrySize.height * 0.25 let quick = value.velocity.height > 800 if dy > threshold || quick { @@ -129,20 +131,20 @@ struct ZoomableImageView: View { 
private func resetNonZoom() { withAnimation(.spring) { - swipeOffset = 0 - verticalDrag = 0 + swipeOffset = 0 + verticalDrag = 0 } } } -//struct ZoomableImageView: View { +// struct ZoomableImageView: View { // // Image to display // let image: UIImage -// +// // // Geometry and size // let geometrySize: CGSize // @Binding var imageFrameSize: CGSize -// +// // // Zoom and pan state // @Binding var currentScale: CGFloat // @Binding var lastScale: CGFloat @@ -151,11 +153,11 @@ struct ZoomableImageView: View { // @Binding var lastDragPosition: CGSize // @Binding var isZoomed: Bool // @Binding var isSwiping: Bool -// +// // // Dismiss gesture state // @State private var verticalOffset: CGFloat = 0 // @State private var dismissProgress: CGFloat = 0 -// +// // // Navigation state // let canGoToPrevious: Bool // let canGoToNext: Bool @@ -164,17 +166,17 @@ struct ZoomableImageView: View { // var onDismiss: () -> Void // var onReset: () -> Void // let imageRotation: Double -// +// // // Face detection state // let isFaceDetectionActive: Bool -// +// // // Orientation properties // private var isLandscape: Bool { // // Consider the image orientation // let orientation = image.imageOrientation.rawValue // // Orientations 5-8 are 90/270 degree rotations // let isRotated = orientation >= 5 && orientation <= 8 -// +// // if isRotated { // // For rotated images, swap dimensions for comparison // return image.size.height > image.size.width @@ -183,13 +185,13 @@ struct ZoomableImageView: View { // return image.size.width > image.size.height // } // } -// +// // // Device orientation state // @State private var deviceOrientation = UIDevice.current.orientation -// +// // // Custom overlay // @ViewBuilder var overlay: () -> Overlay -// +// // var body: some View { // GeometryReader { geometry in // ZStack { @@ -197,7 +199,7 @@ struct ZoomableImageView: View { // Color(UIColor.systemBackground) // .opacity(1.0 - dismissProgress * 0.7) // .edgesIgnoringSafeArea(.all) -// +// // // Image display that fills the entire screen // Image(uiImage: image) // .resizable() @@ -224,13 +226,13 @@ struct ZoomableImageView: View { // .onPreferenceChange(ImageSizePreferenceKey.self) { size in // self.imageFrameSize = size // } -// +// // // Custom overlay content // overlay() // } // } // ) -// +// // Spacer() // } // } @@ -245,19 +247,19 @@ struct ZoomableImageView: View { // .onChanged { value in // let delta = value / lastScale // lastScale = value -// +// // // Apply zoom with a smoothing factor // let newScale = currentScale * delta // // Limit the scale to reasonable bounds // currentScale = min(max(newScale, 0.5), 6.0) -// +// // // Update zoomed state for UI adjustments // isZoomed = currentScale > 1.1 // } // .onEnded { _ in // // Reset lastScale for next gesture // lastScale = 1.0 -// +// // // Check if we should return to the gallery // if currentScale < 0.6, !isFaceDetectionActive { // // User has pinched out enough to dismiss @@ -282,24 +284,24 @@ struct ZoomableImageView: View { // } else if !isFaceDetectionActive { // let horizontalMovement = abs(gesture.translation.width) // let verticalMovement = abs(gesture.translation.height) -// +// // print("🟡 ZoomableImageView onChanged: H:\(horizontalMovement), V:\(verticalMovement)") -// +// // if horizontalMovement > verticalMovement { // // Handle horizontal navigation // let threshold: CGFloat = geometrySize.width / 4 // let velocity = gesture.velocity.width -// +// // // Check for swipe velocity (quick swipe with lower threshold) // let isQuickSwipe = abs(velocity) 
> 500 // let quickSwipeThreshold: CGFloat = geometrySize.width / 8 -// +// // if (offset > threshold || (isQuickSwipe && offset > quickSwipeThreshold)) && canGoToPrevious { // onNavigatePrevious() // } else if (offset < -threshold || (isQuickSwipe && offset < -quickSwipeThreshold)) && canGoToNext { // onNavigateNext() // } -// +// // // Reset the offset with animation // withAnimation(.spring(response: 0.5, dampingFraction: 0.8)) { // offset = 0 @@ -310,7 +312,7 @@ struct ZoomableImageView: View { // let dismissThreshold: CGFloat = geometrySize.height * 0.25 // let velocity = gesture.velocity.height // let isQuickDownSwipe = velocity > 800 -// +// // if gesture.translation.height > dismissThreshold || isQuickDownSwipe { // print("🔴 ZoomableImageView onChanged: DISMISS TRIGGERED!") // print(" - Translation height: \(gesture.translation.height)") @@ -337,31 +339,31 @@ struct ZoomableImageView: View { // width: max(-maxDragDistance, min(maxDragDistance, dragOffset.width)), // height: max(-maxDragDistance, min(maxDragDistance, dragOffset.height)) // ) -// +// // // Update dragOffset with the constrained value // self.dragOffset = constrainedOffset -// +// // // Save this position as the new reference point for the next drag // self.lastDragPosition = constrainedOffset // } else if !isFaceDetectionActive { // let horizontalMovement = abs(gesture.translation.width) // let verticalMovement = abs(gesture.translation.height) -// +// // if horizontalMovement > verticalMovement { // // Handle horizontal navigation // let threshold: CGFloat = geometrySize.width / 4 // let velocity = gesture.velocity.width -// +// // // Check for swipe velocity (quick swipe with lower threshold) // let isQuickSwipe = abs(velocity) > 500 // let quickSwipeThreshold: CGFloat = geometrySize.width / 8 -// +// // if (offset > threshold || (isQuickSwipe && offset > quickSwipeThreshold)) && canGoToPrevious { // onNavigatePrevious() // } else if (offset < -threshold || (isQuickSwipe && offset < -quickSwipeThreshold)) && canGoToNext { // onNavigateNext() // } -// +// // // Reset the offset with animation // withAnimation(.spring(response: 0.5, dampingFraction: 0.8)) { // offset = 0 @@ -373,7 +375,7 @@ struct ZoomableImageView: View { // let dismissThreshold: CGFloat = geometrySize.height * 0.25 // let velocity = gesture.velocity.height // let isQuickDownSwipe = velocity > 800 -// +// // if gesture.translation.height > dismissThreshold || isQuickDownSwipe { // print("🔴 ZoomableImageView onEnded: DISMISS TRIGGERED!") // print(" - Translation height: \(gesture.translation.height)") @@ -407,10 +409,10 @@ struct ZoomableImageView: View { // } // } // } -//} +// } // Preview with sample image -//struct ZoomableImageView_Previews: PreviewProvider { +// struct ZoomableImageView_Previews: PreviewProvider { // static var previews: some View { // GeometryReader { geometry in // ZoomableImageView( @@ -442,4 +444,4 @@ struct ZoomableImageView: View { // } // } // } -//} +// } diff --git a/SnapSafe/Views/PhotoDetail/EnhancedPhotoDetailView.swift b/SnapSafe/Views/PhotoDetail/EnhancedPhotoDetailView.swift index bc0c7cb..1012899 100644 --- a/SnapSafe/Views/PhotoDetail/EnhancedPhotoDetailView.swift +++ b/SnapSafe/Views/PhotoDetail/EnhancedPhotoDetailView.swift @@ -13,29 +13,29 @@ struct EnhancedPhotoDetailView: View { let showFaceDetection: Bool let onDelete: ((SecurePhoto) -> Void)? let onDismiss: (() -> Void)? 
- + @Environment(\.dismiss) private var dismiss @State private var dragOffset: CGSize = .zero @State private var dismissProgress: CGFloat = 0 @State private var isTabViewTransitioning: Bool = false - @State private var lastIndexChangeTime: Date = Date() - + @State private var lastIndexChangeTime: Date = .init() + init(allPhotos: [SecurePhoto], initialIndex: Int, showFaceDetection: Bool, onDelete: ((SecurePhoto) -> Void)? = nil, onDismiss: (() -> Void)? = nil) { self.allPhotos = allPhotos - self._currentIndex = State(initialValue: initialIndex) + _currentIndex = State(initialValue: initialIndex) self.showFaceDetection = showFaceDetection self.onDelete = onDelete self.onDismiss = onDismiss } - + var body: some View { - GeometryReader { geometry in + GeometryReader { _ in ZStack { // Background that fades during dismiss Color.black .opacity(1.0 - dismissProgress * 0.8) .edgesIgnoringSafeArea(.all) - + TabView(selection: $currentIndex) { ForEach(Array(allPhotos.enumerated()), id: \.element.id) { index, photo in PhotoDetailView_Impl( @@ -55,22 +55,22 @@ struct EnhancedPhotoDetailView: View { // // Track when TabView transitions occur // isTabViewTransitioning = true // lastIndexChangeTime = Date() -// +// // // Reset any dismiss progress during navigation // withAnimation(.easeOut(duration: 0.2)) { // dragOffset = .zero // dismissProgress = 0 // } -// +// // // Preload adjacent photos when index changes // preloadAdjacentPhotos(currentIndex: newIndex) -// +// // Clear transition state after a delay // DispatchQueue.main.asyncAfter(deadline: .now() + 0.8) { // isTabViewTransitioning = false // } // } - + // Photo counter overlay VStack { HStack { @@ -86,14 +86,14 @@ struct EnhancedPhotoDetailView: View { Spacer() } .padding(.top, 50) - + Spacer() } } .obscuredWhenInactive() .gesture( DragGesture() - .onChanged { value in + .onChanged { _ in // Bail out until the drag is clearly vertical // guard abs(value.translation.height) > // abs(value.translation.width) else { return } @@ -102,7 +102,7 @@ struct EnhancedPhotoDetailView: View { // dismissProgress = min(value.translation.height / // (geometry.size.height * 0.4), 1.0) } - .onEnded { value in + .onEnded { _ in // Same dominant-axis guard here *before* any threshold checks // guard abs(value.translation.height) > // abs(value.translation.width) else { return } @@ -132,7 +132,7 @@ struct EnhancedPhotoDetailView: View { preloadAdjacentPhotos(currentIndex: currentIndex) } } - + private func preloadAdjacentPhotos(currentIndex: Int) { // Preload previous photo if currentIndex > 0 { @@ -141,7 +141,7 @@ struct EnhancedPhotoDetailView: View { _ = previousPhoto.thumbnail } } - + // Preload next photo if currentIndex < allPhotos.count - 1 { let nextPhoto = allPhotos[currentIndex + 1] diff --git a/SnapSafe/Views/PhotoDetail/ImageInfoView.swift b/SnapSafe/Views/PhotoDetail/ImageInfoView.swift index 802e35a..004fbd0 100644 --- a/SnapSafe/Views/PhotoDetail/ImageInfoView.swift +++ b/SnapSafe/Views/PhotoDetail/ImageInfoView.swift @@ -5,15 +5,15 @@ // Created by Bill Booth on 5/20/25. 
// -import SwiftUI import CoreGraphics import ImageIO +import SwiftUI // View for displaying image metadata struct ImageInfoView: View { let photo: SecurePhoto @Environment(\.dismiss) private var dismiss - + // Helper function to format bytes to readable size private func formatFileSize(bytes: Int) -> String { let formatter = ByteCountFormatter() @@ -21,7 +21,7 @@ struct ImageInfoView: View { formatter.countStyle = .file return formatter.string(fromByteCount: Int64(bytes)) } - + // Helper to format date private func formatDate(_ date: Date) -> String { let formatter = DateFormatter() @@ -29,23 +29,23 @@ struct ImageInfoView: View { formatter.timeStyle = .medium return formatter.string(from: date) } - + // Helper to interpret orientation private func orientationString(from value: Int) -> String { switch value { - case 1: return "Normal" - case 3: return "Rotated 180°" - case 6: return "Rotated 90° CW" - case 8: return "Rotated 90° CCW" - default: return "Unknown (\(value))" + case 1: "Normal" + case 3: "Rotated 180°" + case 6: "Rotated 90° CW" + case 8: "Rotated 90° CCW" + default: "Unknown (\(value))" } } - + // Extract location data from EXIF private func locationString(from metadata: [String: Any]) -> String { if let gpsData = metadata[String(kCGImagePropertyGPSDictionary)] as? [String: Any] { var locationParts: [String] = [] - + // Extract latitude if let latitudeRef = gpsData[String(kCGImagePropertyGPSLatitudeRef)] as? String, let latitude = gpsData[String(kCGImagePropertyGPSLatitude)] as? Double @@ -53,7 +53,7 @@ struct ImageInfoView: View { let latDirection = latitudeRef == "N" ? "N" : "S" locationParts.append(String(format: "%.6f°%@", latitude, latDirection)) } - + // Extract longitude if let longitudeRef = gpsData[String(kCGImagePropertyGPSLongitudeRef)] as? String, let longitude = gpsData[String(kCGImagePropertyGPSLongitude)] as? Double @@ -61,36 +61,36 @@ struct ImageInfoView: View { let longDirection = longitudeRef == "E" ? "E" : "W" locationParts.append(String(format: "%.6f°%@", longitude, longDirection)) } - + // Include altitude if available if let altitude = gpsData[String(kCGImagePropertyGPSAltitude)] as? Double { locationParts.append(String(format: "Alt: %.1fm", altitude)) } - + return locationParts.isEmpty ? "Not available" : locationParts.joined(separator: ", ") } - + return "Not available" } - + var body: some View { NavigationView { Form { Section(header: Text("Basic Information")) { HStack { - Text("Filename") + Text("ID") Spacer() - Text(photo.filename) + Text(photo.id) .foregroundColor(.secondary) } - + HStack { Text("Resolution") Spacer() Text("\(Int(photo.fullImage.size.width)) × \(Int(photo.fullImage.size.height))") .foregroundColor(.secondary) } - + if let imageData = photo.fullImage.jpegData(compressionQuality: 1.0) { HStack { Text("File Size") @@ -100,126 +100,47 @@ struct ImageInfoView: View { } } } - + Section(header: Text("Date Information")) { - if let creationDate = photo.metadata["creationDate"] as? Double { - HStack { - Text("Date Taken") - Spacer() - Text(formatDate(Date(timeIntervalSince1970: creationDate))) - .foregroundColor(.secondary) - } - } else { - Text("No date information available") + HStack { + Text("Date Taken") + Spacer() + Text(formatDate(photo.metadata.creationDate)) .foregroundColor(.secondary) } - - if let exifDict = photo.metadata[String(kCGImagePropertyExifDictionary)] as? [String: Any], - let dateTimeOriginal = exifDict[String(kCGImagePropertyExifDateTimeOriginal)] as? 
String - { - HStack { - Text("Original Date") - Spacer() - Text(dateTimeOriginal) - .foregroundColor(.secondary) - } + + HStack { + Text("Date Modified") + Spacer() + Text(formatDate(photo.metadata.modificationDate)) + .foregroundColor(.secondary) } + // TODO: Add EXIF data support to PhotoMetadata if needed } - - Section(header: Text("Orientation")) { - if let tiffDict = photo.metadata[String(kCGImagePropertyTIFFDictionary)] as? [String: Any], - let orientation = tiffDict[String(kCGImagePropertyTIFFOrientation)] as? Int - { - HStack { - Text("Orientation") - Spacer() - Text(orientationString(from: orientation)) - .foregroundColor(.secondary) - } - } else { - Text("Normal") + + Section(header: Text("Photo Details")) { + HStack { + Text("File Size") + Spacer() + Text(ByteCountFormatter.string(fromByteCount: Int64(photo.metadata.fileSize), countStyle: .file)) .foregroundColor(.secondary) } - } - - Section(header: Text("Location")) { - Text(locationString(from: photo.metadata)) - .foregroundColor(.secondary) - } - - Section(header: Text("Camera Information")) { - if let exifDict = photo.metadata[String(kCGImagePropertyExifDictionary)] as? [String: Any] { - if let make = (photo.metadata[String(kCGImagePropertyTIFFDictionary)] as? [String: Any])?[String(kCGImagePropertyTIFFMake)] as? String, - let model = (photo.metadata[String(kCGImagePropertyTIFFDictionary)] as? [String: Any])?[String(kCGImagePropertyTIFFModel)] as? String - { - HStack { - Text("Camera") - Spacer() - Text("\(make) \(model)") - .foregroundColor(.secondary) - } - } - - if let fNumber = exifDict[String(kCGImagePropertyExifFNumber)] as? Double { - HStack { - Text("Aperture") - Spacer() - Text(String(format: "f/%.1f", fNumber)) - .foregroundColor(.secondary) - } - } - - if let exposureTime = exifDict[String(kCGImagePropertyExifExposureTime)] as? Double { - HStack { - Text("Shutter Speed") - Spacer() - Text("\(exposureTime < 1 ? "1/\(Int(1 / exposureTime))" : String(format: "%.1f", exposureTime))s") - .foregroundColor(.secondary) - } - } - - if let isoValue = exifDict[String(kCGImagePropertyExifISOSpeedRatings)] as? [Int], - let iso = isoValue.first - { - HStack { - Text("ISO") - Spacer() - Text("\(iso)") - .foregroundColor(.secondary) - } - } - - if let focalLength = exifDict[String(kCGImagePropertyExifFocalLength)] as? 
Double { - HStack { - Text("Focal Length") - Spacer() - Text("\(Int(focalLength))mm") - .foregroundColor(.secondary) - } - } - } else { - Text("No camera information available") + + HStack { + Text("Faces Detected") + Spacer() + Text("\(photo.metadata.faces.count)") .foregroundColor(.secondary) } - } - - // Display all raw metadata for debugging - if photo.metadata.count > 0 { - Section(header: Text("All Metadata")) { - DisclosureGroup("Raw Metadata") { - ForEach(photo.metadata.keys.sorted(), id: \.self) { key in - VStack(alignment: .leading) { - Text(key) - .font(.headline) - .foregroundColor(.blue) - Text("\(String(describing: photo.metadata[key]!))") - .font(.caption) - } - .padding(.vertical, 4) - } - } + + HStack { + Text("Mask Mode") + Spacer() + Text(photo.metadata.maskMode.displayName) + .foregroundColor(.secondary) } } + // TODO: Add camera information section when EXIF support is added to PhotoMetadata } .navigationTitle("Image Information") .navigationBarTitleDisplayMode(.inline) @@ -232,4 +153,4 @@ struct ImageInfoView: View { } } } -} \ No newline at end of file +} diff --git a/SnapSafe/Views/PhotoDetail/Modifiers/ZoomableModifier.swift b/SnapSafe/Views/PhotoDetail/Modifiers/ZoomableModifier.swift index 98df265..c244096 100644 --- a/SnapSafe/Views/PhotoDetail/Modifiers/ZoomableModifier.swift +++ b/SnapSafe/Views/PhotoDetail/Modifiers/ZoomableModifier.swift @@ -15,7 +15,7 @@ struct ZoomableModifier: ViewModifier { @State private var initialScale: CGFloat = 1.0 var onZoomOut: () -> Void var onZoomChange: ((Bool) -> Void)? = nil - + func body(content: Content) -> some View { content .scaleEffect(scale) @@ -23,7 +23,7 @@ struct ZoomableModifier: ViewModifier { .gesture(makeZoomGesture()) .gesture(makeDragGesture()) } - + // Create a pinch/zoom gesture private func makeZoomGesture() -> some Gesture { MagnificationGesture() @@ -31,18 +31,18 @@ struct ZoomableModifier: ViewModifier { // Calculate new scale relative to the starting scale let delta = value / lastScale lastScale = value - + let newScale = scale * delta // Limit the scale to reasonable bounds scale = min(max(newScale, 0.5), 6.0) - + // Call callback when zoom state changes onZoomChange?(scale > 1.0) } .onEnded { _ in // Reset the lastScale for the next gesture lastScale = 1.0 - + // If user zoomed out below threshold, trigger dismiss if scale < 0.6 { onZoomOut() @@ -55,16 +55,16 @@ struct ZoomableModifier: ViewModifier { } } } - + // Create a drag gesture for panning private func makeDragGesture() -> some Gesture { DragGesture() .onChanged { value in // Only enable drag when zoomed in if scale > 1.0 { - self.offset = CGSize( - width: self.offset.width + value.translation.width, - height: self.offset.height + value.translation.height + offset = CGSize( + width: offset.width + value.translation.width, + height: offset.height + value.translation.height ) } } @@ -76,4 +76,4 @@ extension View { func zoomable(scale: Binding, offset: Binding, lastScale: Binding, onZoomOut: @escaping () -> Void, onZoomChange: ((Bool) -> Void)? 
= nil) -> some View { modifier(ZoomableModifier(scale: scale, offset: offset, lastScale: lastScale, onZoomOut: onZoomOut, onZoomChange: onZoomChange)) } -} \ No newline at end of file +} diff --git a/SnapSafe/Views/PhotoDetail/PhotoDetail.swift b/SnapSafe/Views/PhotoDetail/PhotoDetail.swift index cd850ee..15e6ba1 100644 --- a/SnapSafe/Views/PhotoDetail/PhotoDetail.swift +++ b/SnapSafe/Views/PhotoDetail/PhotoDetail.swift @@ -8,4 +8,4 @@ import SwiftUI // This file exists to help with imports -// The actual implementation is handled through the root PhotoDetailView.swift \ No newline at end of file +// The actual implementation is handled through the root PhotoDetailView.swift diff --git a/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift b/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift index 9d9e314..7f2d6ad 100644 --- a/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift +++ b/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift @@ -14,10 +14,10 @@ import UIKit struct PhotoDetailView_Impl: View { // ViewModel @StateObject private var viewModel: PhotoDetailViewModel - + // Environment @Environment(\.dismiss) private var dismiss - + // Initialize with a single photo init(photo: SecurePhoto, showFaceDetection: Bool, onDelete: ((SecurePhoto) -> Void)? = nil, onDismiss: (() -> Void)? = nil) { _viewModel = StateObject(wrappedValue: PhotoDetailViewModel( @@ -27,7 +27,7 @@ struct PhotoDetailView_Impl: View { onDismiss: onDismiss )) } - + // Initialize with multiple photos init(allPhotos: [SecurePhoto], initialIndex: Int, showFaceDetection: Bool, onDelete: ((SecurePhoto) -> Void)? = nil, onDismiss: (() -> Void)? = nil) { _viewModel = StateObject(wrappedValue: PhotoDetailViewModel( @@ -38,14 +38,14 @@ struct PhotoDetailView_Impl: View { onDismiss: onDismiss )) } - + var body: some View { GeometryReader { geometry in ZStack { // Background color Color.black.opacity(0.05) .edgesIgnoringSafeArea(.all) - + VStack { // Photo counter at the top if we have multiple photos if !viewModel.allPhotos.isEmpty { @@ -55,28 +55,28 @@ struct PhotoDetailView_Impl: View { .padding(.top, 8) .opacity(viewModel.isZoomed ? 
0.5 : 1.0) // Fade when zoomed } - + Spacer() - + // Zoom level indicator ZoomLevelIndicator( scale: viewModel.currentScale, isVisible: viewModel.isZoomed ) - + // Centered image display with appropriate orientation handling ZoomableImageView( image: viewModel.displayedImage, geometrySize: geometry.size, canGoToPrevious: viewModel.canGoToPrevious, - canGoToNext: viewModel.canGoToNext, + canGoToNext: viewModel.canGoToNext, onNavigatePrevious: viewModel.navigateToPrevious, - onNavigateNext: viewModel.navigateToNext, + onNavigateNext: viewModel.navigateToNext, onDismiss: { viewModel.onDisappear() dismiss() }, - imageRotation: viewModel.imageRotation, + imageRotation: viewModel.imageRotation, isFaceDetectionActive: viewModel.isFaceDetectionActive ) { // Face detection overlay @@ -99,13 +99,13 @@ struct PhotoDetailView_Impl: View { let newX = centerX - newWidth / 2 let newY = centerY - newHeight / 2 let newRect = CGRect(x: newX, y: newY, width: newWidth, height: newHeight) - + let resizedFace = DetectedFace( bounds: newRect, isSelected: face.isSelected, isUserCreated: face.isUserCreated ) - + var updatedFaces = viewModel.detectedFaces updatedFaces[index] = resizedFace viewModel.detectedFaces = updatedFaces @@ -115,15 +115,15 @@ struct PhotoDetailView_Impl: View { } } .frame(maxWidth: .infinity, maxHeight: geometry.size.height * 0.7) - + Spacer() - + // Processing indicator if viewModel.processingFaces { ProgressView("Detecting faces...") .padding() } - + // Controls - conditionally show face detection controls or standard controls if viewModel.isFaceDetectionActive { FaceDetectionControlsView( @@ -144,7 +144,7 @@ struct PhotoDetailView_Impl: View { isAddingBox: false, hasFacesSelected: viewModel.hasFacesSelected, faceCount: viewModel.detectedFaces.count, - selectedCount: viewModel.detectedFaces.filter({ $0.isSelected }).count + selectedCount: viewModel.detectedFaces.count(where: { $0.isSelected }) ) } else { PhotoControlsView( @@ -154,9 +154,9 @@ struct PhotoDetailView_Impl: View { print("Share button pressed - showing share sheet") viewModel.sharePhoto() }, - onDelete: { + onDelete: { print("Delete button pressed - showing confirmation") - viewModel.showDeleteConfirmation = true + viewModel.showDeleteConfirmation = true }, isZoomed: viewModel.isZoomed ) @@ -215,7 +215,7 @@ struct PhotoDetailView_Impl: View { viewModel.selectedMaskMode = .noise viewModel.showBlurConfirmation = true }, - .cancel() + .cancel(), ] ) } @@ -227,6 +227,6 @@ struct PhotoDetailView_Impl: View { } } } - + // No additional helpers needed now } diff --git a/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift b/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift index 4da1341..21bcbc8 100644 --- a/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift +++ b/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift @@ -5,32 +5,32 @@ // Created by Bill Booth on 5/20/25. // -import UIKit import SwiftUI +import UIKit class PhotoDetailViewModel: ObservableObject { private var photo: SecurePhoto? - + @Published var allPhotos: [SecurePhoto] = [] @Published var currentIndex: Int = 0 - + // Callback handlers var onDelete: ((SecurePhoto) -> Void)? var onDismiss: (() -> Void)? 
- + // UI state variables @Published var showDeleteConfirmation = false @Published var imageRotation: Double = 0 @Published var offset: CGFloat = 0 @Published var isSwiping: Bool = false - + // Zoom and pan states @Published var currentScale: CGFloat = 1.0 @Published var dragOffset: CGSize = .zero @Published var lastScale: CGFloat = 1.0 @Published var isZoomed: Bool = false @Published var lastDragPosition: CGSize = .zero - + // Face detection states @Published var isFaceDetectionActive = false @Published var detectedFaces: [DetectedFace] = [] @@ -39,34 +39,35 @@ class PhotoDetailViewModel: ObservableObject { @Published var showBlurConfirmation = false @Published var selectedMaskMode: MaskMode = .blur @Published var showMaskOptions = false - + @Published var showImageInfo = false - + @Published var imageFrameSize: CGSize = .zero - + private let faceDetector = FaceDetector() private let secureFileManager = SecureFileManager() - + let showFaceDetection: Bool - + // MARK: - Initialization - + init(photo: SecurePhoto, showFaceDetection: Bool, onDelete: ((SecurePhoto) -> Void)? = nil, onDismiss: (() -> Void)? = nil) { self.photo = photo self.showFaceDetection = showFaceDetection self.onDelete = onDelete self.onDismiss = onDismiss } - + init(allPhotos: [SecurePhoto], initialIndex: Int, showFaceDetection: Bool, onDelete: ((SecurePhoto) -> Void)? = nil, onDismiss: (() -> Void)? = nil) { self.allPhotos = allPhotos - self.currentIndex = initialIndex + currentIndex = initialIndex self.showFaceDetection = showFaceDetection self.onDelete = onDelete self.onDismiss = onDismiss } - + // MARK: - Computed Properties + var currentPhoto: SecurePhoto { if !allPhotos.isEmpty { for (index, photo) in allPhotos.enumerated() { @@ -81,15 +82,16 @@ class PhotoDetailViewModel: ObservableObject { } } return allPhotos[currentIndex] - } else if let photo = photo { + } else if let photo { photo.isVisible = true return photo } else { // Should never happen but just in case - return SecurePhoto(filename: "", thumbnail: UIImage(), fullImage: UIImage(), metadata: [:]) + let emptyMetadata = PhotoMetadata(id: UUID().uuidString, fileSize: 0) + return SecurePhoto(id: UUID().uuidString, encryptedData: Data(), metadata: emptyMetadata) } } - + var displayedImage: UIImage { if isFaceDetectionActive, let modified = modifiedImage { return modified @@ -101,73 +103,79 @@ class PhotoDetailViewModel: ObservableObject { return image } } - + var canGoToPrevious: Bool { !allPhotos.isEmpty && currentIndex > 0 } - + var canGoToNext: Bool { !allPhotos.isEmpty && currentIndex < allPhotos.count - 1 } - + var hasFacesSelected: Bool { detectedFaces.contains { $0.isSelected } } - + var maskActionTitle: String { switch selectedMaskMode { + case .none: + "No Masking" case .blur: - return "Blur Selected Faces" + "Blur Selected Faces" case .pixelate: - return "Pixelate Selected Faces" + "Pixelate Selected Faces" case .blackout: - return "Blackout Selected Faces" + "Blackout Selected Faces" case .noise: - return "Apply Noise to Selected Faces" + "Apply Noise to Selected Faces" } } - + var maskActionVerb: String { switch selectedMaskMode { + case .none: + "remove masking from" case .blur: - return "blur" + "blur" case .pixelate: - return "pixelate" + "pixelate" case .blackout: - return "blackout" + "blackout" case .noise: - return "apply noise to" + "apply noise to" } } - + var maskButtonLabel: String { switch selectedMaskMode { + case .none: + "No Masking" case .blur: - return "Blur Faces" + "Blur Faces" case .pixelate: - return "Pixelate Faces" + 
"Pixelate Faces" case .blackout: - return "Blackout Faces" + "Blackout Faces" case .noise: - return "Apply Noise" + "Apply Noise" } } - + // MARK: - Face Detection Methods - + func detectFaces() { withAnimation { isFaceDetectionActive = true processingFaces = true } - + detectedFaces = [] modifiedImage = nil - + DispatchQueue.global(qos: .userInitiated).async { autoreleasepool { let imageToProcess = self.currentPhoto.fullImage - + self.faceDetector.detectFaces(in: imageToProcess) { faces in DispatchQueue.main.async { withAnimation { @@ -179,27 +187,27 @@ class PhotoDetailViewModel: ObservableObject { } } } - + func toggleFaceSelection(_ face: DetectedFace) { if let index = detectedFaces.firstIndex(where: { $0.id == face.id }) { - let updatedFaces = detectedFaces + var updatedFaces = detectedFaces updatedFaces[index].isSelected.toggle() detectedFaces = updatedFaces } } - + func applyFaceMasking() { withAnimation { processingFaces = true } - + DispatchQueue.global(qos: .userInitiated).async { autoreleasepool { let imageToProcess = self.currentPhoto.fullImage let facesToMask = self.detectedFaces let metadataCopy = self.currentPhoto.metadata let maskMode = self.selectedMaskMode - + // Process the image if let maskedImage = self.faceDetector.maskFaces(in: imageToProcess, faces: facesToMask, modes: [maskMode]) { // Save the masked image to the file system @@ -210,15 +218,15 @@ class PhotoDetailViewModel: ObservableObject { print("Error creating JPEG data") return } - + do { - _ = try self.secureFileManager.savePhoto(imageData, withMetadata: metadataCopy, isEdited: true, originalFilename: self.currentPhoto.filename) - + _ = try self.secureFileManager.savePhoto(imageData, withMetadata: [:], isEdited: true, originalFilename: self.currentPhoto.id) + DispatchQueue.main.async { withAnimation { self.modifiedImage = maskedImage self.processingFaces = false - + DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { withAnimation { self.isFaceDetectionActive = false @@ -243,44 +251,44 @@ class PhotoDetailViewModel: ObservableObject { } } } - + // MARK: - Navigation Methods - + func preloadAdjacentPhotos() { guard !allPhotos.isEmpty else { return } - + // Preload previous photo if available if currentIndex > 0 { let prevIndex = currentIndex - 1 let prevPhoto = allPhotos[prevIndex] prevPhoto.isVisible = true // Mark as visible for memory manager - + // Access thumbnail to trigger load but in a background thread DispatchQueue.global(qos: .userInitiated).async { _ = prevPhoto.thumbnail } } - + // Preload next photo if available if currentIndex < allPhotos.count - 1 { let nextIndex = currentIndex + 1 let nextPhoto = allPhotos[nextIndex] nextPhoto.isVisible = true // Mark as visible for memory manager - + // Access thumbnail to trigger load but in a background thread DispatchQueue.global(qos: .userInitiated).async { _ = nextPhoto.thumbnail } } } - + func navigateToPrevious() { print("🟢 PhotoDetailViewModel: navigateToPrevious called") if canGoToPrevious { // Clean up memory by releasing the full-size image of the current photo // but keep the thumbnail for the gallery view allPhotos[currentIndex].clearMemory(keepThumbnail: true) - + withAnimation(.spring(response: 0.6, dampingFraction: 0.8)) { currentIndex -= 1 // Reset rotation when changing photos @@ -295,21 +303,21 @@ class PhotoDetailViewModel: ObservableObject { offset = 0 isSwiping = false } - + // Preload adjacent photos for smoother navigation DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) { self.preloadAdjacentPhotos() } } } - + func 
navigateToNext() { print("🟢 PhotoDetailViewModel: navigateToNext called") if canGoToNext { // Clean up memory by releasing the full-size image of the current photo // but keep the thumbnail for the gallery view allPhotos[currentIndex].clearMemory(keepThumbnail: true) - + withAnimation(.spring(response: 0.6, dampingFraction: 0.8)) { currentIndex += 1 // Reset rotation when changing photos @@ -324,16 +332,16 @@ class PhotoDetailViewModel: ObservableObject { offset = 0 isSwiping = false } - + // Preload adjacent photos for smoother navigation DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) { self.preloadAdjacentPhotos() } } } - + // MARK: - Image Manipulation - + func resetZoomAndPan() { withAnimation(.spring()) { currentScale = 1.0 @@ -344,14 +352,14 @@ class PhotoDetailViewModel: ObservableObject { // Reset the last drag position outside of animation to avoid jumps lastDragPosition = .zero } - + func rotateImage(direction: Double) { // Reset any zoom or panning when rotating resetZoomAndPan() - + // Apply rotation imageRotation += direction - + // Normalize to 0-360 range if imageRotation >= 360 { imageRotation -= 360 @@ -359,26 +367,26 @@ class PhotoDetailViewModel: ObservableObject { imageRotation += 360 } } - + // MARK: - Photo Management - + func deletePhoto() { deleteCurrentPhoto() } - + func deleteCurrentPhoto() { print("deleteCurrentPhoto called - starting deletion process") // Get the photo to delete let photoToDelete = currentPhoto - + // Perform file deletion in a background thread DispatchQueue.global(qos: .userInitiated).async { do { // Actually delete the file - print("Attempting to delete file: \(photoToDelete.filename)") - try self.secureFileManager.deletePhoto(filename: photoToDelete.filename) + print("Attempting to delete file: \(photoToDelete.id)") + try self.secureFileManager.deletePhoto(filename: photoToDelete.id) print("File deletion successful") - + // All UI updates must happen on the main thread DispatchQueue.main.async { print("Calling onDelete callback") @@ -386,14 +394,14 @@ class PhotoDetailViewModel: ObservableObject { if let onDelete = self.onDelete { onDelete(photoToDelete) } - + // If we're displaying multiple photos, we can navigate to next/previous // instead of dismissing if there are still photos to display - if !self.allPhotos.isEmpty && self.allPhotos.count > 1 { + if !self.allPhotos.isEmpty, self.allPhotos.count > 1 { // Remove the deleted photo from our local array var updatedPhotos = self.allPhotos updatedPhotos.remove(at: self.currentIndex) - + if updatedPhotos.isEmpty { // If no photos left, call dismiss handler if let onDismiss = self.onDismiss { @@ -404,7 +412,7 @@ class PhotoDetailViewModel: ObservableObject { if self.currentIndex >= updatedPhotos.count { self.currentIndex = updatedPhotos.count - 1 } - + // Update our photos array self.allPhotos = updatedPhotos } @@ -417,7 +425,7 @@ class PhotoDetailViewModel: ObservableObject { } } catch { print("Error deleting photo: \(error.localizedDescription)") - + // Show error alert if needed - would be implemented with a published property DispatchQueue.main.async { // Here you could set an error state and show an alert @@ -425,13 +433,13 @@ class PhotoDetailViewModel: ObservableObject { } } } - + // MARK: - Sharing - + func sharePhoto() { // Get the current photo image let image = displayedImage - + // Find the root view controller guard let windowScene = UIApplication.shared.connectedScenes.first as? 
UIWindowScene, let window = windowScene.windows.first, @@ -440,34 +448,34 @@ class PhotoDetailViewModel: ObservableObject { print("Could not find root view controller") return } - + // Find the presented view controller to present from var currentController = rootViewController while let presented = currentController.presentedViewController { currentController = presented } - + // Convert image to data for sharing with UUID filename if let imageData = image.jpegData(compressionQuality: 0.9) { do { // Prepare photo for sharing with UUID filename let fileURL = try secureFileManager.preparePhotoForSharing(imageData: imageData) - + print("Sharing photo with UUID filename: \(fileURL.lastPathComponent)") - + // Create a UIActivityViewController to show the sharing options with the file let activityViewController = UIActivityViewController( activityItems: [fileURL], applicationActivities: nil ) - + // For iPad support if let popover = activityViewController.popoverPresentationController { popover.sourceView = window popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) popover.permittedArrowDirections = [] } - + // Present the share sheet DispatchQueue.main.async { currentController.present(activityViewController, animated: true) { @@ -476,20 +484,20 @@ class PhotoDetailViewModel: ObservableObject { } } catch { print("Error preparing photo for sharing: \(error.localizedDescription)") - + // Fallback to sharing just the image if file preparation fails let activityViewController = UIActivityViewController( activityItems: [image], applicationActivities: nil ) - + // For iPad support if let popover = activityViewController.popoverPresentationController { popover.sourceView = window popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) popover.permittedArrowDirections = [] } - + DispatchQueue.main.async { currentController.present(activityViewController, animated: true) { print("Share sheet presented successfully (image fallback)") @@ -502,14 +510,14 @@ class PhotoDetailViewModel: ObservableObject { activityItems: [image], applicationActivities: nil ) - + // For iPad support if let popover = activityViewController.popoverPresentationController { popover.sourceView = window popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) popover.permittedArrowDirections = [] } - + DispatchQueue.main.async { currentController.present(activityViewController, animated: true) { print("Share sheet presented successfully (image fallback)") @@ -517,32 +525,32 @@ class PhotoDetailViewModel: ObservableObject { } } } - + // MARK: - View Lifecycle - + func onAppear() { // When the detail view appears, ensure it's properly registered with memory manager if !allPhotos.isEmpty { // Current photo should be visible allPhotos[currentIndex].isVisible = true - + // Register all photos with the memory manager MemoryManager.shared.registerPhotos(allPhotos) - + // Preload adjacent photos for smoother navigation DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) { self.preloadAdjacentPhotos() } - } else if let photo = photo { + } else if let photo { // Single photo case photo.isVisible = true MemoryManager.shared.registerPhotos([photo]) } } - + func onDisappear() { // Clean up when view disappears - if let onDismiss = onDismiss { + if let onDismiss { onDismiss() } } diff --git a/SnapSafe/PhotoDetailViewImpl.swift b/SnapSafe/Views/PhotoDetailViewImpl.swift similarity index 87% rename from 
SnapSafe/PhotoDetailViewImpl.swift rename to SnapSafe/Views/PhotoDetailViewImpl.swift index ede5dc5..61cb30c 100644 --- a/SnapSafe/PhotoDetailViewImpl.swift +++ b/SnapSafe/Views/PhotoDetailViewImpl.swift @@ -1,12 +1,12 @@ // -// PhotoDetailView.swift +// PhotoDetailViewImpl.swift // SnapSafe // // Created by Bill Booth on 5/18/25. // -import SwiftUI import Foundation +import SwiftUI import UIKit // This file is now a forwarder to the refactored PhotoDetailView @@ -25,4 +25,4 @@ extension SnapSafe { } // Use a typealias to forward to the new implementation -typealias PhotoDetailView = PhotoDetailView_Impl \ No newline at end of file +typealias PhotoDetailView = PhotoDetailView_Impl diff --git a/SnapSafe/PrivacyShield.swift b/SnapSafe/Views/PrivacyShieldView.swift similarity index 92% rename from SnapSafe/PrivacyShield.swift rename to SnapSafe/Views/PrivacyShieldView.swift index 4e17a31..371bae3 100644 --- a/SnapSafe/PrivacyShield.swift +++ b/SnapSafe/Views/PrivacyShieldView.swift @@ -1,5 +1,5 @@ // -// PrivacyShield.swift +// PrivacyShieldView.swift // SnapSafe // // Created by Bill Booth on 5/22/25. @@ -15,24 +15,24 @@ struct PrivacyShield: View { Color.black .opacity(0.98) .edgesIgnoringSafeArea(.all) - + VStack(spacing: 30) { // App logo/icon Image(systemName: "lock.shield.fill") .font(.system(size: 100)) .foregroundColor(.white) .padding(.top, 60) - + // App name Text("SnapSafe") .font(.system(size: 32, weight: .bold)) .foregroundColor(.white) - + // Privacy message Text("The camera app that minds its own business.") .font(.system(size: 20, weight: .medium)) .foregroundColor(.gray) - + Spacer() } .frame(maxWidth: .infinity, maxHeight: .infinity) @@ -50,7 +50,7 @@ struct ObscureWhenInactive: ViewModifier { // Main content that will be obscured when inactive content .blur(radius: obscured ? 20 : 0) - + // Privacy shield overlay if obscured { PrivacyShield() @@ -78,16 +78,13 @@ extension View { #Preview { ZStack { - // Sample background content VStack { Text("Sensitive Content") .font(.largeTitle) - + Image(systemName: "person.crop.square") .font(.system(size: 100)) } - - // Preview with privacy shield active PrivacyShield() } } diff --git a/SnapSafe/Views/ScreenRecordingBlockerView.swift b/SnapSafe/Views/ScreenRecordingBlockerView.swift new file mode 100644 index 0000000..2538e6a --- /dev/null +++ b/SnapSafe/Views/ScreenRecordingBlockerView.swift @@ -0,0 +1,62 @@ +// +// ScreenRecordingBlockerView.swift +// SnapSafe +// +// Created by Bill Booth on 6/10/25. 
+// + +import SwiftUI + +// View shown when screen recording is detected +struct ScreenRecordingBlockerView: View { + var body: some View { + ZStack { + // Background + Color.black + .edgesIgnoringSafeArea(.all) + + VStack(spacing: 30) { + // Warning icon + Image(systemName: "eye.slash") + .font(.system(size: 80)) + .foregroundColor(.red) + .padding(.top, 60) + + // Warning message + Text("Screen Recording Detected") + .font(.system(size: 24, weight: .bold)) + .foregroundColor(.white) + + Text("For privacy and security reasons, screen recording is not allowed in SnapSafe.") + .font(.system(size: 16)) + .foregroundColor(.gray) + .multilineTextAlignment(.center) + .padding(.horizontal, 40) + + Text("Please stop recording to continue using SnapSafe.") + .font(.system(size: 16, weight: .semibold)) + .foregroundColor(.white) + .padding(.top, 20) + + Spacer() + } + .frame(maxWidth: .infinity, maxHeight: .infinity) + } + } +} + +#Preview { + ZStack { + // Sample content + VStack { + Text("Sensitive Content") + .font(.largeTitle) + + Image(systemName: "person.crop.square") + .font(.system(size: 100)) + } + + // Preview with screen recording blocker + ScreenRecordingBlockerView() + } +} diff --git a/SnapSafe/Views/ScreenshotTakenView.swift b/SnapSafe/Views/ScreenshotTakenView.swift new file mode 100644 index 0000000..122e893 --- /dev/null +++ b/SnapSafe/Views/ScreenshotTakenView.swift @@ -0,0 +1,34 @@ +// +// ScreenshotTakenView.swift +// SnapSafe +// +// Created by Bill Booth on 6/10/25. +// + +import SwiftUI + +// View shown when a screenshot is taken +struct ScreenshotTakenView: View { + var body: some View { + VStack { + HStack(spacing: 15) { + Image(systemName: "exclamationmark.triangle.fill") + .foregroundColor(.yellow) + .font(.system(size: 24)) + + Text("Screenshot Captured") + .font(.system(size: 16, weight: .semibold)) + .foregroundColor(.white) + + Spacer() + } + .padding() + .background(Color.black.opacity(0.8)) + .cornerRadius(10) + .padding(.horizontal) + .padding(.top, 10) + + Spacer() + } + } +} diff --git a/SnapSafe/SecureGalleryView.swift b/SnapSafe/Views/SecureGalleryView.swift similarity index 67% rename from SnapSafe/SecureGalleryView.swift rename to SnapSafe/Views/SecureGalleryView.swift index 826042f..df33298 100644 --- a/SnapSafe/SecureGalleryView.swift +++ b/SnapSafe/Views/SecureGalleryView.swift @@ -21,24 +21,21 @@ struct EmptyGalleryView: View { } } - // Gallery view to display the stored photos struct SecureGalleryView: View { @State private var photos: [SecurePhoto] = [] @State private var selectedPhoto: SecurePhoto? @AppStorage("showFaceDetection") private var showFaceDetection = true // Using AppStorage to share with Settings @State private var isSelecting: Bool = false - @State private var selectedPhotoIds = Set() + @State private var selectedPhotoIds = Set() @State private var showDeleteConfirmation = false - @State private var isShowingImagePicker = false - @State private var importedImage: UIImage? // Legacy support @State private var pickerItems: [PhotosPickerItem] = [] @State private var isImporting: Bool = false @State private var importProgress: Float = 0 - + // Filter state @State private var selectedFilter: PhotoFilter = .all - + // Decoy selection mode @State private var isSelectingDecoys: Bool = false @State private var maxDecoys: Int = 10 @@ -47,7 +44,7 @@ struct SecureGalleryView: View { private let secureFileManager = SecureFileManager() @Environment(\.dismiss) private var dismiss - + // Callback for dismissing the gallery let onDismiss: (() -> Void)? 
@@ -68,30 +65,21 @@ struct SecureGalleryView: View { } // Computed property to get current decoy photo count - private var currentDecoyCount: Int { - photos.filter { $0.isDecoy }.count - } - +// private var currentDecoyCount: Int { +// photos.count(where: { $0.isDecoy }) +// } + // Computed property to get filtered photos private var filteredPhotos: [SecurePhoto] { switch selectedFilter { case .all: - return photos + photos case .imported: - return photos.filter { $0.metadata["imported"] as? Bool == true } + photos.filter { _ in false } // TODO: Add imported flag to PhotoMetadata case .edited: - return photos.filter { $0.metadata["isEdited"] as? Bool == true } + photos.filter { _ in false } // TODO: Add edited flag to PhotoMetadata case .withLocation: - return photos.filter { - // Check for GPS data in metadata using Core Graphics constants - guard let gpsData = $0.metadata[String(kCGImagePropertyGPSDictionary)] as? [String: Any] else { return false } - - // Verify we have either latitude or longitude data - let hasLatitude = gpsData[String(kCGImagePropertyGPSLatitude)] != nil - let hasLongitude = gpsData[String(kCGImagePropertyGPSLongitude)] != nil - - return hasLatitude || hasLongitude - } + photos.filter { _ in false } // TODO: Add location data to PhotoMetadata } } @@ -99,16 +87,16 @@ struct SecureGalleryView: View { private var selectedPhotos: [UIImage] { photos .filter { selectedPhotoIds.contains($0.id) } - .map { $0.fullImage } + .map(\.fullImage) } var body: some View { ZStack { Group { if photos.isEmpty { - EmptyGalleryView(onDismiss: { + EmptyGalleryView(onDismiss: { onDismiss?() - dismiss() + dismiss() }) } else { photosGridView @@ -158,7 +146,7 @@ struct SecureGalleryView: View { .foregroundColor(.blue) } } - + // Action buttons in the trailing position (simplified for top toolbar) ToolbarItem(placement: .navigationBarTrailing) { HStack(spacing: 16) { @@ -167,7 +155,7 @@ struct SecureGalleryView: View { Text("\(selectedPhotoIds.count)/\(maxDecoys)") .font(.caption) .foregroundColor(selectedPhotoIds.count > maxDecoys ? 
.red : .secondary) - + Button("Save") { if selectedPhotoIds.count > maxDecoys { showDecoyLimitWarning = true @@ -190,7 +178,7 @@ struct SecureGalleryView: View { Button("Select Photos") { isSelecting = true } - + Menu("Filter Photos") { ForEach(PhotoFilter.allCases, id: \.self) { filter in Button(action: { @@ -216,7 +204,7 @@ struct SecureGalleryView: View { .toolbar { // Bottom toolbar with main action buttons ToolbarItemGroup(placement: .bottomBar) { - if !isSelectingDecoys && !isSelecting { + if !isSelectingDecoys, !isSelecting { // Normal mode: Import and Refresh buttons PhotosPicker(selection: $pickerItems, matching: .images, photoLibrary: .shared()) { Label("Import", systemImage: "square.and.arrow.down") @@ -277,24 +265,24 @@ struct SecureGalleryView: View { } } } - + Spacer() - + // Button(action: loadPhotos) { // Label("Refresh", systemImage: "arrow.clockwise") // } - } else if isSelecting && hasSelection && !isSelectingDecoys { + } else if isSelecting, hasSelection, !isSelectingDecoys { // Selection mode: Delete and Share buttons - Button(action: { + Button(action: { print("Delete button pressed in gallery view, selected photos: \(selectedPhotoIds.count)") showDeleteConfirmation = true }) { Label("Delete", systemImage: "trash") .foregroundColor(.red) } - + Spacer() - + Button(action: shareSelectedPhotos) { Label("Share", systemImage: "square.and.arrow.up") } @@ -308,74 +296,75 @@ struct SecureGalleryView: View { } } .fullScreenCover(item: $selectedPhoto) { photo in - // Find the index of the selected photo in the photos array - if let initialIndex = filteredPhotos.firstIndex(where: { $0.id == photo.id }) { - EnhancedPhotoDetailView( - allPhotos: filteredPhotos, - initialIndex: initialIndex, - showFaceDetection: showFaceDetection, - onDelete: { _ in loadPhotos() }, - onDismiss: { - // Clean up memory for all loaded full-size images when returning to gallery - for photo in self.photos { - photo.clearMemory(keepThumbnail: true) - } - // Trigger garbage collection - MemoryManager.shared.checkMemoryUsage() - } - ) - } else { - // Fallback if photo not found in array - PhotoDetailView( - photo: photo, - showFaceDetection: showFaceDetection, - onDelete: { _ in loadPhotos() }, - onDismiss: { + // Find the index of the selected photo in the photos array + if let initialIndex = filteredPhotos.firstIndex(where: { $0.id == photo.id }) { + EnhancedPhotoDetailView( + allPhotos: filteredPhotos, + initialIndex: initialIndex, + showFaceDetection: showFaceDetection, + onDelete: { _ in loadPhotos() }, + onDismiss: { + // Clean up memory for all loaded full-size images when returning to gallery + for photo in photos { photo.clearMemory(keepThumbnail: true) - // Trigger garbage collection - MemoryManager.shared.checkMemoryUsage() } - ) - } - } - .alert( - "Delete Photo\(selectedPhotoIds.count > 1 ? "s" : "")", - isPresented: $showDeleteConfirmation, - actions: { - Button("Cancel", role: .cancel) {} - Button("Delete", role: .destructive) { - print("Delete confirmation button pressed, deleting \(selectedPhotoIds.count) photos") - deleteSelectedPhotos() + // Trigger garbage collection + MemoryManager.shared.checkMemoryUsage() } - }, - message: { - Text("Are you sure you want to delete \(selectedPhotoIds.count) photo\(selectedPhotoIds.count > 1 ? "s" : "")? This action cannot be undone.") - } - ) - .alert( - "Too Many Decoys", - isPresented: $showDecoyLimitWarning, - actions: { - Button("OK", role: .cancel) {} - }, - message: { - Text("You can select a maximum of \(maxDecoys) decoy photos. 
Please deselect some photos before saving.") - } - ) - .alert( - "Save Decoy Selection", - isPresented: $showDecoyConfirmation, - actions: { - Button("Cancel", role: .cancel) {} - Button("Save") { - saveDecoySelections() + ) + } else { + // Fallback if photo not found in array + PhotoDetailView( + photo: photo, + showFaceDetection: showFaceDetection, + onDelete: { _ in loadPhotos() }, + onDismiss: { + photo.clearMemory(keepThumbnail: true) + // Trigger garbage collection + MemoryManager.shared.checkMemoryUsage() } - }, - message: { - Text("Are you sure you want to save these \(selectedPhotoIds.count) photos as decoys? These will be shown when the emergency PIN is entered.") - } - ) + ) + } } + .alert( + "Delete Photo\(selectedPhotoIds.count > 1 ? "s" : "")", + isPresented: $showDeleteConfirmation, + actions: { + Button("Cancel", role: .cancel) {} + Button("Delete", role: .destructive) { + print("Delete confirmation button pressed, deleting \(selectedPhotoIds.count) photos") + deleteSelectedPhotos() + } + }, + message: { + Text("Are you sure you want to delete \(selectedPhotoIds.count) photo\(selectedPhotoIds.count > 1 ? "s" : "")? This action cannot be undone.") + } + ) + .alert( + "Too Many Decoys", + isPresented: $showDecoyLimitWarning, + actions: { + Button("OK", role: .cancel) {} + }, + message: { + Text("You can select a maximum of \(maxDecoys) decoy photos. Please deselect some photos before saving.") + } + ) + .alert( + "Save Decoy Selection", + isPresented: $showDecoyConfirmation, + actions: { + Button("Cancel", role: .cancel) {} + Button("Save") { + saveDecoySelections() + } + }, + message: { + Text("Are you sure you want to save these \(selectedPhotoIds.count) photos as decoys? These will be shown when the emergency PIN is entered.") + } + ) + } + // } // Photo grid subview @@ -413,7 +402,7 @@ struct SecureGalleryView: View { let filename = await withCheckedContinuation { continuation in DispatchQueue.global(qos: .userInitiated).async { do { - let filename = try self.secureFileManager.savePhoto(imageData, withMetadata: metadata) + let filename = try secureFileManager.savePhoto(imageData, withMetadata: metadata) continuation.resume(returning: filename) } catch { print("Error saving imported photo: \(error.localizedDescription)") @@ -428,26 +417,26 @@ struct SecureGalleryView: View { } // Legacy method for backward compatibility - private func handleImportedImage() { - guard let image = importedImage else { return } - - // Convert image to data - guard let imageData = image.jpegData(compressionQuality: 0.8) else { - print("Failed to convert image to data") - return - } - - // Process the image data using the new method - Task { - await processImportedImageData(imageData) - - // Reload photos to show the new one - DispatchQueue.main.async { - self.importedImage = nil - self.loadPhotos() - } - } - } +// private func handleImportedImage() { +// guard let image = importedImage else { return } +// +// // Convert image to data +// guard let imageData = image.jpegData(compressionQuality: 0.8) else { +// print("Failed to convert image to data") +// return +// } +// +// // Process the image data using the new method +// Task { +// await processImportedImageData(imageData) +// +// // Reload photos to show the new one +// DispatchQueue.main.async { +// importedImage = nil +// loadPhotos() +// } +// } +// } // MARK: - Action methods @@ -464,7 +453,7 @@ struct SecureGalleryView: View { selectedPhotoIds.remove(photo.id) } else { // If we're selecting decoys and already at the limit, don't allow 
more selections - if isSelectingDecoys && selectedPhotoIds.count >= maxDecoys { + if isSelectingDecoys, selectedPhotoIds.count >= maxDecoys { showDecoyLimitWarning = true return } @@ -478,68 +467,103 @@ struct SecureGalleryView: View { } // Utility function to fix image orientation - private func fixImageOrientation(_ image: UIImage) -> UIImage { - // If the orientation is already correct, return the image as is - if image.imageOrientation == .up { - return image - } - - // Create a new CGContext with proper orientation - UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale) - image.draw(in: CGRect(origin: .zero, size: image.size)) - let normalizedImage = UIGraphicsGetImageFromCurrentImageContext()! - UIGraphicsEndImageContext() - - return normalizedImage - } +// private func fixImageOrientation(_ image: UIImage) -> UIImage { +// // If the orientation is already correct, return the image as is +// if image.imageOrientation == .up { +// return image +// } +// +// // Create a new CGContext with proper orientation +// UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale) +// image.draw(in: CGRect(origin: .zero, size: image.size)) +// let normalizedImage = UIGraphicsGetImageFromCurrentImageContext()! +// UIGraphicsEndImageContext() +// +// return normalizedImage +// } private func loadPhotos() { // Load photos in the background thread to avoid UI blocking DispatchQueue.global(qos: .userInitiated).async { do { - // Only load metadata and file URLs, not actual image data - let photoMetadata = try self.secureFileManager.loadAllPhotoMetadata() - - // Create photo objects that will load their images on demand - var loadedPhotos = photoMetadata.map { filename, metadata, fileURL in - SecurePhoto( - filename: filename, - metadata: metadata, - fileURL: fileURL - ) - } - - // We'll update on main thread after sorting + // Load metadata and file URLs from legacy system + let photoMetadata = try secureFileManager.loadAllPhotoMetadata() + + // Convert legacy metadata to SecurePhoto objects + var loadedPhotos: [SecurePhoto] = [] + + for (filename, metadataDict, fileURL) in photoMetadata { + do { + // Load the unencrypted photo data from legacy system + let imageData = try Data(contentsOf: fileURL) + + // Convert legacy metadata dictionary to PhotoMetadata struct + let creationDate = Date(timeIntervalSince1970: metadataDict["creationDate"] as? TimeInterval ?? Date().timeIntervalSince1970) + let modificationDate = Date(timeIntervalSince1970: metadataDict["modificationDate"] as? TimeInterval ?? Date().timeIntervalSince1970) + let fileSize = metadataDict["fileSize"] as? Int ?? imageData.count + let isDecoy = metadataDict["isDecoy"] as? Bool ?? false + + // Create PhotoMetadata struct + let metadata = PhotoMetadata( + id: filename, + creationDate: creationDate, + modificationDate: modificationDate, + fileSize: fileSize, + faces: [], // TODO: Load faces from metadata if available + maskMode: .none, // TODO: Load mask mode from metadata if available + isDecoy: isDecoy + ) + + // Create UIImage and generate thumbnail + guard let image = UIImage(data: imageData) else { + print("Invalid image data for \(filename)") + continue + } - // Sort photos by creation date (oldest at top, newest at bottom) - loadedPhotos.sort { photo1, photo2 in - // Get creation dates from metadata - let date1 = photo1.metadata["creationDate"] as? Double ?? 0 - let date2 = photo2.metadata["creationDate"] as? Double ?? 
0 + // Generate thumbnail + let thumbnailSize = CGSize(width: 200, height: 200) + let renderer = UIGraphicsImageRenderer(size: thumbnailSize) + let thumbnail = renderer.image { _ in + image.draw(in: CGRect(origin: .zero, size: thumbnailSize)) + } - // Sort by date (descending - newest first, which is more typical for photo galleries) - return date2 < date1 + // Create SecurePhoto object with cached images (legacy system uses unencrypted data) + let securePhoto = SecurePhoto( + id: filename, + encryptedData: Data(), // Empty since legacy system doesn't encrypt + metadata: metadata, + cachedImage: image, + cachedThumbnail: thumbnail + ) + + loadedPhotos.append(securePhoto) + } catch { + print("Error loading photo \(filename): \(error.localizedDescription)") + } } + // Sort by creation date (newest first) + loadedPhotos.sort { $0.metadata.creationDate > $1.metadata.creationDate } + // Update UI on the main thread DispatchQueue.main.async { // First clear memory of existing photos if we're refreshing MemoryManager.shared.freeAllMemory() // Update the photos array - self.photos = loadedPhotos + photos = loadedPhotos // If in decoy selection mode, pre-select existing decoy photos - if self.isSelectingDecoys { + if isSelectingDecoys { // Find and select all photos that are already marked as decoys for photo in loadedPhotos { if photo.isDecoy { - self.selectedPhotoIds.insert(photo.id) + selectedPhotoIds.insert(photo.id) } } // Enable selection mode - self.isSelecting = true + isSelecting = true } // Register these photos with the memory manager @@ -551,44 +575,44 @@ struct SecureGalleryView: View { } } - private func deletePhoto(_ photo: SecurePhoto) { - // Perform file deletion in background thread - DispatchQueue.global(qos: .userInitiated).async { - do { - try self.secureFileManager.deletePhoto(filename: photo.filename) - - // Update UI on main thread - DispatchQueue.main.async { - // Remove from the local array - withAnimation { - self.photos.removeAll { $0.id == photo.id } - if self.selectedPhotoIds.contains(photo.id) { - self.selectedPhotoIds.remove(photo.id) - } - } - } - } catch { - print("Error deleting photo: \(error.localizedDescription)") - } - } - } +// private func deletePhoto(_ photo: SecurePhoto) { +// // Perform file deletion in background thread +// DispatchQueue.global(qos: .userInitiated).async { +// do { +// try secureFileManager.deletePhoto(filename: photo.id) +// +// // Update UI on main thread +// DispatchQueue.main.async { +// // Remove from the local array +// withAnimation { +// photos.removeAll { $0.id == photo.id } +// if selectedPhotoIds.contains(photo.id) { +// selectedPhotoIds.remove(photo.id) +// } +// } +// } +// } catch { +// print("Error deleting photo: \(error.localizedDescription)") +// } +// } +// } private func deleteSelectedPhotos() { print("deleteSelectedPhotos() called") - + // Create a local copy of the photos to delete let photosToDelete = selectedPhotoIds.compactMap { id in photos.first(where: { $0.id == id }) } - - print("Will delete \(photosToDelete.count) photos: \(photosToDelete.map { $0.filename }.joined(separator: ", "))") + + print("Will delete \(photosToDelete.count) photos: \(photosToDelete.map(\.id).joined(separator: ", "))") // Clear selection and exit selection mode immediately // for better UI responsiveness DispatchQueue.main.async { print("Clearing selection UI state") - self.selectedPhotoIds.removeAll() - self.isSelecting = false + selectedPhotoIds.removeAll() + isSelecting = false } // Process deletions in a background queue @@ -600,12 
+624,12 @@ struct SecureGalleryView: View { for photo in photosToDelete { group.enter() do { - print("Attempting to delete: \(photo.filename)") - try self.secureFileManager.deletePhoto(filename: photo.filename) - print("Successfully deleted: \(photo.filename)") + print("Attempting to delete: \(photo.id)") + try secureFileManager.deletePhoto(filename: photo.id) + print("Successfully deleted: \(photo.id)") group.leave() } catch { - print("Error deleting photo \(photo.filename): \(error.localizedDescription)") + print("Error deleting photo \(photo.id): \(error.localizedDescription)") group.leave() } } @@ -613,23 +637,23 @@ struct SecureGalleryView: View { // After all deletions are complete, update the UI group.notify(queue: .main) { print("All deletions complete, updating UI") - + // Count photos before removal - let initialCount = self.photos.count - + let initialCount = photos.count + // Remove deleted photos from our array withAnimation { - self.photos.removeAll { photo in + photos.removeAll { photo in let shouldRemove = photosToDelete.contains { $0.id == photo.id } if shouldRemove { - print("Removing photo \(photo.filename) from UI") + print("Removing photo \(photo.id) from UI") } return shouldRemove } } - + // Verify removal - let finalCount = self.photos.count + let finalCount = photos.count let removedCount = initialCount - finalCount print("UI update complete: removed \(removedCount) photos. Gallery now has \(finalCount) photos.") } @@ -640,18 +664,17 @@ struct SecureGalleryView: View { // Save selected photos as decoys private func saveDecoySelections() { // First, un-mark any previously tagged decoys that aren't currently selected - for photo in photos { - let isCurrentlySelected = selectedPhotoIds.contains(photo.id) - - // If it's currently a decoy but not selected, unmark it - if photo.isDecoy && !isCurrentlySelected { - photo.setDecoyStatus(false) - } - // If it's selected but not a decoy, mark it - else if isCurrentlySelected && !photo.isDecoy { - photo.setDecoyStatus(true) - } - } +// for photo in photos { +// let isCurrentlySelected = selectedPhotoIds.contains(photo.id) + + // TODO: Implement decoy status update with new repository pattern + // if photo.isDecoy && !isCurrentlySelected { + // photo.setDecoyStatus(false) + // } + // else if isCurrentlySelected && !photo.isDecoy { + // photo.setDecoyStatus(true) + // } +// } // Reset selection and exit decoy mode isSelectingDecoys = false @@ -663,12 +686,11 @@ struct SecureGalleryView: View { dismiss() } - private func shareSelectedPhotos() { // Get all the selected photos let images = selectedPhotos guard !images.isEmpty else { return } - + // Find the root view controller guard let windowScene = UIApplication.shared.connectedScenes.first as? 
UIWindowScene, let window = windowScene.windows.first, @@ -677,16 +699,16 @@ struct SecureGalleryView: View { print("Could not find root view controller") return } - + // Find the presented view controller to present from var currentController = rootViewController while let presented = currentController.presentedViewController { currentController = presented } - + // Create and prepare temporary files with UUID filenames var filesToShare: [URL] = [] - + for image in images { if let imageData = image.jpegData(compressionQuality: 0.9) { do { @@ -698,7 +720,7 @@ struct SecureGalleryView: View { } } } - + // Share files if any were successfully prepared if !filesToShare.isEmpty { // Create a UIActivityViewController to share the files @@ -706,14 +728,14 @@ struct SecureGalleryView: View { activityItems: filesToShare, applicationActivities: nil ) - + // For iPad support if let popover = activityViewController.popoverPresentationController { popover.sourceView = window popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) popover.permittedArrowDirections = [] } - + // Present the share sheet DispatchQueue.main.async { currentController.present(activityViewController, animated: true) { @@ -723,19 +745,19 @@ struct SecureGalleryView: View { } else { // Fallback to sharing just the images if file preparation failed for all print("Falling back to sharing images directly") - + let activityViewController = UIActivityViewController( activityItems: images, applicationActivities: nil ) - + // For iPad support if let popover = activityViewController.popoverPresentationController { popover.sourceView = window popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) popover.permittedArrowDirections = [] } - + DispatchQueue.main.async { currentController.present(activityViewController, animated: true, completion: nil) } diff --git a/SnapSafe/SettingsView.swift b/SnapSafe/Views/SettingsView.swift similarity index 93% rename from SnapSafe/SettingsView.swift rename to SnapSafe/Views/SettingsView.swift index d13548b..7961a1f 100644 --- a/SnapSafe/SettingsView.swift +++ b/SnapSafe/Views/SettingsView.swift @@ -15,7 +15,7 @@ import SwiftUI struct SettingsView: View { // Appearance setting @AppStorage("appearanceMode") private var appearanceMode: AppearanceMode = .system - + // Sharing options @State private var sanitizeFileName = true @State private var sanitizeMetadata = true @@ -42,10 +42,10 @@ struct SettingsView: View { @State private var locationPermissionStatus = "Not Determined" @StateObject private var locationManager = LocationManager.shared @State private var includeLocationData = false - + // PIN Manager @ObservedObject private var pinManager = PINManager.shared - + @Environment(\.openURL) private var openURL // Dependency injections (commented until implementations are ready) @@ -55,8 +55,6 @@ struct SettingsView: View { var body: some View { NavigationView { List { - - // SHARING SECTION Section(header: Text("Sharing Options")) { Toggle("Sanitize File Name", isOn: $sanitizeFileName) @@ -105,7 +103,7 @@ struct SettingsView: View { } let permissionNotDetermined = locationManager.authorizationStatus == .notDetermined - + Button { if permissionNotDetermined { locationManager.requestLocationPermission() @@ -114,11 +112,11 @@ struct SettingsView: View { openURL(url) } } - } label: { - Text(permissionNotDetermined - ? 
"Request Location Permission" - : "Manage Permission in Settings") - } + } label: { + Text(permissionNotDetermined + ? "Request Location Permission" + : "Manage Permission in Settings") + } Text("When enabled, location data will be embedded in newly captured photos. Location requires permission and GPS availability.") .font(.caption) @@ -133,7 +131,7 @@ struct SettingsView: View { } } .pickerStyle(.menu) - + Text("Choose how the app appears. System follows your device's appearance setting.") .font(.caption) .foregroundColor(.secondary) @@ -179,7 +177,7 @@ struct SettingsView: View { // TODO: Update auth manager // authManager.isBiometricEnabled = newValue } - + Toggle("Require PIN when app resumes", isOn: $requirePINOnResume) .onChange(of: requirePINOnResume) { _, newValue in print("Require PIN on resume: \(newValue)") @@ -198,18 +196,18 @@ struct SettingsView: View { if newValue.count > 4 { appPIN = String(newValue.prefix(4)) } - + // Only allow numbers - if !newValue.allSatisfy({ $0.isNumber }) { - appPIN = newValue.filter { $0.isNumber } + if !newValue.allSatisfy(\.isNumber) { + appPIN = newValue.filter(\.isNumber) } - + // Clear any previous errors when typing if showPINError { showPINError = false } } - + SecureField("Confirm New PIN", text: $confirmAppPIN) .keyboardType(.numberPad) .autocorrectionDisabled(true) @@ -219,25 +217,25 @@ struct SettingsView: View { if newValue.count > 4 { confirmAppPIN = String(newValue.prefix(4)) } - + // Only allow numbers - if !newValue.allSatisfy({ $0.isNumber }) { - confirmAppPIN = newValue.filter { $0.isNumber } + if !newValue.allSatisfy(\.isNumber) { + confirmAppPIN = newValue.filter(\.isNumber) } - + // Clear any previous errors when typing if showPINError { showPINError = false } } - + if showPINError { Text(pinErrorMessage) .foregroundColor(.red) .font(.caption) .padding(.vertical, 5) } - + if showPINSuccess { Text("PIN updated successfully!") .foregroundColor(.green) @@ -281,7 +279,6 @@ struct SettingsView: View { .font(.caption) .foregroundColor(.secondary) } - } .navigationTitle("Settings") .navigationBarTitleDisplayMode(.inline) @@ -289,7 +286,7 @@ struct SettingsView: View { .onAppear { // Initialize includeLocationData from the LocationManager includeLocationData = locationManager.shouldIncludeLocationData - + // Initialize PIN on resume setting requirePINOnResume = pinManager.requirePINOnResume } @@ -339,39 +336,39 @@ struct SettingsView: View { // Reset any previous feedback showPINError = false showPINSuccess = false - + // Validate PIN if appPIN.count != 4 { showPINError = true pinErrorMessage = "PIN must be 4 digits" return } - + // Check if PINs match if appPIN != confirmAppPIN { showPINError = true pinErrorMessage = "PINs do not match" return } - + // Update the PIN using PIN manager pinManager.setPIN(appPIN) - + // Show success message showPINSuccess = true - + // Clear the fields appPIN = "" confirmAppPIN = "" - + // Clear success message after delay DispatchQueue.main.asyncAfter(deadline: .now() + 3) { - self.showPINSuccess = false + showPINSuccess = false } - + print("App PIN has been updated") } - + private func resetSecuritySettings() { // Reset all security settings to default values biometricEnabled = false diff --git a/SnapSafe/localize.xcstrings b/SnapSafe/localize.xcstrings new file mode 100644 index 0000000..fa56d16 --- /dev/null +++ b/SnapSafe/localize.xcstrings @@ -0,0 +1,7 @@ +{ + "sourceLanguage" : "en", + "strings" : { + + }, + "version" : "1.0" +} \ No newline at end of file diff --git 
a/SnapSafeTests/CameraModelTests.swift b/SnapSafeTests/CameraModelTests.swift index 4ac5b93..5f3284f 100644 --- a/SnapSafeTests/CameraModelTests.swift +++ b/SnapSafeTests/CameraModelTests.swift @@ -5,31 +5,30 @@ // Created by Bill Booth on 5/25/25. // -import XCTest import AVFoundation import Combine @testable import SnapSafe +import XCTest class CameraModelTests: XCTestCase { - private var cameraModel: CameraModel! private var cancellables: Set<AnyCancellable>! - + override func setUp() { super.setUp() cameraModel = CameraModel() cancellables = Set<AnyCancellable>() } - + override func tearDown() { cancellables?.removeAll() cancellables = nil cameraModel = nil super.tearDown() } - + // MARK: - Initialization Tests - + /// Tests that CameraModel initializes with correct default values /// Assertion: Should have proper initial state for all camera properties func testInit_SetsCorrectDefaults() { @@ -47,7 +46,7 @@ class CameraModelTests: XCTestCase { XCTAssertEqual(cameraModel.flashMode, .auto, "Flash mode should default to auto") XCTAssertEqual(cameraModel.cameraPosition, .back, "Should default to back camera") } - + /// Tests that CameraModel sets up foreground notification listener correctly /// Assertion: Should listen for app entering foreground to reset zoom level func testInit_SetsUpForegroundNotificationListener() { @@ -55,52 +54,52 @@ class CameraModelTests: XCTestCase { // We can't easily test NotificationCenter observer setup directly XCTAssertNotNil(cameraModel, "Camera model should initialize without issues") } - + // MARK: - Permission Handling Tests - + /// Tests that checkPermissions handles simulator environment correctly /// Assertion: Should grant permission immediately in simulator debug builds func testCheckPermissions_HandlesSimulatorCorrectly() { #if DEBUG && targetEnvironment(simulator) - let expectation = XCTestExpectation(description: "Permission should be granted in simulator") - - cameraModel.$isPermissionGranted - .dropFirst() - .sink { isGranted in - if isGranted { - expectation.fulfill() + let expectation = XCTestExpectation(description: "Permission should be granted in simulator") + + cameraModel.$isPermissionGranted + .dropFirst() + .sink { isGranted in + if isGranted { + expectation.fulfill() + } } - } - .store(in: &cancellables) - - cameraModel.checkPermissions() - - wait(for: [expectation], timeout: 3.0) + .store(in: &cancellables) + + cameraModel.checkPermissions() + + wait(for: [expectation], timeout: 3.0) #else - // On real device, we can't reliably test permission states without user interaction - XCTAssertTrue(true, "Skipping permission test on real device") + // On real device, we can't reliably test permission states without user interaction + XCTAssertTrue(true, "Skipping permission test on real device") #endif } - + /// Tests that checkPermissions handles authorized status correctly /// Assertion: Should set permission granted when already authorized func testCheckPermissions_HandlesAuthorizedStatus() { // Note: This test is limited because we can't control AVCaptureDevice authorization status // In a production app, you might use dependency injection to test this - + cameraModel.checkPermissions() - + // Test completes without crashing - actual permission depends on device/simulator state XCTAssertNotNil(cameraModel, "Should handle permission check without crashing") } - + // MARK: - Zoom Control Tests - + /// Tests that zoom factor can be updated correctly /// Assertion: Should update zoom factor and validate bounds func testZoomFactor_UpdatesCorrectly() { let expectation =
XCTestExpectation(description: "Zoom factor should update") - + cameraModel.$zoomFactor .dropFirst() .sink { zoomFactor in @@ -108,20 +107,20 @@ class CameraModelTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + cameraModel.zoomFactor = 2.0 - + wait(for: [expectation], timeout: 1.0) } - + /// Tests that resetZoomLevel resets zoom to 1.0 /// Assertion: Should reset zoom factor to default value func testResetZoomLevel_ResetsToDefault() { let expectation = XCTestExpectation(description: "Zoom should reset to 1.0") - + // First set zoom to non-default value cameraModel.zoomFactor = 3.0 - + cameraModel.$zoomFactor .dropFirst() .sink { zoomFactor in @@ -130,30 +129,30 @@ class CameraModelTests: XCTestCase { } } .store(in: &cancellables) - + cameraModel.resetZoomLevel() - + wait(for: [expectation], timeout: 1.0) } - + /// Tests that zoom bounds are validated correctly /// Assertion: Should maintain zoom within min/max bounds func testZoomBounds_ValidatedCorrectly() { // Test that zoom factor stays within bounds let minZoom = cameraModel.minZoom let maxZoom = cameraModel.maxZoom - + XCTAssertLessThanOrEqual(cameraModel.zoomFactor, maxZoom, "Zoom should not exceed max") XCTAssertGreaterThanOrEqual(cameraModel.zoomFactor, minZoom, "Zoom should not go below min") } - + // MARK: - Camera Position Tests - + /// Tests that camera position can be changed /// Assertion: Should update camera position property func testCameraPosition_CanBeChanged() { let expectation = XCTestExpectation(description: "Camera position should change") - + cameraModel.$cameraPosition .dropFirst() .sink { position in @@ -161,17 +160,17 @@ class CameraModelTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + cameraModel.cameraPosition = .front - + wait(for: [expectation], timeout: 1.0) } - + /// Tests that lens type can be changed /// Assertion: Should update lens type property func testLensType_CanBeChanged() { let expectation = XCTestExpectation(description: "Lens type should change") - + cameraModel.$currentLensType .dropFirst() .sink { lensType in @@ -179,19 +178,19 @@ class CameraModelTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + cameraModel.currentLensType = .ultraWide - + wait(for: [expectation], timeout: 1.0) } - + // MARK: - Flash Mode Tests - + /// Tests that flash mode can be updated /// Assertion: Should update flash mode property correctly func testFlashMode_CanBeUpdated() { let expectation = XCTestExpectation(description: "Flash mode should update") - + cameraModel.$flashMode .dropFirst() .sink { flashMode in @@ -199,50 +198,50 @@ class CameraModelTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + cameraModel.flashMode = .on - + wait(for: [expectation], timeout: 1.0) } - + /// Tests all flash mode options /// Assertion: Should support all standard flash modes func testFlashMode_SupportsAllOptions() { let flashModes: [AVCaptureDevice.FlashMode] = [.auto, .on, .off] - + for mode in flashModes { cameraModel.flashMode = mode XCTAssertEqual(cameraModel.flashMode, mode, "Should support flash mode: \(mode)") } } - + // MARK: - Focus Indicator Tests - + /// Tests that focus indicator can be shown and hidden /// Assertion: Should update focus indicator visibility correctly func testFocusIndicator_CanBeShownAndHidden() { let expectation = XCTestExpectation(description: "Focus indicator should update") expectation.expectedFulfillmentCount = 2 - + cameraModel.$showingFocusIndicator .dropFirst() - .sink { showing in + .sink { _ in 
expectation.fulfill() } .store(in: &cancellables) - + cameraModel.showingFocusIndicator = true cameraModel.showingFocusIndicator = false - + wait(for: [expectation], timeout: 2.0) } - + /// Tests that focus indicator point can be set /// Assertion: Should update focus point correctly func testFocusIndicatorPoint_CanBeSet() { let expectation = XCTestExpectation(description: "Focus point should update") let testPoint = CGPoint(x: 100, y: 150) - + cameraModel.$focusIndicatorPoint .dropFirst() .sink { point in @@ -250,20 +249,20 @@ class CameraModelTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + cameraModel.focusIndicatorPoint = testPoint - + wait(for: [expectation], timeout: 1.0) } - + // MARK: - Recent Image Tests - + /// Tests that recent image can be set and retrieved /// Assertion: Should store and retrieve recent image correctly func testRecentImage_CanBeSetAndRetrieved() { let expectation = XCTestExpectation(description: "Recent image should update") let testImage = createTestImage() - + cameraModel.$recentImage .dropFirst() .sink { image in @@ -271,19 +270,19 @@ class CameraModelTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + cameraModel.recentImage = testImage - + wait(for: [expectation], timeout: 1.0) } - + // MARK: - Alert State Tests - + /// Tests that alert state can be managed correctly /// Assertion: Should update alert state correctly func testAlert_CanBeManaged() { let expectation = XCTestExpectation(description: "Alert state should update") - + cameraModel.$alert .dropFirst() .sink { alertShowing in @@ -291,104 +290,105 @@ class CameraModelTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + cameraModel.alert = true - + wait(for: [expectation], timeout: 1.0) } - + // MARK: - Session Management Tests - + /// Tests that AVCaptureSession is properly initialized /// Assertion: Should have valid capture session func testSession_ProperlyInitialized() { XCTAssertNotNil(cameraModel.session, "Capture session should be initialized") } - + /// Tests that photo output is properly initialized /// Assertion: Should have valid photo output func testPhotoOutput_ProperlyInitialized() { XCTAssertNotNil(cameraModel.output, "Photo output should be initialized") } - + // MARK: - Simulator-Specific Tests + #if DEBUG && targetEnvironment(simulator) - /// Tests that simulator setup works correctly - /// Assertion: Should set up mock camera functionality in simulator - func testSimulatorSetup_WorksCorrectly() { - let expectation = XCTestExpectation(description: "Simulator setup should complete") - - // In simulator, permission should be granted quickly - cameraModel.$isPermissionGranted - .dropFirst() - .sink { isGranted in - if isGranted { - expectation.fulfill() + /// Tests that simulator setup works correctly + /// Assertion: Should set up mock camera functionality in simulator + func testSimulatorSetup_WorksCorrectly() { + let expectation = XCTestExpectation(description: "Simulator setup should complete") + + // In simulator, permission should be granted quickly + cameraModel.$isPermissionGranted + .dropFirst() + .sink { isGranted in + if isGranted { + expectation.fulfill() + } } + .store(in: &cancellables) + + // Call setup directly for testing + cameraModel.checkPermissions() + + wait(for: [expectation], timeout: 3.0) + + // Check that zoom values are set correctly for simulator + DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { + XCTAssertEqual(self.cameraModel.minZoom, 0.5, "Simulator min zoom should be 0.5") + 
XCTAssertEqual(self.cameraModel.maxZoom, 10.0, "Simulator max zoom should be 10.0") + XCTAssertEqual(self.cameraModel.zoomFactor, 1.0, "Simulator zoom factor should be 1.0") } - .store(in: &cancellables) - - // Call setup directly for testing - cameraModel.checkPermissions() - - wait(for: [expectation], timeout: 3.0) - - // Check that zoom values are set correctly for simulator - DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { - XCTAssertEqual(self.cameraModel.minZoom, 0.5, "Simulator min zoom should be 0.5") - XCTAssertEqual(self.cameraModel.maxZoom, 10.0, "Simulator max zoom should be 10.0") - XCTAssertEqual(self.cameraModel.zoomFactor, 1.0, "Simulator zoom factor should be 1.0") } - } - - /// Tests that mock photo capture works in simulator - /// Assertion: Should be able to capture mock photos without camera hardware - func testMockPhotoCapture_WorksInSimulator() { - // Test that the camera model can handle mock photo operations - // Since captureMockPhoto is private, we test indirectly through the public interface - XCTAssertNotNil(cameraModel, "Camera model should work in simulator") - - // Test that recent image can be set (simulating capture) - let mockImage = createTestImage() - cameraModel.recentImage = mockImage - - XCTAssertNotNil(cameraModel.recentImage, "Should be able to set recent image in simulator") - } + + /// Tests that mock photo capture works in simulator + /// Assertion: Should be able to capture mock photos without camera hardware + func testMockPhotoCapture_WorksInSimulator() { + // Test that the camera model can handle mock photo operations + // Since captureMockPhoto is private, we test indirectly through the public interface + XCTAssertNotNil(cameraModel, "Camera model should work in simulator") + + // Test that recent image can be set (simulating capture) + let mockImage = createTestImage() + cameraModel.recentImage = mockImage + + XCTAssertNotNil(cameraModel.recentImage, "Should be able to set recent image in simulator") + } #endif - + // MARK: - View Size Tests - + /// Tests that view size can be set and maintained /// Assertion: Should store view size for camera calculations func testViewSize_CanBeSetAndMaintained() { let testSize = CGSize(width: 375, height: 812) - + cameraModel.viewSize = testSize - + XCTAssertEqual(cameraModel.viewSize, testSize, "View size should be maintained") } - + // MARK: - Memory Management Tests - + /// Tests that camera model properly handles deinitialization /// Assertion: Should clean up resources without memory leaks func testDeinit_CleansUpResources() { // Create and release camera model to test deinit var testCameraModel: CameraModel? 
= CameraModel() XCTAssertNotNil(testCameraModel, "Camera model should be created") - + testCameraModel = nil XCTAssertNil(testCameraModel, "Camera model should be deallocated") } - + // MARK: - Published Properties Tests - + /// Tests that all published properties can be observed /// Assertion: All @Published properties should emit changes correctly func testPublishedProperties_EmitChangesCorrectly() { let expectation = XCTestExpectation(description: "Published properties should emit changes") expectation.expectedFulfillmentCount = 8 // Number of properties we'll test - + // Test multiple published properties cameraModel.$isPermissionGranted.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) cameraModel.$alert.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) @@ -398,7 +398,7 @@ class CameraModelTests: XCTestCase { cameraModel.$showingFocusIndicator.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) cameraModel.$flashMode.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) cameraModel.$cameraPosition.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - + // Trigger changes cameraModel.isPermissionGranted = true cameraModel.alert = true @@ -408,17 +408,17 @@ class CameraModelTests: XCTestCase { cameraModel.showingFocusIndicator = true cameraModel.flashMode = .on cameraModel.cameraPosition = .front - + wait(for: [expectation], timeout: 3.0) } - + // MARK: - Integration Tests - + /// Tests the complete camera initialization flow /// Assertion: Should handle the full initialization sequence correctly func testCameraInitializationFlow_CompletesCorrectly() { let expectation = XCTestExpectation(description: "Camera initialization should complete") - + // Monitor permission changes as indicator of initialization progress cameraModel.$isPermissionGranted .dropFirst() @@ -428,21 +428,21 @@ class CameraModelTests: XCTestCase { } } .store(in: &cancellables) - + // Trigger initialization cameraModel.checkPermissions() - + wait(for: [expectation], timeout: 5.0) } - + /// Tests that foreground notification handling works correctly /// Assertion: Should reset zoom when app enters foreground func testForegroundNotificationHandling_ResetsZoom() { // Set zoom to non-default value cameraModel.zoomFactor = 5.0 - + let expectation = XCTestExpectation(description: "Zoom should reset on foreground") - + cameraModel.$zoomFactor .dropFirst() .sink { zoomFactor in @@ -451,31 +451,31 @@ class CameraModelTests: XCTestCase { } } .store(in: &cancellables) - + // Simulate app entering foreground NotificationCenter.default.post(name: UIApplication.willEnterForegroundNotification, object: nil) - + wait(for: [expectation], timeout: 2.0) } - + // MARK: - Error Handling Tests - + /// Tests that camera model handles errors gracefully /// Assertion: Should not crash when encountering various error conditions func testErrorHandling_HandlesGracefully() { // Test that setting invalid values doesn't crash - cameraModel.zoomFactor = -1.0 // Invalid zoom + cameraModel.zoomFactor = -1.0 // Invalid zoom XCTAssertNotNil(cameraModel, "Should handle invalid zoom without crashing") - - cameraModel.focusIndicatorPoint = CGPoint(x: CGFloat.infinity, y: CGFloat.nan) // Invalid point + + cameraModel.focusIndicatorPoint = CGPoint(x: CGFloat.infinity, y: CGFloat.nan) // Invalid point XCTAssertNotNil(cameraModel, "Should handle invalid focus point without crashing") - - cameraModel.viewSize = CGSize(width: -100, height: -100) // Invalid 
size + + cameraModel.viewSize = CGSize(width: -100, height: -100) // Invalid size XCTAssertNotNil(cameraModel, "Should handle invalid view size without crashing") } - + // MARK: - Helper Methods - + /// Creates a test image for use in tests private func createTestImage(size: CGSize = CGSize(width: 100, height: 100)) -> UIImage { let renderer = UIGraphicsImageRenderer(size: size) diff --git a/SnapSafeTests/EditedPhotoTrackingTests.swift b/SnapSafeTests/EditedPhotoTrackingTests.swift index 7eb5ba1..df86b04 100644 --- a/SnapSafeTests/EditedPhotoTrackingTests.swift +++ b/SnapSafeTests/EditedPhotoTrackingTests.swift @@ -5,23 +5,22 @@ // Created by Bill Booth on 5/26/25. // -import XCTest @testable import SnapSafe +import XCTest class EditedPhotoTrackingTests: XCTestCase { - var testFileManager: SecureFileManager! var tempDirectory: URL! - + override func setUp() { super.setUp() testFileManager = SecureFileManager() - + // Create a temporary directory for testing tempDirectory = FileManager.default.temporaryDirectory.appendingPathComponent(UUID().uuidString) try? FileManager.default.createDirectory(at: tempDirectory, withIntermediateDirectories: true) } - + override func tearDown() { // Clean up temporary directory try? FileManager.default.removeItem(at: tempDirectory) @@ -29,14 +28,14 @@ class EditedPhotoTrackingTests: XCTestCase { testFileManager = nil super.tearDown() } - + // MARK: - Edited Photo Saving Tests - + func testSavePhoto_WithEditedFlag_ShouldMarkAsEdited() throws { // Create test image data let testImage = UIImage(systemName: "photo")! let imageData = testImage.jpegData(compressionQuality: 0.9)! - + // Save photo with edited flag let filename = try testFileManager.savePhoto( imageData, @@ -44,93 +43,93 @@ class EditedPhotoTrackingTests: XCTestCase { isEdited: true, originalFilename: "original_photo_123" ) - + // Verify file was saved XCTAssertFalse(filename.isEmpty, "Filename should not be empty") - + // Load the metadata and verify edited flag let (_, metadata) = try testFileManager.loadPhoto(filename: filename) - + XCTAssertTrue(metadata["isEdited"] as? Bool == true, "Photo should be marked as edited") XCTAssertEqual(metadata["originalFilename"] as? String, "original_photo_123", "Original filename should be preserved") } - + func testSavePhoto_WithoutEditedFlag_ShouldNotMarkAsEdited() throws { // Create test image data let testImage = UIImage(systemName: "photo")! let imageData = testImage.jpegData(compressionQuality: 0.9)! - + // Save photo without edited flag (default behavior) let filename = try testFileManager.savePhoto(imageData, withMetadata: [:]) - + // Verify file was saved XCTAssertFalse(filename.isEmpty, "Filename should not be empty") - + // Load the metadata and verify no edited flag let (_, metadata) = try testFileManager.loadPhoto(filename: filename) - + XCTAssertNil(metadata["isEdited"], "Photo should not have isEdited flag") XCTAssertNil(metadata["originalFilename"], "Photo should not have originalFilename") } - + func testSavePhoto_WithEditedFlagFalse_ShouldNotMarkAsEdited() throws { // Create test image data let testImage = UIImage(systemName: "photo")! let imageData = testImage.jpegData(compressionQuality: 0.9)! 
- + // Save photo with edited flag explicitly set to false let filename = try testFileManager.savePhoto( imageData, withMetadata: [:], isEdited: false ) - + // Verify file was saved XCTAssertFalse(filename.isEmpty, "Filename should not be empty") - + // Load the metadata and verify no edited flag let (_, metadata) = try testFileManager.loadPhoto(filename: filename) - + XCTAssertNil(metadata["isEdited"], "Photo should not have isEdited flag when explicitly set to false") XCTAssertNil(metadata["originalFilename"], "Photo should not have originalFilename when not edited") } - + func testSavePhoto_WithEditedFlagButNoOriginal_ShouldMarkAsEditedWithoutOriginal() throws { // Create test image data let testImage = UIImage(systemName: "photo")! let imageData = testImage.jpegData(compressionQuality: 0.9)! - + // Save photo with edited flag but no original filename let filename = try testFileManager.savePhoto( imageData, withMetadata: [:], isEdited: true ) - + // Verify file was saved XCTAssertFalse(filename.isEmpty, "Filename should not be empty") - + // Load the metadata and verify edited flag without original let (_, metadata) = try testFileManager.loadPhoto(filename: filename) - + XCTAssertTrue(metadata["isEdited"] as? Bool == true, "Photo should be marked as edited") XCTAssertNil(metadata["originalFilename"], "Photo should not have originalFilename when not provided") } - + // MARK: - Metadata Preservation Tests - + func testSavePhoto_WithExistingMetadata_ShouldPreserveAndAddEditedFlag() throws { // Create test image data let testImage = UIImage(systemName: "photo")! let imageData = testImage.jpegData(compressionQuality: 0.9)! - + // Create existing metadata let existingMetadata: [String: Any] = [ "customField": "customValue", "imported": true, - "importSource": "PhotosPicker" + "importSource": "PhotosPicker", ] - + // Save photo with edited flag and existing metadata let filename = try testFileManager.savePhoto( imageData, @@ -138,30 +137,30 @@ class EditedPhotoTrackingTests: XCTestCase { isEdited: true, originalFilename: "original_photo_456" ) - + // Load the metadata and verify everything is preserved let (_, metadata) = try testFileManager.loadPhoto(filename: filename) - + // Check edited flag and original filename were added XCTAssertTrue(metadata["isEdited"] as? Bool == true, "Photo should be marked as edited") XCTAssertEqual(metadata["originalFilename"] as? String, "original_photo_456", "Original filename should be preserved") - + // Check existing metadata was preserved XCTAssertEqual(metadata["customField"] as? String, "customValue", "Custom metadata should be preserved") XCTAssertTrue(metadata["imported"] as? Bool == true, "Imported flag should be preserved") XCTAssertEqual(metadata["importSource"] as? String, "PhotosPicker", "Import source should be preserved") - + // Check automatic metadata was added XCTAssertNotNil(metadata["creationDate"], "Creation date should be added automatically") } - + // MARK: - Edge Cases - + func testSavePhoto_WithEmptyOriginalFilename_ShouldMarkAsEditedWithEmptyOriginal() throws { // Create test image data let testImage = UIImage(systemName: "photo")! let imageData = testImage.jpegData(compressionQuality: 0.9)! 
- + // Save photo with edited flag and empty original filename let filename = try testFileManager.savePhoto( imageData, @@ -169,11 +168,11 @@ class EditedPhotoTrackingTests: XCTestCase { isEdited: true, originalFilename: "" ) - + // Load the metadata and verify edited flag with empty original let (_, metadata) = try testFileManager.loadPhoto(filename: filename) - + XCTAssertTrue(metadata["isEdited"] as? Bool == true, "Photo should be marked as edited") XCTAssertEqual(metadata["originalFilename"] as? String, "", "Empty original filename should be preserved") } -} \ No newline at end of file +} diff --git a/SnapSafeTests/FaceDetectorTests.swift b/SnapSafeTests/FaceDetectorTests.swift index 3c9b8ae..208767f 100644 --- a/SnapSafeTests/FaceDetectorTests.swift +++ b/SnapSafeTests/FaceDetectorTests.swift @@ -5,175 +5,174 @@ // Created by Bill Booth on 5/25/25. // -import XCTest +@testable import SnapSafe import UIKit import Vision -@testable import SnapSafe +import XCTest class FaceDetectorTests: XCTestCase { - private var faceDetector: FaceDetector! private var testImage: UIImage! - + override func setUp() { super.setUp() faceDetector = FaceDetector() testImage = createTestImage() } - + override func tearDown() { faceDetector = nil testImage = nil super.tearDown() } - + // MARK: - Face Detection Tests - + /// Tests that detectFaces() handles nil CGImage gracefully /// Assertion: Should return empty array when image cannot be converted to CGImage func testDetectFaces_HandlesInvalidImage() { let expectation = XCTestExpectation(description: "Face detection should complete") - + // Create image with no CGImage backing let invalidImage = UIImage() - + faceDetector.detectFaces(in: invalidImage) { detectedFaces in XCTAssertTrue(detectedFaces.isEmpty, "Should return empty array for invalid image") expectation.fulfill() } - + wait(for: [expectation], timeout: 2.0) } - + /// Tests that detectFaces() processes valid images asynchronously /// Assertion: Should complete without throwing and return results via completion handler func testDetectFaces_ProcessesValidImageAsynchronously() { let expectation = XCTestExpectation(description: "Face detection should complete") - + faceDetector.detectFaces(in: testImage) { detectedFaces in // Should complete without crashing XCTAssertNotNil(detectedFaces, "Should return non-nil array") expectation.fulfill() } - + wait(for: [expectation], timeout: 5.0) } - + /// Tests that detectFaces() returns DetectedFace objects with proper coordinate conversion /// Assertion: Detected faces should have bounds within image dimensions func testDetectFaces_ReturnsValidCoordinates() { let expectation = XCTestExpectation(description: "Face detection should complete") - + faceDetector.detectFaces(in: testImage) { detectedFaces in for face in detectedFaces { // Assert face bounds are within image dimensions XCTAssertGreaterThanOrEqual(face.bounds.minX, 0, "Face X coordinate should be >= 0") XCTAssertGreaterThanOrEqual(face.bounds.minY, 0, "Face Y coordinate should be >= 0") - XCTAssertLessThanOrEqual(face.bounds.maxX, self.testImage.size.width, - "Face should be within image width") - XCTAssertLessThanOrEqual(face.bounds.maxY, self.testImage.size.height, - "Face should be within image height") - + XCTAssertLessThanOrEqual(face.bounds.maxX, self.testImage.size.width, + "Face should be within image width") + XCTAssertLessThanOrEqual(face.bounds.maxY, self.testImage.size.height, + "Face should be within image height") + // Assert face has positive dimensions 
XCTAssertGreaterThan(face.bounds.width, 0, "Face width should be positive") XCTAssertGreaterThan(face.bounds.height, 0, "Face height should be positive") } expectation.fulfill() } - + wait(for: [expectation], timeout: 5.0) } - + /// Tests that detectFaces() handles Vision framework errors gracefully /// Assertion: Should return empty array when Vision processing fails func testDetectFaces_HandlesVisionErrors() { let expectation = XCTestExpectation(description: "Face detection should handle errors") - + // Create a very small image that might cause Vision issues let tinyImage = createTestImage(size: CGSize(width: 1, height: 1)) - + faceDetector.detectFaces(in: tinyImage) { detectedFaces in // Should not crash and return some result XCTAssertNotNil(detectedFaces, "Should return array even on potential Vision errors") expectation.fulfill() } - + wait(for: [expectation], timeout: 3.0) } - + // MARK: - Face Masking Tests - + /// Tests that maskFaces() returns original image when no faces are selected /// Assertion: Should return original image unchanged when no faces are selected for masking func testMaskFaces_ReturnsOriginalWhenNoFacesSelected() { let face1 = DetectedFace(bounds: CGRect(x: 10, y: 10, width: 50, height: 50), isSelected: false) let face2 = DetectedFace(bounds: CGRect(x: 100, y: 100, width: 60, height: 60), isSelected: false) let faces = [face1, face2] - + let result = faceDetector.maskFaces(in: testImage, faces: faces, modes: [.blur]) - + XCTAssertNotNil(result, "Should return a valid image") // Note: Exact pixel comparison is complex, so we verify basic properties XCTAssertEqual(result?.size, testImage.size, "Result should have same dimensions as original") } - + /// Tests that maskFaces() returns original image when modes array is empty /// Assertion: Should return original image when no masking modes are specified func testMaskFaces_ReturnsOriginalWhenNoModes() { let face = DetectedFace(bounds: CGRect(x: 10, y: 10, width: 50, height: 50), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: []) - + XCTAssertNotNil(result, "Should return a valid image") XCTAssertEqual(result?.size, testImage.size, "Result should have same dimensions as original") } - + /// Tests that maskFaces() processes selected faces with blur mode /// Assertion: Should return modified image when faces are selected and blur mode is applied func testMaskFaces_ProcessesSelectedFacesWithBlur() { let selectedFace = DetectedFace(bounds: CGRect(x: 50, y: 50, width: 100, height: 100), isSelected: true) let unselectedFace = DetectedFace(bounds: CGRect(x: 200, y: 200, width: 80, height: 80), isSelected: false) let faces = [selectedFace, unselectedFace] - + let result = faceDetector.maskFaces(in: testImage, faces: faces, modes: [.blur]) - + XCTAssertNotNil(result, "Should return a valid blurred image") XCTAssertEqual(result?.size, testImage.size, "Result should maintain original dimensions") } - + /// Tests that maskFaces() handles blackout mode correctly /// Assertion: Should apply blackout effect to selected faces func testMaskFaces_AppliesBlackoutMode() { let face = DetectedFace(bounds: CGRect(x: 25, y: 25, width: 50, height: 50), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.blackout]) - + XCTAssertNotNil(result, "Should return image with blackout effect") XCTAssertEqual(result?.size, testImage.size, "Result should maintain original dimensions") } - + /// Tests that maskFaces() handles pixelate mode correctly /// 
Assertion: Should apply pixelation effect to selected faces func testMaskFaces_AppliesPixelateMode() { let face = DetectedFace(bounds: CGRect(x: 30, y: 30, width: 60, height: 60), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.pixelate]) - + XCTAssertNotNil(result, "Should return image with pixelation effect") XCTAssertEqual(result?.size, testImage.size, "Result should maintain original dimensions") } - + /// Tests that maskFaces() handles noise mode correctly /// Assertion: Should apply noise effect to selected faces func testMaskFaces_AppliesNoiseMode() { let face = DetectedFace(bounds: CGRect(x: 40, y: 40, width: 70, height: 70), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.noise]) - + XCTAssertNotNil(result, "Should return image with noise effect") XCTAssertEqual(result?.size, testImage.size, "Result should maintain original dimensions") } - + /// Tests that maskFaces() handles multiple selected faces /// Assertion: Should apply masking to all selected faces func testMaskFaces_HandlesMultipleSelectedFaces() { @@ -181,159 +180,159 @@ class FaceDetectorTests: XCTestCase { let face2 = DetectedFace(bounds: CGRect(x: 80, y: 80, width: 50, height: 50), isSelected: true) let face3 = DetectedFace(bounds: CGRect(x: 150, y: 150, width: 45, height: 45), isSelected: false) let faces = [face1, face2, face3] - + let result = faceDetector.maskFaces(in: testImage, faces: faces, modes: [.blur]) - + XCTAssertNotNil(result, "Should return image with multiple faces masked") XCTAssertEqual(result?.size, testImage.size, "Result should maintain original dimensions") } - + /// Tests that maskFaces() uses first mode when multiple modes are provided /// Assertion: Should use primary (first) mode for processing when multiple modes are specified func testMaskFaces_UsesPrimaryModeFromMultipleModes() { let face = DetectedFace(bounds: CGRect(x: 35, y: 35, width: 55, height: 55), isSelected: true) - + // Provide multiple modes - should use first one (blur) let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.blur, .pixelate, .blackout]) - + XCTAssertNotNil(result, "Should return image processed with primary mode") XCTAssertEqual(result?.size, testImage.size, "Result should maintain original dimensions") } - + // MARK: - Helper Method Tests - + /// Tests that coerceRectToImage() properly constrains rectangles within image bounds /// Assertion: Should return rectangle that is always within image boundaries func testCoerceRectToImage_ConstrainsRectangleWithinBounds() { // Use reflection to access private method for testing let method = class_getInstanceMethod(FaceDetector.self, Selector(("coerceRectToImage:image:"))) // XCTAssertNotNil(method, "coerceRectToImage method should exist") - + // Test with rectangle extending outside image bounds let oversizedRect = CGRect(x: -10, y: -10, width: testImage.size.width + 20, height: testImage.size.height + 20) - + // Since we can't easily access private method, we'll test the public behavior // by creating a face that would require coercion let face = DetectedFace(bounds: oversizedRect, isSelected: true) let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.blackout]) - + // Should not crash and should return valid image XCTAssertNotNil(result, "Should handle oversized rectangles without crashing") } - + /// Tests that coerceRectToImage() handles completely outside rectangles /// Assertion: Should create small valid rectangle when input 
is completely outside image func testCoerceRectToImage_HandlesCompletelyOutsideRectangles() { // Test with rectangle completely outside image let outsideRect = CGRect(x: testImage.size.width + 100, y: testImage.size.height + 100, width: 50, height: 50) let face = DetectedFace(bounds: outsideRect, isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.blackout]) - + // Should handle gracefully without crashing XCTAssertNotNil(result, "Should handle completely outside rectangles") } - + // MARK: - Blur Faces Convenience Method Tests - + /// Tests that blurFaces() is a convenience wrapper for maskFaces() with blur mode /// Assertion: Should apply blur masking to selected faces func testBlurFaces_IsConvenienceWrapperForBlurMode() { let face = DetectedFace(bounds: CGRect(x: 45, y: 45, width: 65, height: 65), isSelected: true) - + let result = faceDetector.blurFaces(in: testImage, faces: [face]) - + XCTAssertNotNil(result, "blurFaces should return valid result") XCTAssertEqual(result?.size, testImage.size, "Result should maintain original dimensions") } - + // MARK: - Image Processing Algorithm Tests - + /// Tests that pixelate algorithm maintains image structure while reducing detail /// Assertion: Pixelated image should have similar overall structure but reduced detail func testPixelateAlgorithm_MaintainsImageStructure() { let face = DetectedFace(bounds: CGRect(x: 60, y: 60, width: 80, height: 80), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.pixelate]) - + XCTAssertNotNil(result, "Pixelation should produce valid result") // Pixelated image should still be recognizable as an image XCTAssertEqual(result?.size, testImage.size, "Pixelated image should maintain size") } - + /// Tests that blur algorithm produces smoothed regions /// Assertion: Blurred regions should lose sharp detail while maintaining general appearance func testBlurAlgorithm_ProducesSmoothRegions() { let face = DetectedFace(bounds: CGRect(x: 70, y: 70, width: 90, height: 90), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.blur]) - + XCTAssertNotNil(result, "Blur should produce valid result") XCTAssertEqual(result?.size, testImage.size, "Blurred image should maintain size") } - + /// Tests that noise algorithm generates random pattern /// Assertion: Noise effect should replace image data with random values func testNoiseAlgorithm_GeneratesRandomPattern() { let face = DetectedFace(bounds: CGRect(x: 55, y: 55, width: 75, height: 75), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.noise]) - + XCTAssertNotNil(result, "Noise should produce valid result") XCTAssertEqual(result?.size, testImage.size, "Noise image should maintain size") } - + // MARK: - Memory and Performance Tests - + /// Tests that face detection completes within reasonable time /// Assertion: Face detection should complete within performance threshold func testFaceDetection_CompletesWithinReasonableTime() { let expectation = XCTestExpectation(description: "Face detection should complete quickly") let startTime = Date() - + faceDetector.detectFaces(in: testImage) { _ in let elapsedTime = Date().timeIntervalSince(startTime) XCTAssertLessThan(elapsedTime, 10.0, "Face detection should complete within 10 seconds") expectation.fulfill() } - + wait(for: [expectation], timeout: 15.0) } - + /// Tests that masking operations complete efficiently /// Assertion: Face masking should not 
cause significant delay or memory issues func testFaceMasking_CompletesEfficiently() { let face = DetectedFace(bounds: CGRect(x: 50, y: 50, width: 100, height: 100), isSelected: true) - + measure { let _ = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.blur]) } } - + /// Tests that multiple masking operations don't cause memory leaks /// Assertion: Should handle multiple operations without excessive memory growth func testMultipleMaskingOperations_HandleMemoryEfficiently() { let face = DetectedFace(bounds: CGRect(x: 40, y: 40, width: 80, height: 80), isSelected: true) - + // Perform multiple operations to test memory handling - for _ in 0..<10 { + for _ in 0 ..< 10 { let result = faceDetector.maskFaces(in: testImage, faces: [face], modes: [.blur]) XCTAssertNotNil(result, "Each operation should succeed") } } - + // MARK: - Edge Case Tests - + /// Tests that very small face rectangles are handled correctly /// Assertion: Should handle faces with minimal dimensions without errors func testVerySmallFaceRectangles_HandledCorrectly() { let tinyFace = DetectedFace(bounds: CGRect(x: 10, y: 10, width: 1, height: 1), isSelected: true) - + let result = faceDetector.maskFaces(in: testImage, faces: [tinyFace], modes: [.blur]) - + XCTAssertNotNil(result, "Should handle very small face rectangles") } - + /// Tests that very large face rectangles are handled correctly /// Assertion: Should handle faces that cover most of the image func testVeryLargeFaceRectangles_HandledCorrectly() { @@ -341,27 +340,27 @@ class FaceDetectorTests: XCTestCase { bounds: CGRect(x: 5, y: 5, width: testImage.size.width - 10, height: testImage.size.height - 10), isSelected: true ) - + let result = faceDetector.maskFaces(in: testImage, faces: [largeFace], modes: [.blackout]) - + XCTAssertNotNil(result, "Should handle very large face rectangles") } - + /// Tests that zero-sized rectangles are handled gracefully /// Assertion: Should not crash with zero-width or zero-height rectangles func testZeroSizedRectangles_HandledGracefully() { let zeroWidthFace = DetectedFace(bounds: CGRect(x: 50, y: 50, width: 0, height: 50), isSelected: true) let zeroHeightFace = DetectedFace(bounds: CGRect(x: 100, y: 100, width: 50, height: 0), isSelected: true) - + let result1 = faceDetector.maskFaces(in: testImage, faces: [zeroWidthFace], modes: [.blur]) let result2 = faceDetector.maskFaces(in: testImage, faces: [zeroHeightFace], modes: [.blur]) - + XCTAssertNotNil(result1, "Should handle zero-width rectangles") XCTAssertNotNil(result2, "Should handle zero-height rectangles") } - + // MARK: - Helper Methods - + /// Creates a test image for use in tests private func createTestImage(size: CGSize = CGSize(width: 300, height: 300)) -> UIImage { let renderer = UIGraphicsImageRenderer(size: size) @@ -369,17 +368,17 @@ class FaceDetectorTests: XCTestCase { // Create a simple gradient background context.cgContext.setFillColor(UIColor.blue.cgColor) context.cgContext.fill(CGRect(origin: .zero, size: size)) - + // Add some geometric shapes to make it more interesting for Vision context.cgContext.setFillColor(UIColor.white.cgColor) - context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.3, y: size.height * 0.3, - width: size.width * 0.4, height: size.height * 0.4)) - + context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.3, y: size.height * 0.3, + width: size.width * 0.4, height: size.height * 0.4)) + context.cgContext.setFillColor(UIColor.black.cgColor) - context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.4, y: size.height * 0.4, 
- width: size.width * 0.1, height: size.height * 0.1)) - context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.5, y: size.height * 0.4, - width: size.width * 0.1, height: size.height * 0.1)) + context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.4, y: size.height * 0.4, + width: size.width * 0.1, height: size.height * 0.1)) + context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.5, y: size.height * 0.4, + width: size.width * 0.1, height: size.height * 0.1)) } } } diff --git a/SnapSafeTests/GalleryFilteringTests.swift b/SnapSafeTests/GalleryFilteringTests.swift deleted file mode 100644 index 59ec776..0000000 --- a/SnapSafeTests/GalleryFilteringTests.swift +++ /dev/null @@ -1,228 +0,0 @@ -// -// GalleryFilteringTests.swift -// SnapSafeTests -// -// Created by Bill Booth on 5/26/25. -// - -import XCTest -@testable import SnapSafe - -class GalleryFilteringTests: XCTestCase { - - var testPhotos: [SecurePhoto]! - - override func setUp() { - super.setUp() - createTestPhotos() - } - - override func tearDown() { - testPhotos = nil - super.tearDown() - } - - // MARK: - Filter Logic Tests - - func testFilterPhotos_AllFilter_ShouldReturnAllPhotos() { - // Test that .all filter returns all photos - let filteredPhotos = applyFilter(.all, to: testPhotos) - - XCTAssertEqual(filteredPhotos.count, testPhotos.count, "All filter should return all photos") - XCTAssertEqual(filteredPhotos, testPhotos, "All filter should return the same photos") - } - - func testFilterPhotos_ImportedFilter_ShouldReturnOnlyImportedPhotos() { - // Test that .imported filter returns only imported photos - let filteredPhotos = applyFilter(.imported, to: testPhotos) - - let expectedCount = testPhotos.filter { $0.metadata["imported"] as? Bool == true }.count - XCTAssertEqual(filteredPhotos.count, expectedCount, "Imported filter should return correct count") - - // Verify all returned photos are imported - for photo in filteredPhotos { - XCTAssertTrue(photo.metadata["imported"] as? Bool == true, "All filtered photos should be imported") - } - } - - func testFilterPhotos_EditedFilter_ShouldReturnOnlyEditedPhotos() { - // Test that .edited filter returns only edited photos - let filteredPhotos = applyFilter(.edited, to: testPhotos) - - let expectedCount = testPhotos.filter { $0.metadata["isEdited"] as? Bool == true }.count - XCTAssertEqual(filteredPhotos.count, expectedCount, "Edited filter should return correct count") - - // Verify all returned photos are edited - for photo in filteredPhotos { - XCTAssertTrue(photo.metadata["isEdited"] as? 
Bool == true, "All filtered photos should be edited") - } - } - - func testFilterPhotos_WithLocationFilter_ShouldReturnOnlyPhotosWithGPS() { - // Test that .withLocation filter returns only photos with GPS data - let filteredPhotos = applyFilter(.withLocation, to: testPhotos) - - let expectedCount = testPhotos.filter { hasGPSData($0) }.count - XCTAssertEqual(filteredPhotos.count, expectedCount, "Location filter should return correct count") - - // Verify all returned photos have GPS data - for photo in filteredPhotos { - XCTAssertTrue(hasGPSData(photo), "All filtered photos should have GPS data") - } - } - - func testFilterPhotos_EmptyResults_ShouldHandleGracefully() { - // Test filtering when no photos match criteria - let photosWithoutGPS = testPhotos.filter { !hasGPSData($0) } - - // Apply location filter to photos without GPS - let filteredPhotos = applyFilter(.withLocation, to: photosWithoutGPS) - - XCTAssertEqual(filteredPhotos.count, 0, "Filter should return empty array when no photos match") - XCTAssertTrue(filteredPhotos.isEmpty, "Filtered array should be empty") - } - - func testFilterPhotos_MixedCriteria_ShouldFilterCorrectly() { - // Test that photos can match multiple criteria - let importedAndEditedPhotos = testPhotos.filter { photo in - let isImported = photo.metadata["imported"] as? Bool == true - let isEdited = photo.metadata["isEdited"] as? Bool == true - return isImported && isEdited - } - - XCTAssertGreaterThan(importedAndEditedPhotos.count, 0, "Should have photos that are both imported and edited") - - // Apply imported filter - should include imported+edited photos - let importedFiltered = applyFilter(.imported, to: testPhotos) - for photo in importedAndEditedPhotos { - XCTAssertTrue(importedFiltered.contains(photo), "Imported+edited photos should appear in imported filter") - } - - // Apply edited filter - should include imported+edited photos - let editedFiltered = applyFilter(.edited, to: testPhotos) - for photo in importedAndEditedPhotos { - XCTAssertTrue(editedFiltered.contains(photo), "Imported+edited photos should appear in edited filter") - } - } - - // MARK: - Edge Cases - - func testFilterPhotos_EmptyPhotoArray_ShouldReturnEmpty() { - // Test filtering empty array - let emptyPhotos: [SecurePhoto] = [] - - for filter in PhotoFilter.allCases { - let filteredPhotos = applyFilter(filter, to: emptyPhotos) - XCTAssertEqual(filteredPhotos.count, 0, "Filtering empty array should return empty array for \\(filter)") - } - } - - func testFilterPhotos_PhotosWithMissingMetadata_ShouldHandleGracefully() { - // Create photos with minimal metadata - let minimalPhoto = createTestPhoto(metadata: [:]) - let photosWithMinimal = [minimalPhoto] - - // Test all filters with minimal metadata - let importedFiltered = applyFilter(.imported, to: photosWithMinimal) - XCTAssertEqual(importedFiltered.count, 0, "Photos without imported flag should not match imported filter") - - let editedFiltered = applyFilter(.edited, to: photosWithMinimal) - XCTAssertEqual(editedFiltered.count, 0, "Photos without edited flag should not match edited filter") - - let locationFiltered = applyFilter(.withLocation, to: photosWithMinimal) - XCTAssertEqual(locationFiltered.count, 0, "Photos without GPS data should not match location filter") - - let allFiltered = applyFilter(.all, to: photosWithMinimal) - XCTAssertEqual(allFiltered.count, 1, "Photos should still appear in all filter") - } - - // MARK: - Helper Methods - - private func createTestPhotos() { - testPhotos = [ - // Regular photo (taken in app) - 
createTestPhoto(metadata: [ - "creationDate": Date().timeIntervalSince1970 - ]), - - // Imported photo - createTestPhoto(metadata: [ - "imported": true, - "importSource": "PhotosPicker", - "creationDate": Date().timeIntervalSince1970 - ]), - - // Edited photo - createTestPhoto(metadata: [ - "isEdited": true, - "originalFilename": "original_photo_123", - "creationDate": Date().timeIntervalSince1970 - ]), - - // Photo with GPS data - createTestPhoto(metadata: [ - "creationDate": Date().timeIntervalSince1970, - String(kCGImagePropertyGPSDictionary): [ - String(kCGImagePropertyGPSLatitude): 37.7749, - String(kCGImagePropertyGPSLatitudeRef): "N", - String(kCGImagePropertyGPSLongitude): -122.4194, - String(kCGImagePropertyGPSLongitudeRef): "W" - ] - ]), - - // Imported and edited photo with GPS - createTestPhoto(metadata: [ - "imported": true, - "importSource": "PhotosPicker", - "isEdited": true, - "originalFilename": "imported_original_456", - "creationDate": Date().timeIntervalSince1970, - String(kCGImagePropertyGPSDictionary): [ - String(kCGImagePropertyGPSLatitude): 40.7128, - String(kCGImagePropertyGPSLatitudeRef): "N" - ] - ]), - - // Photo with empty GPS dictionary - createTestPhoto(metadata: [ - "creationDate": Date().timeIntervalSince1970, - String(kCGImagePropertyGPSDictionary): [:] - ]) - ] - } - - private func createTestPhoto(metadata: [String: Any]) -> SecurePhoto { - let testURL = URL(fileURLWithPath: "/test/path/\\(UUID().uuidString)") - return SecurePhoto(filename: "test_\\(UUID().uuidString)", metadata: metadata, fileURL: testURL) - } - - // Apply filter logic that matches SecureGalleryView implementation - private func applyFilter(_ filter: PhotoFilter, to photos: [SecurePhoto]) -> [SecurePhoto] { - switch filter { - case .all: - return photos - case .imported: - return photos.filter { $0.metadata["imported"] as? Bool == true } - case .edited: - return photos.filter { $0.metadata["isEdited"] as? Bool == true } - case .withLocation: - return photos.filter { photo in - guard let gpsData = photo.metadata[String(kCGImagePropertyGPSDictionary)] as? [String: Any] else { return false } - - let hasLatitude = gpsData[String(kCGImagePropertyGPSLatitude)] != nil - let hasLongitude = gpsData[String(kCGImagePropertyGPSLongitude)] != nil - - return hasLatitude || hasLongitude - } - } - } - - private func hasGPSData(_ photo: SecurePhoto) -> Bool { - guard let gpsData = photo.metadata[String(kCGImagePropertyGPSDictionary)] as? [String: Any] else { return false } - - let hasLatitude = gpsData[String(kCGImagePropertyGPSLatitude)] != nil - let hasLongitude = gpsData[String(kCGImagePropertyGPSLongitude)] != nil - - return hasLatitude || hasLongitude - } -} \ No newline at end of file diff --git a/SnapSafeTests/LocationManagerTests.swift b/SnapSafeTests/LocationManagerTests.swift index 028dbd1..2f4a4d7 100644 --- a/SnapSafeTests/LocationManagerTests.swift +++ b/SnapSafeTests/LocationManagerTests.swift @@ -5,69 +5,68 @@ // Created by Bill Booth on 5/25/25. // -import XCTest -import CoreLocation import Combine +import CoreLocation @testable import SnapSafe +import XCTest class LocationManagerTests: XCTestCase { - private var locationManager: LocationManager! private var cancellables: Set! 
- + override func setUp() { super.setUp() locationManager = LocationManager() cancellables = Set() - + // Reset UserDefaults for testing UserDefaults.standard.removeObject(forKey: "shouldIncludeLocationData") } - + override func tearDown() { // Clean up UserDefaults UserDefaults.standard.removeObject(forKey: "shouldIncludeLocationData") - + cancellables?.removeAll() cancellables = nil locationManager = nil super.tearDown() } - + // MARK: - Initialization Tests - + /// Tests that LocationManager initializes with correct default values /// Assertion: Should have proper initial state for authorization, location, and user preferences func testInit_SetsCorrectDefaults() { // Reset defaults and create new instance to test initialization UserDefaults.standard.removeObject(forKey: "shouldIncludeLocationData") let newLocationManager = LocationManager() - + XCTAssertEqual(newLocationManager.authorizationStatus, CLLocationManager().authorizationStatus, - "Authorization status should match system default") + "Authorization status should match system default") XCTAssertNil(newLocationManager.lastLocation, "Last location should be nil initially") XCTAssertFalse(newLocationManager.shouldIncludeLocationData, - "Should not include location data by default") + "Should not include location data by default") } - + /// Tests that LocationManager loads saved user preferences from UserDefaults /// Assertion: Should restore shouldIncludeLocationData from saved preferences func testInit_LoadsSavedPreferences() { // Save preference and create new instance UserDefaults.standard.set(true, forKey: "shouldIncludeLocationData") let newLocationManager = LocationManager() - + XCTAssertTrue(newLocationManager.shouldIncludeLocationData, - "Should load saved preference for location data inclusion") + "Should load saved preference for location data inclusion") } - + // MARK: - Location Data Preference Tests - + /// Tests that setIncludeLocationData() updates both the property and UserDefaults /// Assertion: Should persist preference and update published property synchronously func testSetIncludeLocationData_UpdatesPropertyAndUserDefaults() { let expectation = XCTestExpectation(description: "shouldIncludeLocationData should update") - + // Monitor property changes locationManager.$shouldIncludeLocationData .dropFirst() // Skip initial value @@ -76,31 +75,31 @@ class LocationManagerTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + locationManager.setIncludeLocationData(true) - + // Assert UserDefaults is updated XCTAssertTrue(UserDefaults.standard.bool(forKey: "shouldIncludeLocationData"), - "UserDefaults should be updated") - + "UserDefaults should be updated") + wait(for: [expectation], timeout: 1.0) } - + /// Tests that setIncludeLocationData(false) properly disables location inclusion /// Assertion: Should set preference to false and persist in UserDefaults func testSetIncludeLocationData_DisablesLocationInclusion() { // First enable, then disable locationManager.setIncludeLocationData(true) locationManager.setIncludeLocationData(false) - + XCTAssertFalse(locationManager.shouldIncludeLocationData, - "shouldIncludeLocationData should be false") + "shouldIncludeLocationData should be false") XCTAssertFalse(UserDefaults.standard.bool(forKey: "shouldIncludeLocationData"), - "UserDefaults should reflect disabled preference") + "UserDefaults should reflect disabled preference") } - + // MARK: - Authorization Status Tests - + /// Tests that getAuthorizationStatusString() returns correct string 
representations /// Assertion: Should provide user-friendly strings for all authorization status cases func testGetAuthorizationStatusString_ReturnsCorrectStrings() { @@ -109,180 +108,180 @@ class LocationManagerTests: XCTestCase { (.restricted, "Restricted"), (.denied, "Denied"), (.authorizedWhenInUse, "Authorized"), - (.authorizedAlways, "Authorized") + (.authorizedAlways, "Authorized"), ] - + for (status, expectedString) in testCases { locationManager.authorizationStatus = status let statusString = locationManager.getAuthorizationStatusString() XCTAssertEqual(statusString, expectedString, - "Status \(status) should return '\(expectedString)'") + "Status \(status) should return '\(expectedString)'") } } - + // MARK: - Location Metadata Tests - + /// Tests that getCurrentLocationMetadata() returns nil when location data is disabled /// Assertion: Should not provide metadata when user has disabled location inclusion func testGetCurrentLocationMetadata_ReturnsNilWhenDisabled() { locationManager.setIncludeLocationData(false) locationManager.authorizationStatus = .authorizedWhenInUse locationManager.lastLocation = createTestLocation() - + let metadata = locationManager.getCurrentLocationMetadata() - + XCTAssertNil(metadata, "Should return nil when location data inclusion is disabled") } - + /// Tests that getCurrentLocationMetadata() returns nil when not authorized /// Assertion: Should not provide metadata without proper authorization func testGetCurrentLocationMetadata_ReturnsNilWhenNotAuthorized() { locationManager.setIncludeLocationData(true) locationManager.authorizationStatus = .denied locationManager.lastLocation = createTestLocation() - + let metadata = locationManager.getCurrentLocationMetadata() - + XCTAssertNil(metadata, "Should return nil when location access is not authorized") } - + /// Tests that getCurrentLocationMetadata() returns nil when no location is available /// Assertion: Should not provide metadata when lastLocation is nil func testGetCurrentLocationMetadata_ReturnsNilWhenNoLocation() { locationManager.setIncludeLocationData(true) locationManager.authorizationStatus = .authorizedWhenInUse locationManager.lastLocation = nil - + let metadata = locationManager.getCurrentLocationMetadata() - + XCTAssertNil(metadata, "Should return nil when no location is available") } - + /// Tests that getCurrentLocationMetadata() returns proper GPS metadata when conditions are met /// Assertion: Should create valid GPS metadata dictionary with latitude, longitude, and timestamp func testGetCurrentLocationMetadata_ReturnsValidGPSMetadata() { locationManager.setIncludeLocationData(true) locationManager.authorizationStatus = .authorizedWhenInUse - + let testLocation = createTestLocation( - latitude: 37.7749, // San Francisco + latitude: 37.7749, // San Francisco longitude: -122.4194, altitude: 100.0 ) locationManager.lastLocation = testLocation - + let metadata = locationManager.getCurrentLocationMetadata() - + XCTAssertNotNil(metadata, "Should return metadata when conditions are met") - + guard let gpsDict = metadata?[String(kCGImagePropertyGPSDictionary)] as? [String: Any] else { XCTFail("Should contain GPS dictionary") return } - + // Test latitude XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLatitudeRef)] as? String, "N", - "Latitude reference should be North for positive latitude") + "Latitude reference should be North for positive latitude") XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLatitude)] as? 
Double, 37.7749, - "Latitude should match test location") - + "Latitude should match test location") + // Test longitude XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLongitudeRef)] as? String, "W", - "Longitude reference should be West for negative longitude") + "Longitude reference should be West for negative longitude") XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLongitude)] as? Double, 122.4194, - "Longitude should be absolute value") - + "Longitude should be absolute value") + // Test altitude XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSAltitudeRef)] as? Int, 0, - "Altitude reference should be 0 for above sea level") + "Altitude reference should be 0 for above sea level") XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSAltitude)] as? Double, 100.0, - "Altitude should match test location") - + "Altitude should match test location") + // Test timestamp XCTAssertNotNil(gpsDict[String(kCGImagePropertyGPSDateStamp)], - "Should include GPS timestamp") + "Should include GPS timestamp") } - + /// Tests that getCurrentLocationMetadata() handles negative coordinates correctly /// Assertion: Should set proper hemisphere references for Southern/Western coordinates func testGetCurrentLocationMetadata_HandlesNegativeCoordinates() { locationManager.setIncludeLocationData(true) locationManager.authorizationStatus = .authorizedWhenInUse - + let testLocation = createTestLocation( - latitude: -33.8688, // Sydney (Southern Hemisphere) - longitude: 151.2093, // Sydney (Eastern Hemisphere) - altitude: -10.0 // Below sea level + latitude: -33.8688, // Sydney (Southern Hemisphere) + longitude: 151.2093, // Sydney (Eastern Hemisphere) + altitude: -10.0 // Below sea level ) locationManager.lastLocation = testLocation - + let metadata = locationManager.getCurrentLocationMetadata() - + guard let gpsDict = metadata?[String(kCGImagePropertyGPSDictionary)] as? [String: Any] else { XCTFail("Should contain GPS dictionary") return } - + // Test negative latitude (Southern Hemisphere) XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLatitudeRef)] as? String, "S", - "Latitude reference should be South for negative latitude") + "Latitude reference should be South for negative latitude") XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLatitude)] as? Double, 33.8688, - "Latitude should be absolute value") - + "Latitude should be absolute value") + // Test positive longitude (Eastern Hemisphere) XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLongitudeRef)] as? String, "E", - "Longitude reference should be East for positive longitude") + "Longitude reference should be East for positive longitude") XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSLongitude)] as? Double, 151.2093, - "Longitude should match test location") - + "Longitude should match test location") + // Test negative altitude (below sea level) XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSAltitudeRef)] as? Int, 1, - "Altitude reference should be 1 for below sea level") + "Altitude reference should be 1 for below sea level") XCTAssertEqual(gpsDict[String(kCGImagePropertyGPSAltitude)] as? 
Double, 10.0, - "Altitude should be absolute value") + "Altitude should be absolute value") } - + /// Tests that getCurrentLocationMetadata() handles location with poor vertical accuracy /// Assertion: Should exclude altitude data when vertical accuracy is poor func testGetCurrentLocationMetadata_HandlesPoorVerticalAccuracy() { locationManager.setIncludeLocationData(true) locationManager.authorizationStatus = .authorizedWhenInUse - + let testLocation = createTestLocation( latitude: 40.7128, longitude: -74.0060, altitude: 50.0, - verticalAccuracy: -1.0 // Negative indicates invalid reading + verticalAccuracy: -1.0 // Negative indicates invalid reading ) locationManager.lastLocation = testLocation - + let metadata = locationManager.getCurrentLocationMetadata() - + guard let gpsDict = metadata?[String(kCGImagePropertyGPSDictionary)] as? [String: Any] else { XCTFail("Should contain GPS dictionary") return } - + // Should not include altitude data when vertical accuracy is poor XCTAssertNil(gpsDict[String(kCGImagePropertyGPSAltitudeRef)], - "Should not include altitude reference when vertical accuracy is poor") + "Should not include altitude reference when vertical accuracy is poor") XCTAssertNil(gpsDict[String(kCGImagePropertyGPSAltitude)], - "Should not include altitude when vertical accuracy is poor") - + "Should not include altitude when vertical accuracy is poor") + // Should still include latitude and longitude XCTAssertNotNil(gpsDict[String(kCGImagePropertyGPSLatitude)], - "Should still include latitude") + "Should still include latitude") XCTAssertNotNil(gpsDict[String(kCGImagePropertyGPSLongitude)], - "Should still include longitude") + "Should still include longitude") } - + // MARK: - Published Properties Tests - + /// Tests that authorizationStatus property publishes changes correctly /// Assertion: Property changes should be observable by subscribers func testAuthorizationStatus_PublishesChanges() { let expectation = XCTestExpectation(description: "authorizationStatus should publish changes") - + locationManager.$authorizationStatus .dropFirst() // Skip initial value .sink { status in @@ -290,17 +289,17 @@ class LocationManagerTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + locationManager.authorizationStatus = .authorizedWhenInUse - + wait(for: [expectation], timeout: 1.0) } - + /// Tests that lastLocation property publishes changes correctly /// Assertion: Location updates should be observable by subscribers // func testLastLocation_PublishesChanges() { // let expectation = XCTestExpectation(description: "lastLocation should publish changes") -// +// // locationManager.$lastLocation // .dropFirst() // Skip initial nil value // .sink { location in @@ -309,17 +308,17 @@ class LocationManagerTests: XCTestCase { // expectation.fulfill() // } // .store(in: &cancellables) -// +// // locationManager.lastLocation = createTestLocation() -// +// // wait(for: [expectation], timeout: 1.0) // } - + /// Tests that shouldIncludeLocationData property publishes changes correctly /// Assertion: User preference changes should be observable by subscribers func testShouldIncludeLocationData_PublishesChanges() { let expectation = XCTestExpectation(description: "shouldIncludeLocationData should publish changes") - + locationManager.$shouldIncludeLocationData .dropFirst() // Skip initial value .sink { shouldInclude in @@ -327,46 +326,46 @@ class LocationManagerTests: XCTestCase { expectation.fulfill() } .store(in: &cancellables) - + locationManager.shouldIncludeLocationData = 
true - + wait(for: [expectation], timeout: 1.0) } - + // MARK: - Integration Tests - + /// Tests the complete flow of enabling location data and getting metadata /// Assertion: Should properly handle the full workflow from permission to metadata generation func testLocationDataFlow_CompleteWorkflow() { // Start with disabled location data XCTAssertFalse(locationManager.shouldIncludeLocationData, - "Should start with location data disabled") - + "Should start with location data disabled") + // Enable location data locationManager.setIncludeLocationData(true) XCTAssertTrue(locationManager.shouldIncludeLocationData, - "Should enable location data") - + "Should enable location data") + // Set authorization as if user granted permission locationManager.authorizationStatus = .authorizedWhenInUse - + // Simulate location update locationManager.lastLocation = createTestLocation() - + // Get metadata let metadata = locationManager.getCurrentLocationMetadata() XCTAssertNotNil(metadata, "Should generate metadata with all conditions met") - + // Disable location data locationManager.setIncludeLocationData(false) - + // Metadata should now be nil let metadataAfterDisable = locationManager.getCurrentLocationMetadata() XCTAssertNil(metadataAfterDisable, "Should not generate metadata when disabled") } - + // MARK: - Helper Methods - + /// Creates a test CLLocation with specified coordinates private func createTestLocation( latitude: Double = 37.7749, @@ -375,7 +374,7 @@ class LocationManagerTests: XCTestCase { horizontalAccuracy: Double = 5.0, verticalAccuracy: Double = 5.0 ) -> CLLocation { - return CLLocation( + CLLocation( coordinate: CLLocationCoordinate2D(latitude: latitude, longitude: longitude), altitude: altitude, horizontalAccuracy: horizontalAccuracy, diff --git a/SnapSafeTests/PINManagerTests.swift b/SnapSafeTests/PINManagerTests.swift index 100cfc6..ab61919 100644 --- a/SnapSafeTests/PINManagerTests.swift +++ b/SnapSafeTests/PINManagerTests.swift @@ -2,15 +2,15 @@ // PINManagerTests.swift // SnapSafeTests // -// Created by Claude on 5/25/25. +// Created by Bill Booth on 5/25/25. // -import XCTest import Combine @testable import SnapSafe +import XCTest /// Comprehensive test suite for PINManager -/// +/// /// This test suite demonstrates various iOS testing patterns: /// - Unit testing with XCTest /// - Testing published properties with Combine @@ -18,52 +18,51 @@ import Combine /// - Async testing with expectations /// - Mock data and test isolation class PINManagerTests: XCTestCase { - // MARK: - Test Properties - + /// Reference to the PINManager instance under test var pinManager: PINManager! - + /// Test UserDefaults to isolate tests from real app data var testUserDefaults: UserDefaults! - + /// Combine subscriptions for testing published properties var cancellables: Set = [] - + // MARK: - Test Lifecycle - + /// Set up method called before each test method /// This ensures each test starts with a clean state override func setUp() { super.setUp() - + // Create a test-specific UserDefaults suite to avoid affecting real app data let suiteName = "PINManagerTests-\(UUID().uuidString)" testUserDefaults = UserDefaults(suiteName: suiteName)! 
- + // Clear any existing data in test defaults testUserDefaults.removePersistentDomain(forName: suiteName) - + // Note: We can't easily inject UserDefaults into PINManager due to singleton pattern // In a production app, we would refactor PINManager to accept UserDefaults as dependency pinManager = PINManager.shared - + // Clear any existing PIN state for testing and wait for async completion clearPINAndWait() - + // Reset requirePINOnResume to default value and wait for async completion resetRequirePINOnResumeAndWait() - + // Clear subscriptions cancellables.removeAll() - + print("Test setup completed - clean state established") } - + /// Helper method to clear PIN and wait for async update to complete private func clearPINAndWait() { let expectation = expectation(description: "PIN should be cleared") - + // If PIN is already not set, we're done if !pinManager.isPINSet { expectation.fulfill() @@ -78,21 +77,21 @@ class PINManagerTests: XCTestCase { } .store(in: &cancellables) } - + // Clear the PIN pinManager.clearPIN() - + // Wait for async update wait(for: [expectation], timeout: 1.0) - + // Clear subscriptions after setup cancellables.removeAll() } - + /// Helper method to reset requirePINOnResume to default and wait for async update private func resetRequirePINOnResumeAndWait() { let expectation = expectation(description: "requirePINOnResume should be reset to true") - + // If already true, we're done if pinManager.requirePINOnResume { expectation.fulfill() @@ -107,189 +106,189 @@ class PINManagerTests: XCTestCase { } .store(in: &cancellables) } - + // Reset to default value pinManager.setRequirePINOnResume(true) - + // Wait for async update wait(for: [expectation], timeout: 1.0) - + // Clear subscriptions after setup cancellables.removeAll() } - + /// Tear down method called after each test method override func tearDown() { // Clean up subscriptions cancellables.removeAll() - + // Clear PIN state using our helper method to ensure async completion clearPINAndWait() - + // Reset requirePINOnResume to default value resetRequirePINOnResumeAndWait() - + // Clear any UserDefaults keys that might have been set UserDefaults.standard.removeObject(forKey: "snapSafe.userPIN") UserDefaults.standard.removeObject(forKey: "snapSafe.isPINSet") UserDefaults.standard.removeObject(forKey: "snapSafe.requirePINOnResume") - + pinManager = nil testUserDefaults = nil - + super.tearDown() print("Test teardown completed") } - + // MARK: - PIN Setting Tests - + /// Test that setting a PIN updates the isPINSet property func testSetPIN_UpdatesIsPINSetProperty() { // Given: Initial state should be false XCTAssertFalse(pinManager.isPINSet, "PIN should not be set initially") - + // When: Setting a PIN let testPIN = "1234" pinManager.setPIN(testPIN) - + // Then: Wait for async update and verify using the helper method waitForPINSetUpdate(expectedValue: true) - + XCTAssertTrue(pinManager.isPINSet, "PIN should be marked as set after setPIN is called") } - + /// Test PIN setting with various valid PIN formats func testSetPIN_WithVariousPINFormats() { let testPINs = ["1234", "0000", "9876", "1111"] - + for testPIN in testPINs { // When: Setting each PIN pinManager.setPIN(testPIN) - + // Wait for async update waitForPINSetUpdate(expectedValue: true) - + // Then: Should be marked as set and verifiable XCTAssertTrue(pinManager.isPINSet, "PIN \(testPIN) should be marked as set") XCTAssertTrue(pinManager.verifyPIN(testPIN), "PIN \(testPIN) should verify correctly") - + // Clean up for next iteration pinManager.clearPIN() 
waitForPINSetUpdate(expectedValue: false) } } - + /// Test that setting a PIN publishes changes to observers func testSetPIN_PublishesChangesToObservers() { // Given: Expectation for published property change (only expect one fulfillment) let expectation = expectation(description: "isPINSet should be published") expectation.expectedFulfillmentCount = 1 - + var receivedValues: [Bool] = [] var hasFulfilled = false - + // Subscribe to isPINSet changes, skipping the initial value pinManager.$isPINSet .dropFirst() // Skip the initial subscription value .sink { isPINSet in receivedValues.append(isPINSet) - if isPINSet && !hasFulfilled { + if isPINSet, !hasFulfilled { hasFulfilled = true expectation.fulfill() } } .store(in: &cancellables) - + // When: Setting a PIN pinManager.setPIN("1234") - + // Then: Should receive published change waitForExpectations(timeout: 1.0) { error in XCTAssertNil(error, "Should not timeout waiting for published change") } - + XCTAssertTrue(receivedValues.contains(true), "Should have received isPINSet = true") } - + // MARK: - PIN Verification Tests - + /// Test PIN verification with correct PIN func testVerifyPIN_WithCorrectPIN_ReturnsTrue() { // Given: A PIN is set let testPIN = "1234" pinManager.setPIN(testPIN) - + // When: Verifying with correct PIN let result = pinManager.verifyPIN(testPIN) - + // Then: Should return true XCTAssertTrue(result, "Should return true when verifying correct PIN") } - + /// Test PIN verification with incorrect PIN func testVerifyPIN_WithIncorrectPIN_ReturnsFalse() { // Given: A PIN is set pinManager.setPIN("1234") - + // When: Verifying with incorrect PIN let result = pinManager.verifyPIN("5678") - + // Then: Should return false XCTAssertFalse(result, "Should return false when verifying incorrect PIN") } - + /// Test PIN verification when no PIN is set func testVerifyPIN_WhenNoPINSet_ReturnsFalse() { // Given: No PIN is set (initial state) XCTAssertFalse(pinManager.isPINSet, "No PIN should be set initially") - + // When: Attempting to verify any PIN let result = pinManager.verifyPIN("1234") - + // Then: Should return false XCTAssertFalse(result, "Should return false when no PIN is set") } - + /// Test PIN verification with edge cases func testVerifyPIN_EdgeCases() { // Test empty PIN pinManager.setPIN("") XCTAssertTrue(pinManager.verifyPIN(""), "Empty PIN should verify correctly") XCTAssertFalse(pinManager.verifyPIN("1234"), "Non-empty PIN should not match empty stored PIN") - + // Test PIN with spaces pinManager.setPIN(" 123 ") XCTAssertTrue(pinManager.verifyPIN(" 123 "), "PIN with spaces should verify correctly") XCTAssertFalse(pinManager.verifyPIN("123"), "PIN without spaces should not match PIN with spaces") } - + // MARK: - PIN Clearing Tests - + /// Test that clearing PIN resets the state func testClearPIN_ResetsState() { // Given: A PIN is set pinManager.setPIN("1234") waitForPINSetUpdate(expectedValue: true) XCTAssertTrue(pinManager.isPINSet, "PIN should be set initially") - + // When: Clearing the PIN pinManager.clearPIN() waitForPINSetUpdate(expectedValue: false) - + // Then: State should be reset XCTAssertFalse(pinManager.isPINSet, "PIN should not be set after clearing") XCTAssertFalse(pinManager.verifyPIN("1234"), "Old PIN should not verify after clearing") } - + /// Test that clearing PIN publishes changes func testClearPIN_PublishesChanges() { // Given: A PIN is set pinManager.setPIN("1234") waitForPINSetUpdate(expectedValue: true) - + let expectation = expectation(description: "isPINSet should be published as false") var 
finalValue: Bool? - + // Subscribe to changes AFTER the PIN is set, so dropFirst skips the current true value pinManager.$isPINSet .dropFirst() // Skip the current true value @@ -300,48 +299,48 @@ class PINManagerTests: XCTestCase { } } .store(in: &cancellables) - + // When: Clearing the PIN pinManager.clearPIN() - + // Then: Should publish false waitForExpectations(timeout: 1.0) { error in XCTAssertNil(error, "Should not timeout waiting for published change") } - + XCTAssertEqual(finalValue, false, "Should have published isPINSet = false") } - + // MARK: - PIN Resume Requirement Tests - + /// Test setting requirePINOnResume flag func testSetRequirePINOnResume_UpdatesProperty() { // Given: Initial state (should be true by default) XCTAssertTrue(pinManager.requirePINOnResume, "Should require PIN on resume by default") - + // When: Setting to false pinManager.setRequirePINOnResume(false) waitForRequirePINOnResumeUpdate(expectedValue: false) - + // Then: Should be updated XCTAssertFalse(pinManager.requirePINOnResume, "Should not require PIN on resume after setting to false") - + // When: Setting back to true pinManager.setRequirePINOnResume(true) waitForRequirePINOnResumeUpdate(expectedValue: true) - + // Then: Should be updated again XCTAssertTrue(pinManager.requirePINOnResume, "Should require PIN on resume after setting to true") } - + /// Test that requirePINOnResume publishes changes func testSetRequirePINOnResume_PublishesChanges() { // Given: Ensure we start with a known stable state (true) XCTAssertTrue(pinManager.requirePINOnResume, "Should start with requirePINOnResume = true") - + let expectation = expectation(description: "requirePINOnResume should be published") var receivedValue: Bool? - + // Subscribe to requirePINOnResume changes AFTER confirming stable state pinManager.$requirePINOnResume .dropFirst() // Skip the current true value @@ -352,113 +351,113 @@ class PINManagerTests: XCTestCase { } } .store(in: &cancellables) - + // When: Changing the setting from true to false pinManager.setRequirePINOnResume(false) - + // Then: Should receive published change waitForExpectations(timeout: 1.0) { error in XCTAssertNil(error, "Should not timeout waiting for published change") } - + XCTAssertEqual(receivedValue, false, "Should have received requirePINOnResume = false") } - + // MARK: - Last Active Time Tests - + /// Test updating last active time func testUpdateLastActiveTime_UpdatesProperty() { // Given: Initial last active time let initialTime = pinManager.lastActiveTime - + // Wait a small amount to ensure time difference let expectation = expectation(description: "Wait for time to pass") DispatchQueue.main.asyncAfter(deadline: .now() + 0.01) { expectation.fulfill() } waitForExpectations(timeout: 0.1) - + // When: Updating last active time pinManager.updateLastActiveTime() - + // Then: Should be updated to a more recent time XCTAssertGreaterThan(pinManager.lastActiveTime, initialTime, "Last active time should be updated to a more recent time") } - + // MARK: - Integration Tests - + /// Test complete PIN lifecycle: set → verify → clear → verify func testCompletePINLifecycle() { let testPIN = "1234" - + // Initially no PIN XCTAssertFalse(pinManager.isPINSet) XCTAssertFalse(pinManager.verifyPIN(testPIN)) - + // Set PIN pinManager.setPIN(testPIN) waitForPINSetUpdate(expectedValue: true) XCTAssertTrue(pinManager.isPINSet) XCTAssertTrue(pinManager.verifyPIN(testPIN)) XCTAssertFalse(pinManager.verifyPIN("9999")) - + // Clear PIN pinManager.clearPIN() waitForPINSetUpdate(expectedValue: 
false) XCTAssertFalse(pinManager.isPINSet) XCTAssertFalse(pinManager.verifyPIN(testPIN)) } - + /// Test multiple PIN changes func testMultiplePINChanges() { let pins = ["1111", "2222", "3333"] - + for (index, pin) in pins.enumerated() { // Set new PIN pinManager.setPIN(pin) waitForPINSetUpdate(expectedValue: true) - + // Verify current PIN works XCTAssertTrue(pinManager.verifyPIN(pin), "PIN \(pin) should verify correctly") - + // Verify previous PINs don't work - for previousIndex in 0..( @@ -516,16 +514,16 @@ extension PINManagerTests { line: UInt = #line ) { let expectation = expectation(description: "Wait for published value change") - + publisher .first { $0 == expectedValue } .sink { _ in expectation.fulfill() } .store(in: &cancellables) - + waitForExpectations(timeout: timeout) { error in - if let error = error { + if let error { XCTFail("Timeout waiting for published value \(expectedValue): \(error)", file: file, line: line) } } diff --git a/SnapSafeTests/PhotoDetailViewModelTests.swift b/SnapSafeTests/PhotoDetailViewModelTests.swift deleted file mode 100644 index 7b123a9..0000000 --- a/SnapSafeTests/PhotoDetailViewModelTests.swift +++ /dev/null @@ -1,496 +0,0 @@ -// -// PhotoDetailViewModelTests.swift -// SnapSafeTests -// -// Created by Bill Booth on 5/25/25. -// - -import XCTest -import UIKit -import Combine -@testable import SnapSafe - -class PhotoDetailViewModelTests: XCTestCase { - - private var viewModel: PhotoDetailViewModel! - private var testPhotos: [SecurePhoto]! - private var cancellables: Set! - - override func setUp() { - super.setUp() - testPhotos = createTestPhotos() - cancellables = Set() - } - - override func tearDown() { - cancellables?.removeAll() - cancellables = nil - viewModel = nil - testPhotos = nil - super.tearDown() - } - - // MARK: - Initialization Tests - - /// Tests that PhotoDetailViewModel initializes correctly with a single photo - /// Assertion: Should set up single photo mode with correct initial state - func testInit_WithSinglePhoto_SetsCorrectState() { - let singlePhoto = testPhotos[0] - var deleteCallbackCalled = false - var dismissCallbackCalled = false - - viewModel = PhotoDetailViewModel( - photo: singlePhoto, - showFaceDetection: true, - onDelete: { _ in deleteCallbackCalled = true }, - onDismiss: { dismissCallbackCalled = true } - ) - - XCTAssertTrue(viewModel.showFaceDetection, "Face detection should be enabled") - XCTAssertEqual(viewModel.currentPhoto.id, singlePhoto.id, "Current photo should match provided photo") - XCTAssertTrue(viewModel.allPhotos.isEmpty, "All photos array should be empty in single photo mode") - XCTAssertEqual(viewModel.currentIndex, 0, "Current index should be 0") - XCTAssertFalse(viewModel.canGoToPrevious, "Should not be able to go to previous in single photo mode") - XCTAssertFalse(viewModel.canGoToNext, "Should not be able to go to next in single photo mode") - } - - /// Tests that PhotoDetailViewModel initializes correctly with multiple photos - /// Assertion: Should set up multi-photo mode with correct initial state and navigation capabilities - func testInit_WithMultiplePhotos_SetsCorrectState() { - let initialIndex = 1 - var deleteCallbackCalled = false - var dismissCallbackCalled = false - - viewModel = PhotoDetailViewModel( - allPhotos: testPhotos, - initialIndex: initialIndex, - showFaceDetection: false, - onDelete: { _ in deleteCallbackCalled = true }, - onDismiss: { dismissCallbackCalled = true } - ) - - XCTAssertFalse(viewModel.showFaceDetection, "Face detection should be disabled") - 
XCTAssertEqual(viewModel.allPhotos.count, testPhotos.count, "All photos should be set correctly") - XCTAssertEqual(viewModel.currentIndex, initialIndex, "Current index should match initial index") - XCTAssertEqual(viewModel.currentPhoto.id, testPhotos[initialIndex].id, "Current photo should match photo at initial index") - XCTAssertTrue(viewModel.canGoToPrevious, "Should be able to go to previous from index 1") - XCTAssertTrue(viewModel.canGoToNext, "Should be able to go to next from index 1") - } - - // MARK: - Navigation Tests - - /// Tests that navigation to previous photo works correctly - /// Assertion: Should update current index and reset UI state when navigating to previous photo - func testNavigateToPrevious_UpdatesStateCorrectly() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 2, showFaceDetection: true) - - let expectation = XCTestExpectation(description: "Navigation should update current index") - - viewModel.$currentIndex - .dropFirst() - .sink { index in - XCTAssertEqual(index, 1, "Current index should be decremented") - expectation.fulfill() - } - .store(in: &cancellables) - - // Set some UI state that should be reset - viewModel.imageRotation = 90 - viewModel.currentScale = 2.0 - viewModel.isFaceDetectionActive = true - - viewModel.navigateToPrevious() - - wait(for: [expectation], timeout: 1.0) - - XCTAssertEqual(viewModel.imageRotation, 0, "Image rotation should be reset") - XCTAssertEqual(viewModel.currentScale, 1.0, "Scale should be reset") - XCTAssertFalse(viewModel.isFaceDetectionActive, "Face detection should be deactivated") - XCTAssertTrue(viewModel.detectedFaces.isEmpty, "Detected faces should be cleared") - XCTAssertNil(viewModel.modifiedImage, "Modified image should be cleared") - } - - /// Tests that navigation to next photo works correctly - /// Assertion: Should update current index and reset UI state when navigating to next photo - func testNavigateToNext_UpdatesStateCorrectly() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: true) - - let expectation = XCTestExpectation(description: "Navigation should update current index") - - viewModel.$currentIndex - .dropFirst() - .sink { index in - XCTAssertEqual(index, 1, "Current index should be incremented") - expectation.fulfill() - } - .store(in: &cancellables) - - // Set some UI state that should be reset - viewModel.imageRotation = 180 - viewModel.dragOffset = CGSize(width: 50, height: 50) - viewModel.detectedFaces = [DetectedFace(bounds: CGRect(x: 0, y: 0, width: 50, height: 50))] - - viewModel.navigateToNext() - - wait(for: [expectation], timeout: 1.0) - - XCTAssertEqual(viewModel.imageRotation, 0, "Image rotation should be reset") - XCTAssertEqual(viewModel.dragOffset, .zero, "Drag offset should be reset") - XCTAssertTrue(viewModel.detectedFaces.isEmpty, "Detected faces should be cleared") - } - - /// Tests that navigation respects boundaries - /// Assertion: Should not navigate beyond array bounds - func testNavigation_RespectsBoundaries() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: false) - - // At index 0, can't go to previous - XCTAssertFalse(viewModel.canGoToPrevious, "Should not be able to go to previous at index 0") - viewModel.navigateToPrevious() - XCTAssertEqual(viewModel.currentIndex, 0, "Index should remain 0 when trying to go to previous") - - // Move to last index - viewModel.currentIndex = testPhotos.count - 1 - - // At last index, can't go to next - 
XCTAssertFalse(viewModel.canGoToNext, "Should not be able to go to next at last index") - viewModel.navigateToNext() - XCTAssertEqual(viewModel.currentIndex, testPhotos.count - 1, "Index should remain at last position") - } - - // MARK: - Zoom and Pan Tests - - /// Tests that zoom and pan can be reset correctly - /// Assertion: Should reset all zoom and pan related properties to default values - func testResetZoomAndPan_ResetsAllProperties() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: false) - - let expectation = XCTestExpectation(description: "Zoom and pan should reset") - expectation.expectedFulfillmentCount = 4 - - // Set non-default values - viewModel.currentScale = 3.0 - viewModel.dragOffset = CGSize(width: 100, height: 100) - viewModel.lastScale = 3.0 - viewModel.isZoomed = true - viewModel.lastDragPosition = CGSize(width: 50, height: 50) - - // Monitor changes - viewModel.$currentScale.dropFirst().sink { scale in - XCTAssertEqual(scale, 1.0, "Current scale should reset to 1.0") - expectation.fulfill() - }.store(in: &cancellables) - - viewModel.$dragOffset.dropFirst().sink { offset in - XCTAssertEqual(offset, .zero, "Drag offset should reset to zero") - expectation.fulfill() - }.store(in: &cancellables) - - viewModel.$lastScale.dropFirst().sink { scale in - XCTAssertEqual(scale, 1.0, "Last scale should reset to 1.0") - expectation.fulfill() - }.store(in: &cancellables) - - viewModel.$isZoomed.dropFirst().sink { isZoomed in - XCTAssertFalse(isZoomed, "Is zoomed should reset to false") - expectation.fulfill() - }.store(in: &cancellables) - - viewModel.resetZoomAndPan() - - wait(for: [expectation], timeout: 2.0) - - XCTAssertEqual(viewModel.lastDragPosition, .zero, "Last drag position should reset to zero") - } - - // MARK: - Image Rotation Tests - - /// Tests that image rotation works correctly - /// Assertion: Should update rotation angle and reset zoom/pan when rotating - func testRotateImage_UpdatesRotationAndResetsZoom() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: false) - - // Set some zoom state - viewModel.currentScale = 2.0 - viewModel.dragOffset = CGSize(width: 50, height: 50) - - viewModel.rotateImage(direction: 90) - - XCTAssertEqual(viewModel.imageRotation, 90, "Image should be rotated 90 degrees") - XCTAssertEqual(viewModel.currentScale, 1.0, "Scale should be reset when rotating") - XCTAssertEqual(viewModel.dragOffset, .zero, "Drag offset should be reset when rotating") - } - - /// Tests that image rotation normalizes angles correctly - /// Assertion: Should keep rotation within 0-360 degree range - func testRotateImage_NormalizesAngles() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: false) - - // Rotate multiple times to test normalization - viewModel.rotateImage(direction: 90) - viewModel.rotateImage(direction: 90) - viewModel.rotateImage(direction: 90) - viewModel.rotateImage(direction: 90) - - XCTAssertEqual(viewModel.imageRotation, 0, "Rotation should normalize to 0 after 360 degrees") - - // Test negative rotation - viewModel.rotateImage(direction: -90) - XCTAssertEqual(viewModel.imageRotation, 270, "Negative rotation should normalize correctly") - } - - // MARK: - Face Detection Tests - - /// Tests that face detection can be activated and processes correctly - /// Assertion: Should update face detection state and trigger face detection process - func testDetectFaces_ActivatesAndProcesses() { - viewModel = 
PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: true) - - let expectation = XCTestExpectation(description: "Face detection should activate") - - viewModel.$isFaceDetectionActive - .dropFirst() - .sink { isActive in - XCTAssertTrue(isActive, "Face detection should be activated") - expectation.fulfill() - } - .store(in: &cancellables) - - viewModel.detectFaces() - - wait(for: [expectation], timeout: 1.0) - - XCTAssertTrue(viewModel.processingFaces, "Should be processing faces initially") - XCTAssertTrue(viewModel.detectedFaces.isEmpty, "Detected faces should be empty initially") - XCTAssertNil(viewModel.modifiedImage, "Modified image should be nil initially") - } - - /// Tests that face selection toggle works correctly - /// Assertion: Should toggle face selection state correctly - func testToggleFaceSelection_WorksCorrectly() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: true) - - let testFace = DetectedFace(bounds: CGRect(x: 10, y: 10, width: 50, height: 50), isSelected: false) - viewModel.detectedFaces = [testFace] - - XCTAssertFalse(testFace.isSelected, "Face should initially be unselected") - XCTAssertFalse(viewModel.hasFacesSelected, "Should not have faces selected initially") - - viewModel.toggleFaceSelection(testFace) - - XCTAssertTrue(viewModel.detectedFaces[0].isSelected, "Face should be selected after toggle") - XCTAssertTrue(viewModel.hasFacesSelected, "Should have faces selected after toggle") - - viewModel.toggleFaceSelection(testFace) - - XCTAssertFalse(viewModel.detectedFaces[0].isSelected, "Face should be unselected after second toggle") - XCTAssertFalse(viewModel.hasFacesSelected, "Should not have faces selected after second toggle") - } - - /// Tests that mask mode selection affects UI text correctly - /// Assertion: Should update action titles and button labels based on selected mask mode - func testMaskModeSelection_UpdatesUIText() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: true) - - let maskModes: [(MaskMode, String, String, String)] = [ - (.blur, "Blur Selected Faces", "blur", "Blur Faces"), - (.pixelate, "Pixelate Selected Faces", "pixelate", "Pixelate Faces"), - (.blackout, "Blackout Selected Faces", "blackout", "Blackout Faces"), - (.noise, "Apply Noise to Selected Faces", "apply noise to", "Apply Noise") - ] - - for (mode, expectedTitle, expectedVerb, expectedButton) in maskModes { - viewModel.selectedMaskMode = mode - - XCTAssertEqual(viewModel.maskActionTitle, expectedTitle, "Action title should match for \(mode)") - XCTAssertEqual(viewModel.maskActionVerb, expectedVerb, "Action verb should match for \(mode)") - XCTAssertEqual(viewModel.maskButtonLabel, expectedButton, "Button label should match for \(mode)") - } - } - - // MARK: - Photo Management Tests - - /// Tests that photo deletion works correctly for single photo - /// Assertion: Should trigger onDelete and onDismiss callbacks for single photo - func testDeletePhoto_SinglePhoto_TriggersCallbacks() { - let singlePhoto = testPhotos[0] - var deletedPhoto: SecurePhoto? 
- var dismissCalled = false - - viewModel = PhotoDetailViewModel( - photo: singlePhoto, - showFaceDetection: false, - onDelete: { photo in deletedPhoto = photo }, - onDismiss: { dismissCalled = true } - ) - - let expectation = XCTestExpectation(description: "Delete callbacks should be triggered") - expectation.expectedFulfillmentCount = 2 - - // Monitor for callback execution - DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { - if deletedPhoto != nil { expectation.fulfill() } - if dismissCalled { expectation.fulfill() } - } - - viewModel.deletePhoto() - - wait(for: [expectation], timeout: 2.0) - - XCTAssertNotNil(deletedPhoto, "onDelete callback should be called") - XCTAssertEqual(deletedPhoto?.id, singlePhoto.id, "Correct photo should be passed to onDelete") - } - - /// Tests that photo deletion works correctly for multiple photos - /// Assertion: Should update photo array and navigation state correctly - func testDeletePhoto_MultiplePhotos_UpdatesArray() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 1, showFaceDetection: false) - let initialCount = viewModel.allPhotos.count - let photoToDelete = viewModel.currentPhoto - - let expectation = XCTestExpectation(description: "Photo array should be updated") - - viewModel.$allPhotos - .dropFirst() - .sink { photos in - XCTAssertEqual(photos.count, initialCount - 1, "Photo count should decrease by 1") - XCTAssertFalse(photos.contains { $0.id == photoToDelete.id }, "Deleted photo should not be in array") - expectation.fulfill() - } - .store(in: &cancellables) - - viewModel.deletePhoto() - - wait(for: [expectation], timeout: 2.0) - } - - // MARK: - Display Image Tests - - /// Tests that displayedImage returns correct image based on face detection state - /// Assertion: Should return modified image when face detection is active, otherwise full image - func testDisplayedImage_ReturnsCorrectImage() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: true) - - // Initially should return full image - let initialImage = viewModel.displayedImage - XCTAssertNotNil(initialImage, "Should return a valid image") - - // Set modified image and activate face detection - let modifiedImage = createTestImage(size: CGSize(width: 100, height: 100)) - viewModel.modifiedImage = modifiedImage - viewModel.isFaceDetectionActive = true - - let displayedWithModified = viewModel.displayedImage - // Note: We can't directly compare UIImage objects, so we check that it's not nil - XCTAssertNotNil(displayedWithModified, "Should return modified image when face detection is active") - } - - // MARK: - Memory Management Tests - - /// Tests that preloadAdjacentPhotos manages memory correctly - /// Assertion: Should mark adjacent photos as visible for memory management - func testPreloadAdjacentPhotos_ManagesMemoryCorrectly() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 1, showFaceDetection: false) - - // Initially photos should not be marked as visible - XCTAssertFalse(testPhotos[0].isVisible, "Previous photo should not be visible initially") - XCTAssertFalse(testPhotos[2].isVisible, "Next photo should not be visible initially") - - viewModel.preloadAdjacentPhotos() - - // After preloading, adjacent photos should be marked as visible - XCTAssertTrue(testPhotos[0].isVisible, "Previous photo should be marked as visible") - XCTAssertTrue(testPhotos[2].isVisible, "Next photo should be marked as visible") - } - - /// Tests that onAppear properly sets up memory management - /// 
Assertion: Should mark current photo as visible and register with memory manager - func testOnAppear_SetsUpMemoryManagement() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: false) - - XCTAssertFalse(testPhotos[0].isVisible, "Photo should not be visible initially") - - viewModel.onAppear() - - XCTAssertTrue(testPhotos[0].isVisible, "Current photo should be marked as visible after onAppear") - } - - // MARK: - UI State Tests - - /// Tests that UI state properties can be updated correctly - /// Assertion: Should properly manage all UI state published properties - func testUIStateProperties_UpdateCorrectly() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: true) - - let expectation = XCTestExpectation(description: "UI state should update") - expectation.expectedFulfillmentCount = 8 - - // Monitor state changes - viewModel.$showDeleteConfirmation.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - viewModel.$isSwiping.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - viewModel.$processingFaces.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - viewModel.$showBlurConfirmation.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - viewModel.$showMaskOptions.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - viewModel.$showImageInfo.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - viewModel.$offset.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - viewModel.$imageFrameSize.dropFirst().sink { _ in expectation.fulfill() }.store(in: &cancellables) - - // Update states - viewModel.showDeleteConfirmation = true - viewModel.isSwiping = true - viewModel.processingFaces = true - viewModel.showBlurConfirmation = true - viewModel.showMaskOptions = true - viewModel.showImageInfo = true - viewModel.offset = 100 - viewModel.imageFrameSize = CGSize(width: 300, height: 400) - - wait(for: [expectation], timeout: 2.0) - } - - // MARK: - Sharing Tests - - /// Tests that sharePhoto method doesn't crash when executed - /// Assertion: Should handle sharing functionality without crashing - func testSharePhoto_DoesNotCrash() { - viewModel = PhotoDetailViewModel(allPhotos: testPhotos, initialIndex: 0, showFaceDetection: false) - - // Note: We can't fully test sharing functionality in unit tests since it requires UIKit view controller hierarchy - // But we can test that the method doesn't crash when called - XCTAssertNoThrow(viewModel.sharePhoto(), "Share photo should not crash when called") - } - - // MARK: - Helper Methods - - /// Creates test photos for use in tests - private func createTestPhotos() -> [SecurePhoto] { - let photos = (0..<3).map { index in - let testImage = createTestImage() - let metadata: [String: Any] = [ - "creationDate": Date().timeIntervalSince1970 - Double(index * 3600), - "testPhoto": true, - "index": index - ] - return SecurePhoto( - filename: "test_photo_\(index)", - metadata: metadata, - fileURL: URL(fileURLWithPath: "/tmp/test_\(index).jpg"), - preloadedThumbnail: testImage - ) - } - return photos - } - - /// Creates a test image for use in tests - private func createTestImage(size: CGSize = CGSize(width: 200, height: 200)) -> UIImage { - let renderer = UIGraphicsImageRenderer(size: size) - return renderer.image { context in - context.cgContext.setFillColor(UIColor.blue.cgColor) - context.cgContext.fill(CGRect(origin: .zero, size: 
size)) - - context.cgContext.setFillColor(UIColor.white.cgColor) - context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.25, y: size.height * 0.25, - width: size.width * 0.5, height: size.height * 0.5)) - } - } -} \ No newline at end of file diff --git a/SnapSafeTests/PhotoFilterTests.swift b/SnapSafeTests/PhotoFilterTests.swift index a1ae179..6e4e876 100644 --- a/SnapSafeTests/PhotoFilterTests.swift +++ b/SnapSafeTests/PhotoFilterTests.swift @@ -5,11 +5,10 @@ // Created by Bill Booth on 5/26/25. // -import XCTest @testable import SnapSafe +import XCTest class PhotoFilterTests: XCTestCase { - func testPhotoFilterCases() { // Test all filter cases exist let allCases = PhotoFilter.allCases @@ -19,7 +18,7 @@ class PhotoFilterTests: XCTestCase { XCTAssertTrue(allCases.contains(.edited)) XCTAssertTrue(allCases.contains(.withLocation)) } - + func testPhotoFilterRawValues() { // Test raw string values XCTAssertEqual(PhotoFilter.all.rawValue, "All Photos") @@ -27,7 +26,7 @@ class PhotoFilterTests: XCTestCase { XCTAssertEqual(PhotoFilter.edited.rawValue, "Edited Photos") XCTAssertEqual(PhotoFilter.withLocation.rawValue, "Photos with Location") } - + func testPhotoFilterSystemImages() { // Test system image names XCTAssertEqual(PhotoFilter.all.systemImage, "photo.stack") @@ -35,4 +34,4 @@ class PhotoFilterTests: XCTestCase { XCTAssertEqual(PhotoFilter.edited.systemImage, "pencil.circle") XCTAssertEqual(PhotoFilter.withLocation.systemImage, "location.circle") } -} \ No newline at end of file +} diff --git a/SnapSafeTests/PhotoMetadataFilteringTests.swift b/SnapSafeTests/PhotoMetadataFilteringTests.swift deleted file mode 100644 index d721075..0000000 --- a/SnapSafeTests/PhotoMetadataFilteringTests.swift +++ /dev/null @@ -1,238 +0,0 @@ -// -// PhotoMetadataFilteringTests.swift -// SnapSafeTests -// -// Created by Bill Booth on 5/26/25. -// - -import XCTest -import CoreLocation -@testable import SnapSafe - -class PhotoMetadataFilteringTests: XCTestCase { - - var testFileManager: SecureFileManager! 
- - override func setUp() { - super.setUp() - testFileManager = SecureFileManager() - } - - override func tearDown() { - testFileManager = nil - super.tearDown() - } - - // MARK: - GPS Metadata Tests - - func testPhotoWithGPSLatitudeLongitude_ShouldHaveLocation() { - // Create metadata with GPS data including latitude and longitude - let gpsData: [String: Any] = [ - String(kCGImagePropertyGPSLatitude): 37.7749, - String(kCGImagePropertyGPSLatitudeRef): "N", - String(kCGImagePropertyGPSLongitude): -122.4194, - String(kCGImagePropertyGPSLongitudeRef): "W" - ] - - let metadata: [String: Any] = [ - String(kCGImagePropertyGPSDictionary): gpsData - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let hasLocation = hasGPSLocation(photo: photo) - XCTAssertTrue(hasLocation, "Photo with GPS latitude and longitude should be detected as having location") - } - - func testPhotoWithGPSLatitudeOnly_ShouldHaveLocation() { - // Create metadata with GPS data including only latitude - let gpsData: [String: Any] = [ - String(kCGImagePropertyGPSLatitude): 37.7749, - String(kCGImagePropertyGPSLatitudeRef): "N" - ] - - let metadata: [String: Any] = [ - String(kCGImagePropertyGPSDictionary): gpsData - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let hasLocation = hasGPSLocation(photo: photo) - XCTAssertTrue(hasLocation, "Photo with GPS latitude only should be detected as having location") - } - - func testPhotoWithGPSLongitudeOnly_ShouldHaveLocation() { - // Create metadata with GPS data including only longitude - let gpsData: [String: Any] = [ - String(kCGImagePropertyGPSLongitude): -122.4194, - String(kCGImagePropertyGPSLongitudeRef): "W" - ] - - let metadata: [String: Any] = [ - String(kCGImagePropertyGPSDictionary): gpsData - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let hasLocation = hasGPSLocation(photo: photo) - XCTAssertTrue(hasLocation, "Photo with GPS longitude only should be detected as having location") - } - - func testPhotoWithEmptyGPSData_ShouldNotHaveLocation() { - // Create metadata with empty GPS dictionary - let gpsData: [String: Any] = [:] - - let metadata: [String: Any] = [ - String(kCGImagePropertyGPSDictionary): gpsData - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let hasLocation = hasGPSLocation(photo: photo) - XCTAssertFalse(hasLocation, "Photo with empty GPS data should not be detected as having location") - } - - func testPhotoWithoutGPSData_ShouldNotHaveLocation() { - // Create metadata without any GPS data - let metadata: [String: Any] = [ - "creationDate": Date().timeIntervalSince1970 - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let hasLocation = hasGPSLocation(photo: photo) - XCTAssertFalse(hasLocation, "Photo without GPS data should not be detected as having location") - } - - func testPhotoWithInvalidGPSDataType_ShouldNotHaveLocation() { - // Create metadata with invalid GPS data type - let metadata: [String: Any] = [ - String(kCGImagePropertyGPSDictionary): "invalid_gps_data_type" - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let hasLocation = hasGPSLocation(photo: photo) - XCTAssertFalse(hasLocation, "Photo with invalid GPS data type should not be detected as having location") - } - - // MARK: - Edited Photo Tests - - func testPhotoWithEditedFlag_ShouldBeEdited() { - // Create metadata with isEdited flag - let metadata: 
[String: Any] = [ - "isEdited": true, - "originalFilename": "original_photo_123", - "creationDate": Date().timeIntervalSince1970 - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let isEdited = isEditedPhoto(photo: photo) - XCTAssertTrue(isEdited, "Photo with isEdited flag should be detected as edited") - } - - func testPhotoWithEditedFlagFalse_ShouldNotBeEdited() { - // Create metadata with isEdited flag set to false - let metadata: [String: Any] = [ - "isEdited": false, - "creationDate": Date().timeIntervalSince1970 - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let isEdited = isEditedPhoto(photo: photo) - XCTAssertFalse(isEdited, "Photo with isEdited flag set to false should not be detected as edited") - } - - func testPhotoWithoutEditedFlag_ShouldNotBeEdited() { - // Create metadata without isEdited flag - let metadata: [String: Any] = [ - "creationDate": Date().timeIntervalSince1970 - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let isEdited = isEditedPhoto(photo: photo) - XCTAssertFalse(isEdited, "Photo without isEdited flag should not be detected as edited") - } - - // MARK: - Imported Photo Tests - - func testPhotoWithImportedFlag_ShouldBeImported() { - // Create metadata with imported flag - let metadata: [String: Any] = [ - "imported": true, - "importSource": "PhotosPicker", - "creationDate": Date().timeIntervalSince1970 - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let isImported = isImportedPhoto(photo: photo) - XCTAssertTrue(isImported, "Photo with imported flag should be detected as imported") - } - - func testPhotoWithImportedFlagFalse_ShouldNotBeImported() { - // Create metadata with imported flag set to false - let metadata: [String: Any] = [ - "imported": false, - "creationDate": Date().timeIntervalSince1970 - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let isImported = isImportedPhoto(photo: photo) - XCTAssertFalse(isImported, "Photo with imported flag set to false should not be detected as imported") - } - - func testPhotoWithoutImportedFlag_ShouldNotBeImported() { - // Create metadata without imported flag - let metadata: [String: Any] = [ - "creationDate": Date().timeIntervalSince1970 - ] - - let photo = createTestPhoto(metadata: metadata) - - // Test the filtering logic - let isImported = isImportedPhoto(photo: photo) - XCTAssertFalse(isImported, "Photo without imported flag should not be detected as imported") - } - - // MARK: - Helper Methods - - private func createTestPhoto(metadata: [String: Any]) -> SecurePhoto { - let testImage = UIImage(systemName: "photo") ?? UIImage() - let testURL = URL(fileURLWithPath: "/test/path") - return SecurePhoto(filename: "test_photo", metadata: metadata, fileURL: testURL) - } - - // Extract filtering logic to test it directly (matches SecureGalleryView implementation) - private func hasGPSLocation(photo: SecurePhoto) -> Bool { - guard let gpsData = photo.metadata[String(kCGImagePropertyGPSDictionary)] as? [String: Any] else { return false } - - let hasLatitude = gpsData[String(kCGImagePropertyGPSLatitude)] != nil - let hasLongitude = gpsData[String(kCGImagePropertyGPSLongitude)] != nil - - return hasLatitude || hasLongitude - } - - private func isEditedPhoto(photo: SecurePhoto) -> Bool { - return photo.metadata["isEdited"] as? 
Bool == true - } - - private func isImportedPhoto(photo: SecurePhoto) -> Bool { - return photo.metadata["imported"] as? Bool == true - } -} \ No newline at end of file diff --git a/SnapSafeTests/SecureFileManagerTests.swift b/SnapSafeTests/SecureFileManagerTests.swift index defa8bd..48f1ee2 100644 --- a/SnapSafeTests/SecureFileManagerTests.swift +++ b/SnapSafeTests/SecureFileManagerTests.swift @@ -5,27 +5,26 @@ // Created by Bill Booth on 5/25/25. // -import XCTest import Foundation -import UIKit @testable import SnapSafe +import UIKit +import XCTest class SecureFileManagerTests: XCTestCase { - private var secureFileManager: SecureFileManager! private var testPhotoData: Data! - + override func setUp() { super.setUp() secureFileManager = SecureFileManager() - + // Create minimal JPEG test data testPhotoData = createTestJPEGData() - + // Clean up any existing test files try? secureFileManager.deleteAllPhotos() } - + override func tearDown() { // Clean up test files after each test try? secureFileManager.deleteAllPhotos() @@ -33,91 +32,91 @@ class SecureFileManagerTests: XCTestCase { testPhotoData = nil super.tearDown() } - + // MARK: - Secure Directory Tests - + /// Tests that getSecureDirectory() creates and returns a valid secure directory /// Assertion: Directory should exist, be within Documents folder, and have backup exclusion func testGetSecureDirectory_CreatesValidSecureDirectory() throws { let secureDirectory = try secureFileManager.getSecureDirectory() - + // Assert directory exists - XCTAssertTrue(FileManager.default.fileExists(atPath: secureDirectory.path), - "Secure directory should exist after creation") - + XCTAssertTrue(FileManager.default.fileExists(atPath: secureDirectory.path), + "Secure directory should exist after creation") + // Assert it's within Documents directory - XCTAssertTrue(secureDirectory.path.contains("Documents/SecurePhotos"), - "Secure directory should be within Documents/SecurePhotos") - + XCTAssertTrue(secureDirectory.path.contains("Documents/SecurePhotos"), + "Secure directory should be within Documents/SecurePhotos") + // Assert backup exclusion attribute is set let resourceValues = try secureDirectory.resourceValues(forKeys: [.isExcludedFromBackupKey]) - XCTAssertTrue(resourceValues.isExcludedFromBackup == true, - "Secure directory should be excluded from backup") + XCTAssertTrue(resourceValues.isExcludedFromBackup == true, + "Secure directory should be excluded from backup") } - + /// Tests that calling getSecureDirectory() multiple times returns the same directory /// Assertion: Multiple calls should return identical URLs without creating duplicates func testGetSecureDirectory_ConsistentResults() throws { let directory1 = try secureFileManager.getSecureDirectory() let directory2 = try secureFileManager.getSecureDirectory() - - XCTAssertEqual(directory1, directory2, - "Multiple calls to getSecureDirectory should return the same URL") + + XCTAssertEqual(directory1, directory2, + "Multiple calls to getSecureDirectory should return the same URL") } - + // MARK: - Photo Saving Tests - + /// Tests that savePhoto() successfully saves photo data and metadata to secure storage /// Assertion: Photo should be saved with valid filename and retrievable data func testSavePhoto_SavesPhotoSuccessfully() throws { let testMetadata = ["testKey": "testValue", "imageWidth": 1024, "imageHeight": 768] as [String: Any] - + let filename = try secureFileManager.savePhoto(testPhotoData, withMetadata: testMetadata) - + // Assert filename is not empty 
XCTAssertFalse(filename.isEmpty, "Saved photo should have a valid filename") - + // Assert photo can be loaded back let (loadedData, loadedMetadata) = try secureFileManager.loadPhoto(filename: filename) XCTAssertEqual(loadedData, testPhotoData, "Loaded photo data should match original data") - + // Assert metadata includes our test data plus creation date XCTAssertEqual(loadedMetadata["testKey"] as? String, "testValue", "Custom metadata should be preserved") XCTAssertEqual(loadedMetadata["imageWidth"] as? Int, 1024, "Image width metadata should be preserved") XCTAssertNotNil(loadedMetadata["creationDate"], "Creation date should be automatically added") } - + /// Tests that savePhoto() generates unique filenames for concurrent saves /// Assertion: Multiple photos saved in sequence should have unique filenames func testSavePhoto_GeneratesUniqueFilenames() throws { let filename1 = try secureFileManager.savePhoto(testPhotoData) let filename2 = try secureFileManager.savePhoto(testPhotoData) let filename3 = try secureFileManager.savePhoto(testPhotoData) - + XCTAssertNotEqual(filename1, filename2, "Consecutive saves should generate unique filenames") XCTAssertNotEqual(filename2, filename3, "Consecutive saves should generate unique filenames") XCTAssertNotEqual(filename1, filename3, "Consecutive saves should generate unique filenames") - + // Verify all filenames contain timestamp and UUID components XCTAssertTrue(filename1.contains("_"), "Filename should contain timestamp_UUID format") XCTAssertTrue(filename2.contains("_"), "Filename should contain timestamp_UUID format") XCTAssertTrue(filename3.contains("_"), "Filename should contain timestamp_UUID format") } - + /// Tests that savePhoto() properly handles empty photo data /// Assertion: Empty data should be saved without throwing errors func testSavePhoto_HandlesEmptyData() throws { let emptyData = Data() - + XCTAssertNoThrow({ let filename = try self.secureFileManager.savePhoto(emptyData) XCTAssertFalse(filename.isEmpty, "Should generate filename even for empty data") - + let (loadedData, _) = try self.secureFileManager.loadPhoto(filename: filename) XCTAssertEqual(loadedData, emptyData, "Empty data should be preserved") }, "Saving empty photo data should not throw") } - + /// Tests that savePhoto() properly cleans and serializes complex metadata /// Assertion: Non-JSON serializable metadata should be filtered out, valid data preserved func testSavePhoto_CleansComplexMetadata() throws { @@ -131,10 +130,10 @@ class SecureFileManagerTests: XCTestCase { "invalidData": Data([0x01, 0x02, 0x03]), // Should be filtered out "invalidDate": Date(), // Should be filtered out ] - + let filename = try secureFileManager.savePhoto(testPhotoData, withMetadata: complexMetadata) let (_, loadedMetadata) = try secureFileManager.loadPhoto(filename: filename) - + // Assert valid metadata is preserved XCTAssertEqual(loadedMetadata["validString"] as? String, "test") XCTAssertEqual(loadedMetadata["validInt"] as? Int, 42) @@ -142,22 +141,22 @@ class SecureFileManagerTests: XCTestCase { XCTAssertEqual(loadedMetadata["validBool"] as? 
Bool, true) XCTAssertNotNil(loadedMetadata["validArray"]) XCTAssertNotNil(loadedMetadata["validDict"]) - + // Assert invalid metadata is filtered out XCTAssertNil(loadedMetadata["invalidData"], "Non-JSON serializable data should be filtered out") XCTAssertNil(loadedMetadata["invalidDate"], "Non-JSON serializable date should be filtered out") - + // Assert creation date is still added XCTAssertNotNil(loadedMetadata["creationDate"], "Creation date should always be added") } - + // MARK: - Photo Loading Tests - + /// Tests that loadPhoto() throws appropriate error for non-existent files /// Assertion: Loading non-existent photo should throw file not found error func testLoadPhoto_ThrowsForNonExistentFile() { let nonExistentFilename = "nonexistent_photo_12345" - + XCTAssertThrowsError(try secureFileManager.loadPhoto(filename: nonExistentFilename)) { error in // Assert it's a file not found error let nsError = error as NSError @@ -165,48 +164,48 @@ class SecureFileManagerTests: XCTestCase { XCTAssertEqual(nsError.code, NSFileReadNoSuchFileError, "Should be file not found error") } } - + /// Tests that loadAllPhotoMetadata() returns correct metadata without loading image data /// Assertion: Should return all saved photos with metadata but without heavy image data func testLoadAllPhotoMetadata_ReturnsMetadataWithoutImageData() throws { // Save multiple test photos let filename1 = try secureFileManager.savePhoto(testPhotoData, withMetadata: ["photo": "first"]) let filename2 = try secureFileManager.savePhoto(testPhotoData, withMetadata: ["photo": "second"]) - + let allMetadata = try secureFileManager.loadAllPhotoMetadata() - + XCTAssertEqual(allMetadata.count, 2, "Should return metadata for all saved photos") - + // Assert filenames are present - let filenames = allMetadata.map { $0.filename } + let filenames = allMetadata.map(\.filename) XCTAssertTrue(filenames.contains(filename1), "Should contain first photo filename") XCTAssertTrue(filenames.contains(filename2), "Should contain second photo filename") - + // Assert metadata is loaded for photoInfo in allMetadata { XCTAssertNotNil(photoInfo.metadata["creationDate"], "Each photo should have creation date") XCTAssertNotNil(photoInfo.fileURL, "Each photo should have valid file URL") } } - + /// Tests that loadPhotoThumbnail() generates appropriately sized thumbnails /// Assertion: Thumbnail should be smaller than specified max size func testLoadPhotoThumbnail_GeneratesCorrectSizedThumbnail() throws { let filename = try secureFileManager.savePhoto(testPhotoData) let secureDirectory = try secureFileManager.getSecureDirectory() let fileURL = secureDirectory.appendingPathComponent("\(filename).photo") - + let maxSize: CGFloat = 100 let thumbnail = try secureFileManager.loadPhotoThumbnail(from: fileURL, maxSize: maxSize) - + XCTAssertNotNil(thumbnail, "Should generate thumbnail for valid image data") - - if let thumbnail = thumbnail { + + if let thumbnail { XCTAssertLessThanOrEqual(thumbnail.size.width, maxSize, "Thumbnail width should not exceed maxSize") XCTAssertLessThanOrEqual(thumbnail.size.height, maxSize, "Thumbnail height should not exceed maxSize") } } - + /// Tests that loadPhotoThumbnail() handles invalid image data gracefully /// Assertion: Invalid image data should return nil without throwing func testLoadPhotoThumbnail_HandlesInvalidImageData() throws { @@ -215,14 +214,14 @@ class SecureFileManagerTests: XCTestCase { let filename = try secureFileManager.savePhoto(invalidData) let secureDirectory = try secureFileManager.getSecureDirectory() 
let fileURL = secureDirectory.appendingPathComponent("\(filename).photo") - + let thumbnail = try secureFileManager.loadPhotoThumbnail(from: fileURL) - + XCTAssertNil(thumbnail, "Should return nil for invalid image data") } - + // MARK: - Photo Deletion Tests - + /// Tests that deletePhoto() removes both photo and metadata files /// Assertion: After deletion, files should not exist and loading should throw error func testDeletePhoto_RemovesBothPhotoAndMetadata() throws { @@ -230,31 +229,31 @@ class SecureFileManagerTests: XCTestCase { let secureDirectory = try secureFileManager.getSecureDirectory() let photoURL = secureDirectory.appendingPathComponent("\(filename).photo") let metadataURL = secureDirectory.appendingPathComponent("\(filename).metadata") - + // Verify files exist before deletion XCTAssertTrue(FileManager.default.fileExists(atPath: photoURL.path), "Photo file should exist before deletion") XCTAssertTrue(FileManager.default.fileExists(atPath: metadataURL.path), "Metadata file should exist before deletion") - + try secureFileManager.deletePhoto(filename: filename) - + // Assert files no longer exist XCTAssertFalse(FileManager.default.fileExists(atPath: photoURL.path), "Photo file should be deleted") XCTAssertFalse(FileManager.default.fileExists(atPath: metadataURL.path), "Metadata file should be deleted") - + // Assert loading the photo now throws error - XCTAssertThrowsError(try secureFileManager.loadPhoto(filename: filename), - "Loading deleted photo should throw error") + XCTAssertThrowsError(try secureFileManager.loadPhoto(filename: filename), + "Loading deleted photo should throw error") } - + /// Tests that deletePhoto() handles non-existent files gracefully /// Assertion: Deleting non-existent photo should not throw error func testDeletePhoto_HandlesNonExistentFiles() { let nonExistentFilename = "nonexistent_photo_98765" - - XCTAssertNoThrow(try secureFileManager.deletePhoto(filename: nonExistentFilename), - "Deleting non-existent photo should not throw error") + + XCTAssertNoThrow(try secureFileManager.deletePhoto(filename: nonExistentFilename), + "Deleting non-existent photo should not throw error") } - + /// Tests that deleteAllPhotos() removes all photos and metadata from secure directory /// Assertion: After deleteAllPhotos(), directory should be empty func testDeleteAllPhotos_RemovesAllFiles() throws { @@ -262,91 +261,91 @@ class SecureFileManagerTests: XCTestCase { try secureFileManager.savePhoto(testPhotoData, withMetadata: ["photo": "1"]) try secureFileManager.savePhoto(testPhotoData, withMetadata: ["photo": "2"]) try secureFileManager.savePhoto(testPhotoData, withMetadata: ["photo": "3"]) - + // Verify photos exist let metadataBeforeDeletion = try secureFileManager.loadAllPhotoMetadata() XCTAssertEqual(metadataBeforeDeletion.count, 3, "Should have 3 photos before deletion") - + try secureFileManager.deleteAllPhotos() - + // Assert all photos are deleted let metadataAfterDeletion = try secureFileManager.loadAllPhotoMetadata() XCTAssertEqual(metadataAfterDeletion.count, 0, "Should have no photos after deleteAllPhotos()") } - + // MARK: - Sharing Tests - + /// Tests that preparePhotoForSharing() creates temporary file with UUID filename /// Assertion: Should create accessible temporary file with unique name func testPreparePhotoForSharing_CreatesTemporaryFile() throws { let tempURL = try secureFileManager.preparePhotoForSharing(imageData: testPhotoData) - + // Assert file is in temporary directory - XCTAssertTrue(tempURL.path.contains("tmp") || 
tempURL.path.contains("Temporary"), - "Share file should be in temporary directory") - + XCTAssertTrue(tempURL.path.contains("tmp") || tempURL.path.contains("Temporary"), + "Share file should be in temporary directory") + // Assert file exists and contains correct data - XCTAssertTrue(FileManager.default.fileExists(atPath: tempURL.path), - "Temporary share file should exist") - + XCTAssertTrue(FileManager.default.fileExists(atPath: tempURL.path), + "Temporary share file should exist") + let loadedData = try Data(contentsOf: tempURL) XCTAssertEqual(loadedData, testPhotoData, "Temporary file should contain original image data") - + // Assert filename contains UUID pattern (36 characters) let filename = tempURL.lastPathComponent let uuidPart = filename.replacingOccurrences(of: ".jpg", with: "") XCTAssertEqual(uuidPart.count, 36, "Filename should contain UUID (36 characters)") - + // Clean up try? FileManager.default.removeItem(at: tempURL) } - + /// Tests that preparePhotoForSharing() creates unique files for multiple calls /// Assertion: Multiple calls should create different temporary files func testPreparePhotoForSharing_CreatesUniqueFiles() throws { let tempURL1 = try secureFileManager.preparePhotoForSharing(imageData: testPhotoData) let tempURL2 = try secureFileManager.preparePhotoForSharing(imageData: testPhotoData) - + XCTAssertNotEqual(tempURL1, tempURL2, "Multiple calls should create unique temporary files") - + // Clean up try? FileManager.default.removeItem(at: tempURL1) try? FileManager.default.removeItem(at: tempURL2) } - + // MARK: - Edited Photo Saving Tests - + /// Tests that savePhoto() with isEdited flag marks photos correctly /// Assertion: Edited photos should have isEdited metadata and original filename link func testSavePhoto_WithEditedParameters_ShouldSaveCorrectly() throws { let metadata: [String: Any] = ["testKey": "testValue"] - + let filename = try secureFileManager.savePhoto( testPhotoData, withMetadata: metadata, isEdited: true, originalFilename: "original_test_photo" ) - + XCTAssertFalse(filename.isEmpty, "Filename should not be empty") - + // Verify photo was saved by loading it let (loadedData, loadedMetadata) = try secureFileManager.loadPhoto(filename: filename) - + // Verify data integrity XCTAssertEqual(loadedData, testPhotoData, "Loaded photo data should match original") - + // Verify edited metadata was added XCTAssertTrue(loadedMetadata["isEdited"] as? Bool == true, "Photo should be marked as edited") XCTAssertEqual(loadedMetadata["originalFilename"] as? String, "original_test_photo", "Original filename should be preserved") - + // Verify original metadata was preserved XCTAssertEqual(loadedMetadata["testKey"] as? String, "testValue", "Original metadata should be preserved") - + // Verify automatic metadata was added XCTAssertNotNil(loadedMetadata["creationDate"], "Creation date should be added automatically") } - + /// Tests that savePhoto() with isEdited but no original filename works correctly /// Assertion: Should mark as edited without original filename link func testSavePhoto_WithEditedFlagOnly_ShouldSaveWithoutOriginalFilename() throws { @@ -355,34 +354,34 @@ class SecureFileManagerTests: XCTestCase { withMetadata: [:], isEdited: true ) - + XCTAssertFalse(filename.isEmpty, "Filename should not be empty") - + // Verify photo was saved and metadata is correct let (_, loadedMetadata) = try secureFileManager.loadPhoto(filename: filename) - + // Verify edited flag was set XCTAssertTrue(loadedMetadata["isEdited"] as? 
Bool == true, "Photo should be marked as edited") - + // Verify no original filename is present XCTAssertNil(loadedMetadata["originalFilename"], "Original filename should not be present when not provided") } - + /// Tests that normal photo saving (not edited) doesn't add edited metadata /// Assertion: Normal photos should not have isEdited flags func testSavePhoto_WithoutEditedFlag_ShouldNotHaveEditedMetadata() throws { let filename = try secureFileManager.savePhoto(testPhotoData, withMetadata: [:]) - + XCTAssertFalse(filename.isEmpty, "Filename should not be empty") - + // Verify photo was saved without edited metadata let (_, loadedMetadata) = try secureFileManager.loadPhoto(filename: filename) - + // Verify no edited metadata is present XCTAssertNil(loadedMetadata["isEdited"], "Photo should not have isEdited flag") XCTAssertNil(loadedMetadata["originalFilename"], "Photo should not have originalFilename") } - + /// Tests that multiple edited photos with different originals are tracked separately /// Assertion: Each edited photo should maintain its own original filename link func testSavePhoto_MultipleEditedPhotos_ShouldTrackSeparately() throws { @@ -392,43 +391,43 @@ class SecureFileManagerTests: XCTestCase { isEdited: true, originalFilename: "original_photo_1" ) - + let filename2 = try secureFileManager.savePhoto( testPhotoData, withMetadata: [:], isEdited: true, originalFilename: "original_photo_2" ) - + // Verify both photos were saved with unique filenames XCTAssertNotEqual(filename1, filename2, "Filenames should be unique") - + // Verify first photo metadata let (_, metadata1) = try secureFileManager.loadPhoto(filename: filename1) XCTAssertTrue(metadata1["isEdited"] as? Bool == true) XCTAssertEqual(metadata1["originalFilename"] as? String, "original_photo_1") - + // Verify second photo metadata let (_, metadata2) = try secureFileManager.loadPhoto(filename: filename2) XCTAssertTrue(metadata2["isEdited"] as? Bool == true) XCTAssertEqual(metadata2["originalFilename"] as? String, "original_photo_2") } - + // MARK: - Error Handling Tests - + /// Tests that file operations handle disk space issues gracefully /// Assertion: Should propagate appropriate errors when disk operations fail func testFileOperations_HandleDiskErrors() { // Note: This test is difficult to implement without mocking FileManager // In a real production app, you might use dependency injection to test this - + // For now, we'll test that our methods can handle empty data without crashing - XCTAssertNoThrow(try secureFileManager.savePhoto(Data()), - "Should handle empty data without crashing") + XCTAssertNoThrow(try secureFileManager.savePhoto(Data()), + "Should handle empty data without crashing") } - + // MARK: - Helper Methods - + /// Creates minimal JPEG test data for testing purposes private func createTestJPEGData() -> Data { // Create a minimal 1x1 pixel JPEG image for testing diff --git a/SnapSafeTests/SecurePhotoTests.swift b/SnapSafeTests/SecurePhotoTests.swift deleted file mode 100644 index c72bc3e..0000000 --- a/SnapSafeTests/SecurePhotoTests.swift +++ /dev/null @@ -1,660 +0,0 @@ -// -// SecurePhotoTests.swift -// SnapSafeTests -// -// Created by Bill Booth on 5/25/25. -// - -import XCTest -import UIKit -@testable import SnapSafe - -class SecurePhotoTests: XCTestCase { - - private var testFileURL: URL! - private var testMetadata: [String: Any]! - private var testImage: UIImage! - private var securePhoto: SecurePhoto! 
- - override func setUp() { - super.setUp() - - // Create test file URL - testFileURL = URL(fileURLWithPath: "/tmp/test_photo.jpg") - - // Create test metadata - testMetadata = [ - "creationDate": Date().timeIntervalSince1970, - "imageWidth": 1920, - "imageHeight": 1080, - "isDecoy": false, - "originalOrientation": 1 - ] - - // Create test image - testImage = createTestImage() - - // Create test SecurePhoto instance - securePhoto = SecurePhoto( - filename: "test_photo_123", - metadata: testMetadata, - fileURL: testFileURL, - preloadedThumbnail: testImage - ) - } - - override func tearDown() { - securePhoto = nil - testImage = nil - testMetadata = nil - testFileURL = nil - super.tearDown() - } - - // MARK: - Initialization Tests - - /// Tests that SecurePhoto initializes with correct properties - /// Assertion: Should set all properties correctly during initialization - func testInit_SetsPropertiesCorrectly() { - let filename = "test_photo_456" - let metadata = ["testKey": "testValue"] - let fileURL = URL(fileURLWithPath: "/tmp/test.jpg") - let thumbnail = createTestImage() - - let photo = SecurePhoto( - filename: filename, - metadata: metadata, - fileURL: fileURL, - preloadedThumbnail: thumbnail - ) - - XCTAssertEqual(photo.filename, filename, "Filename should be set correctly") - XCTAssertEqual(photo.metadata["testKey"] as? String, "testValue", "Metadata should be preserved") - XCTAssertEqual(photo.fileURL, fileURL, "File URL should be set correctly") - XCTAssertNotNil(photo.id, "ID should be generated") - XCTAssertFalse(photo.isVisible, "Should initially be not visible") - } - - /// Tests that legacy initializer works correctly - /// Assertion: Should create SecurePhoto with provided images and metadata - func testLegacyInit_WorksCorrectly() { - let filename = "legacy_photo" - let thumbnail = createTestImage(size: CGSize(width: 100, height: 100)) - let fullImage = createTestImage(size: CGSize(width: 1000, height: 1000)) - let metadata = ["legacy": true] - - let photo = SecurePhoto(filename: filename, thumbnail: thumbnail, fullImage: fullImage, metadata: metadata) - - XCTAssertEqual(photo.filename, filename, "Filename should be set from legacy init") - XCTAssertEqual(photo.metadata["legacy"] as? 
Bool, true, "Metadata should be preserved") - } - - // MARK: - Equatable Tests - - /// Tests that SecurePhoto equality works correctly - /// Assertion: Should be equal when ID and filename match - func testEquatable_ComparesCorrectly() { - let photo1 = SecurePhoto(filename: "same_photo", metadata: [:], fileURL: testFileURL) - let photo2 = SecurePhoto(filename: "different_photo", metadata: [:], fileURL: testFileURL) - - // Same photo should equal itself - XCTAssertEqual(photo1, photo1, "Photo should equal itself") - - // Different photos should not be equal - XCTAssertNotEqual(photo1, photo2, "Different photos should not be equal") - } - - // MARK: - Decoy Status Tests - - /// Tests that isDecoy property reads from metadata correctly - /// Assertion: Should return false for non-decoy photos and true for decoy photos - func testIsDecoy_ReadsFromMetadataCorrectly() { - // Test false case - XCTAssertFalse(securePhoto.isDecoy, "Should return false when isDecoy is false in metadata") - - // Test true case - securePhoto.metadata["isDecoy"] = true - XCTAssertTrue(securePhoto.isDecoy, "Should return true when isDecoy is true in metadata") - - // Test missing key case - securePhoto.metadata.removeValue(forKey: "isDecoy") - XCTAssertFalse(securePhoto.isDecoy, "Should default to false when isDecoy key is missing") - } - - /// Tests that setDecoyStatus() updates metadata correctly - /// Assertion: Should update metadata with new decoy status - func testSetDecoyStatus_UpdatesMetadata() { - XCTAssertFalse(securePhoto.isDecoy, "Should initially be false") - - securePhoto.setDecoyStatus(true) - - XCTAssertTrue(securePhoto.isDecoy, "Should update to true") - XCTAssertEqual(securePhoto.metadata["isDecoy"] as? Bool, true, "Metadata should be updated") - - securePhoto.setDecoyStatus(false) - - XCTAssertFalse(securePhoto.isDecoy, "Should update back to false") - XCTAssertEqual(securePhoto.metadata["isDecoy"] as? 
Bool, false, "Metadata should be updated") - } - - // MARK: - Orientation Tests - - /// Tests that originalOrientation reads from metadata correctly - /// Assertion: Should convert EXIF orientation values to UIImage.Orientation correctly - func testOriginalOrientation_ReadsFromMetadata() { - let orientationTestCases: [(Int, UIImage.Orientation)] = [ - (1, .up), - (2, .upMirrored), - (3, .down), - (4, .downMirrored), - (5, .leftMirrored), - (6, .right), - (7, .rightMirrored), - (8, .left) - ] - - for (exifValue, expectedOrientation) in orientationTestCases { - securePhoto.metadata["originalOrientation"] = exifValue - XCTAssertEqual(securePhoto.originalOrientation, expectedOrientation, - "EXIF orientation \(exifValue) should map to \(expectedOrientation)") - } - } - - /// Tests that originalOrientation defaults correctly when metadata is missing - /// Assertion: Should default to .up when orientation metadata is missing - func testOriginalOrientation_DefaultsCorrectly() { - securePhoto.metadata.removeValue(forKey: "originalOrientation") - - XCTAssertEqual(securePhoto.originalOrientation, .up, "Should default to .up when orientation is missing") - } - - /// Tests that originalOrientation handles invalid values gracefully - /// Assertion: Should default to .up for invalid orientation values - func testOriginalOrientation_HandlesInvalidValues() { - // Test values outside valid range (1-8) - securePhoto.metadata["originalOrientation"] = 0 - XCTAssertEqual(securePhoto.originalOrientation, .up, "Should default to .up for orientation value 0") - - securePhoto.metadata["originalOrientation"] = 9 - XCTAssertEqual(securePhoto.originalOrientation, .up, "Should default to .up for orientation value 9") - - securePhoto.metadata["originalOrientation"] = -1 - XCTAssertEqual(securePhoto.originalOrientation, .up, "Should default to .up for negative orientation") - } - - /// Tests that originalOrientation reads from fullImage when metadata is missing - /// Assertion: Should inspect fullImage orientation when metadata unavailable - func testOriginalOrientation_ReadsFromFullImage() { - // Remove orientation metadata - securePhoto.metadata.removeValue(forKey: "originalOrientation") - - // Access originalOrientation which should trigger fullImage inspection - let orientation = securePhoto.originalOrientation - - // Should return a valid orientation (either from image or default) - let validOrientations: [UIImage.Orientation] = [.up, .down, .left, .right, .upMirrored, .downMirrored, .leftMirrored, .rightMirrored] - XCTAssertTrue(validOrientations.contains(orientation), "Should return valid orientation from fullImage or default") - } - - /// Tests that isLandscape property calculates correctly for different orientations - /// Assertion: Should determine landscape vs portrait correctly based on image dimensions and orientation - func testIsLandscape_CalculatesCorrectly() { - // Test cached value - securePhoto.metadata["isLandscape"] = true - XCTAssertTrue(securePhoto.isLandscape, "Should return cached landscape value") - - securePhoto.metadata["isLandscape"] = false - XCTAssertFalse(securePhoto.isLandscape, "Should return cached portrait value") - - // Remove cached value to test calculation - securePhoto.metadata.removeValue(forKey: "isLandscape") - - // Test normal orientation (1) with landscape image - securePhoto.metadata["originalOrientation"] = 1 - // Note: Since we can't easily control the test image dimensions in this context, - // we'll test that the property doesn't crash and returns a valid boolean - let 
isLandscape = securePhoto.isLandscape - XCTAssertTrue(isLandscape == true || isLandscape == false, "Should return valid boolean") - } - - /// Tests that frameSizeForDisplay calculates correct dimensions - /// Assertion: Should return appropriate width/height based on orientation and cell size - func testFrameSizeForDisplay_CalculatesCorrectDimensions() { - let cellSize: CGFloat = 100 - - // Test with normal orientation - securePhoto.metadata["originalOrientation"] = 1 - let (width, height) = securePhoto.frameSizeForDisplay(cellSize: cellSize) - - XCTAssertGreaterThan(width, 0, "Width should be positive") - XCTAssertGreaterThan(height, 0, "Height should be positive") - - // One dimension should equal cellSize for proper scaling - XCTAssertTrue(width == cellSize || height == cellSize, - "One dimension should equal cellSize for proper scaling") - } - - // MARK: - Memory Management Tests - - /// Tests that visibility tracking works correctly - /// Assertion: Should track visibility state changes - func testVisibilityTracking_WorksCorrectly() { - XCTAssertFalse(securePhoto.isVisible, "Should initially be not visible") - - securePhoto.isVisible = true - XCTAssertTrue(securePhoto.isVisible, "Should be visible when set") - - securePhoto.markAsInvisible() - XCTAssertFalse(securePhoto.isVisible, "Should be invisible after markAsInvisible()") - } - - /// Tests that access time tracking works correctly - /// Assertion: Should update last access time when images are accessed - func testAccessTimeTracking_UpdatesCorrectly() { - let initialAccessTime = securePhoto.timeSinceLastAccess - - // Wait a small amount to ensure time difference - Thread.sleep(forTimeInterval: 0.01) - - // Access thumbnail to update access time - let _ = securePhoto.thumbnail - - let newAccessTime = securePhoto.timeSinceLastAccess - XCTAssertLessThan(newAccessTime, initialAccessTime, - "Access time should be updated when thumbnail is accessed") - } - - /// Tests that clearMemory works correctly - /// Assertion: Should clear cached images while optionally keeping thumbnail - func testClearMemory_WorksCorrectly() { - // Preload images by accessing them - let _ = securePhoto.thumbnail - let _ = securePhoto.fullImage - - // Clear memory keeping thumbnail - securePhoto.clearMemory(keepThumbnail: true) - - // Test that we can still access thumbnail (it should be cached) - let thumbnailAfterClear = securePhoto.thumbnail - XCTAssertNotNil(thumbnailAfterClear, "Thumbnail should still be available when keepThumbnail is true") - - // Clear all memory - securePhoto.clearMemory(keepThumbnail: false) - - // Images should still be accessible (will be reloaded), but this tests the clearing mechanism - let thumbnailAfterFullClear = securePhoto.thumbnail - XCTAssertNotNil(thumbnailAfterFullClear, "Thumbnail should be reloadable after full clear") - } - - // MARK: - Image Loading Tests - - /// Tests that thumbnail loading works with preloaded image - /// Assertion: Should return preloaded thumbnail when available - func testThumbnailLoading_WorksWithPreloadedImage() { - let thumbnail = securePhoto.thumbnail - - XCTAssertNotNil(thumbnail, "Should return valid thumbnail") - XCTAssertTrue(securePhoto.isVisible, "Should mark as visible when thumbnail is accessed") - } - - /// Tests that thumbnail loading handles missing files gracefully - /// Assertion: Should return placeholder image when file cannot be loaded - func testThumbnailLoading_HandlesMissingFiles() { - // Create photo with non-existent file - let missingPhoto = SecurePhoto( - filename: 
"missing_photo", - metadata: [:], - fileURL: URL(fileURLWithPath: "/nonexistent/path.jpg") - ) - - let thumbnail = missingPhoto.thumbnail - - XCTAssertNotNil(thumbnail, "Should return placeholder for missing file") - // Should be a system image placeholder - XCTAssertNotNil(UIImage(systemName: "photo"), "Placeholder should be available") - } - - /// Tests that fullImage loading handles missing files gracefully - /// Assertion: Should fallback to thumbnail when full image cannot be loaded - func testFullImageLoading_HandlesMissingFiles() { - // Create photo with non-existent file - let missingPhoto = SecurePhoto( - filename: "missing_full_photo", - metadata: [:], - fileURL: URL(fileURLWithPath: "/nonexistent/path.jpg") - ) - - let fullImage = missingPhoto.fullImage - - XCTAssertNotNil(fullImage, "Should return fallback image for missing full image") - XCTAssertTrue(missingPhoto.isVisible, "Should mark as visible when fullImage is accessed") - } - - // MARK: - Metadata Persistence Tests - - /// Tests that setDecoyStatus performs async metadata save - /// Assertion: Should handle metadata saving asynchronously without blocking - func testSetDecoyStatus_PerformsAsyncSave() { - let expectation = XCTestExpectation(description: "Decoy status should be set without blocking") - - // Set decoy status (this triggers async save) - securePhoto.setDecoyStatus(true) - - // Should complete immediately (async operation) - XCTAssertTrue(securePhoto.isDecoy, "Decoy status should be updated immediately") - - // Give async operation time to complete - DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { - expectation.fulfill() - } - - wait(for: [expectation], timeout: 1.0) - } - - // MARK: - Edge Cases Tests - - /// Tests that SecurePhoto handles nil or empty metadata gracefully - /// Assertion: Should work correctly with minimal or missing metadata - func testHandlesEmptyMetadata_Gracefully() { - let photoWithEmptyMetadata = SecurePhoto( - filename: "empty_metadata_photo", - metadata: [:], - fileURL: testFileURL - ) - - XCTAssertFalse(photoWithEmptyMetadata.isDecoy, "Should default decoy to false") - XCTAssertEqual(photoWithEmptyMetadata.originalOrientation, .up, "Should default orientation to up") - XCTAssertNotNil(photoWithEmptyMetadata.thumbnail, "Should provide thumbnail even with empty metadata") - } - - /// Tests that SecurePhoto handles invalid metadata types gracefully - /// Assertion: Should handle type mismatches in metadata without crashing - func testHandlesInvalidMetadataTypes_Gracefully() { - let invalidMetadata: [String: Any] = [ - "isDecoy": "not_a_boolean", // Wrong type - "originalOrientation": "not_an_int", // Wrong type - "isLandscape": 123 // Wrong type - ] - - let photoWithInvalidMetadata = SecurePhoto( - filename: "invalid_metadata_photo", - metadata: invalidMetadata, - fileURL: testFileURL - ) - - // Should handle gracefully and use defaults - XCTAssertFalse(photoWithInvalidMetadata.isDecoy, "Should default to false for invalid decoy type") - XCTAssertEqual(photoWithInvalidMetadata.originalOrientation, .up, "Should default to up for invalid orientation") - } - - /// Tests that memory operations work with concurrent access - /// Assertion: Should handle concurrent memory operations safely - func testConcurrentMemoryOperations_WorkSafely() { - let expectation = XCTestExpectation(description: "Concurrent operations should complete safely") - expectation.expectedFulfillmentCount = 3 - - // Simulate concurrent access from different threads - DispatchQueue.global(qos: 
.userInitiated).async { - let _ = self.securePhoto.thumbnail - expectation.fulfill() - } - - DispatchQueue.global(qos: .userInitiated).async { - let _ = self.securePhoto.fullImage - expectation.fulfill() - } - - DispatchQueue.global(qos: .userInitiated).async { - self.securePhoto.clearMemory(keepThumbnail: false) - expectation.fulfill() - } - - wait(for: [expectation], timeout: 3.0) - } - - /// Tests that timeSinceLastAccess increases over time - /// Assertion: Should track time accurately - func testTimeSinceLastAccess_IncreasesOverTime() { - // Access the thumbnail to set last access time - let _ = securePhoto.thumbnail - - let initialTime = securePhoto.timeSinceLastAccess - - // Wait a short time - Thread.sleep(forTimeInterval: 0.05) - - let laterTime = securePhoto.timeSinceLastAccess - - XCTAssertGreaterThan(laterTime, initialTime, "Time since last access should increase over time") - } - - /// Tests isLandscape calculation for rotated orientations (5-8) - /// Assertion: Should handle rotated orientations correctly by swapping width/height comparison - func testIsLandscape_HandlesRotatedOrientations() { - // Test rotated orientations (5-8) which swap width/height for landscape calculation - let rotatedOrientations = [5, 6, 7, 8] - - for orientation in rotatedOrientations { - let rotatedPhoto = SecurePhoto( - filename: "rotated_test_\(orientation)", - metadata: ["originalOrientation": orientation], - fileURL: testFileURL, - preloadedThumbnail: testImage - ) - - let isLandscape = rotatedPhoto.isLandscape - XCTAssertTrue(isLandscape == true || isLandscape == false, - "Should calculate valid landscape value for rotated orientation \(orientation)") - } - } - - /// Tests frameSizeForDisplay with different orientation combinations - /// Assertion: Should calculate different dimensions for different orientation/landscape combinations - func testFrameSizeForDisplay_HandlesOrientationCombinations() { - let cellSize: CGFloat = 100 - - // Test case 1: Landscape photo, normal orientation (should use landscape branch) - let landscapePhoto = SecurePhoto( - filename: "landscape_test", - metadata: ["isLandscape": true, "originalOrientation": 1], - fileURL: testFileURL, - preloadedThumbnail: testImage - ) - let (landscapeWidth, _) = landscapePhoto.frameSizeForDisplay(cellSize: cellSize) - XCTAssertEqual(landscapeWidth, cellSize, "Landscape normal orientation should use cellSize for width") - - // Test case 2: Portrait photo, normal orientation (should use portrait branch) - let portraitPhoto = SecurePhoto( - filename: "portrait_test", - metadata: ["isLandscape": false, "originalOrientation": 1], - fileURL: testFileURL, - preloadedThumbnail: testImage - ) - let (_, portraitHeight) = portraitPhoto.frameSizeForDisplay(cellSize: cellSize) - XCTAssertEqual(portraitHeight, cellSize, "Portrait normal orientation should use cellSize for height") - } - - /// Tests setDecoyStatus error handling - /// Assertion: Should handle file system errors gracefully - func testSetDecoyStatus_HandlesErrors() { - // Create photo with invalid file path to trigger error conditions - let invalidPhoto = SecurePhoto( - filename: "invalid_path_photo", - metadata: [:], - fileURL: URL(fileURLWithPath: "/invalid/readonly/path.jpg") - ) - - // Should not crash even if metadata save fails - XCTAssertNoThrow(invalidPhoto.setDecoyStatus(true), - "Should handle metadata save errors gracefully") - - // Metadata should still be updated in memory even if disk save fails - XCTAssertTrue(invalidPhoto.isDecoy, "Should update in-memory metadata 
even if disk save fails") - } - - /// Tests clearMemory edge cases - /// Assertion: Should handle cases where images are not loaded - func testClearMemory_HandlesEdgeCases() { - // Test clearing memory when no images are loaded - let freshPhoto = SecurePhoto( - filename: "fresh_photo", - metadata: [:], - fileURL: testFileURL - ) - - // Should not crash when clearing memory of unloaded images - XCTAssertNoThrow(freshPhoto.clearMemory(keepThumbnail: true), - "Should not crash when clearing unloaded images") - XCTAssertNoThrow(freshPhoto.clearMemory(keepThumbnail: false), - "Should not crash when clearing unloaded images") - } - - /// Tests handling of nil metadata values - /// Assertion: Should handle nil values in metadata dictionary - func testHandlesNilMetadataValues_Gracefully() { - var metadataWithNils: [String: Any] = [:] - metadataWithNils["isDecoy"] = nil - metadataWithNils["originalOrientation"] = nil - metadataWithNils["isLandscape"] = nil - - let photoWithNils = SecurePhoto( - filename: "nil_metadata_photo", - metadata: metadataWithNils, - fileURL: testFileURL - ) - - // Should handle nil values gracefully - XCTAssertFalse(photoWithNils.isDecoy, "Should default to false for nil decoy value") - XCTAssertEqual(photoWithNils.originalOrientation, .up, "Should default to up for nil orientation") - } - - /// Tests fullImage fallback behavior - /// Assertion: Should fallback to thumbnail when fullImage loading fails - func testFullImage_FallbackBehavior() { - // Create photo that will fail to load full image - let failingPhoto = SecurePhoto( - filename: "failing_photo", - metadata: [:], - fileURL: URL(fileURLWithPath: "/nonexistent/fail.jpg"), - preloadedThumbnail: testImage - ) - - let fullImage = failingPhoto.fullImage - - // Should fallback to thumbnail (which is preloaded) - XCTAssertNotNil(fullImage, "Should return fallback image when full image fails to load") - XCTAssertTrue(failingPhoto.isVisible, "Should mark as visible even when using fallback") - } - - /// Tests thumbnail placeholder behavior - /// Assertion: Should return system placeholder when thumbnail cannot be loaded - func testThumbnail_PlaceholderBehavior() { - // Create photo with no preloaded thumbnail and invalid file path - let placeholderPhoto = SecurePhoto( - filename: "placeholder_photo", - metadata: [:], - fileURL: URL(fileURLWithPath: "/invalid/placeholder.jpg") - ) - - let thumbnail = placeholderPhoto.thumbnail - - // Should return placeholder (system photo icon) - XCTAssertNotNil(thumbnail, "Should return placeholder thumbnail") - XCTAssertTrue(placeholderPhoto.isVisible, "Should mark as visible when accessing placeholder") - } - - /// Tests that both thumbnail and fullImage access update lastAccessTime - /// Assertion: Should update access time for both image types - func testLastAccessTime_UpdatesForBothImageTypes() { - // Use the existing securePhoto with preloaded thumbnail for consistent behavior - let initialTime = securePhoto.timeSinceLastAccess - - // Wait to ensure measurable time difference - Thread.sleep(forTimeInterval: 0.1) - - // Access thumbnail should update access time - let _ = securePhoto.thumbnail - let timeAfterThumbnail = securePhoto.timeSinceLastAccess - - XCTAssertLessThan(timeAfterThumbnail, initialTime, "Thumbnail access should update last access time") - XCTAssertLessThan(timeAfterThumbnail, 0.05, "Thumbnail access should result in very recent access time") - - // Wait longer to ensure measurable time difference - Thread.sleep(forTimeInterval: 0.1) - - // Access full image should 
update access time again - let _ = securePhoto.fullImage - let timeAfterFullImage = securePhoto.timeSinceLastAccess - - // Verify both operations update the timestamp correctly - XCTAssertLessThan(timeAfterFullImage, 0.05, "Full image access should result in very recent access time") - XCTAssertLessThan(timeAfterFullImage, initialTime, "Full image access should update last access time") - - // Verify the access operations work independently - XCTAssertTrue(securePhoto.isVisible, "Photo should be marked as visible after image access") - } - - /// Tests image caching behavior - /// Assertion: Should cache images after first load and reuse them - func testImageCaching_WorksCorrectly() { - // First thumbnail access should load and cache - let firstThumbnail = securePhoto.thumbnail - - // Second access should use cached version (same instance) - let secondThumbnail = securePhoto.thumbnail - - // Both should be the same cached instance - XCTAssertTrue(firstThumbnail === secondThumbnail, "Should reuse cached thumbnail") - - // Same test for full image - let firstFullImage = securePhoto.fullImage - let secondFullImage = securePhoto.fullImage - - XCTAssertTrue(firstFullImage === secondFullImage, "Should reuse cached full image") - } - - /// Tests concurrent metadata operations - /// Assertion: Should handle concurrent metadata updates safely - func testConcurrentMetadataOperations_WorkSafely() { - let expectation = XCTestExpectation(description: "Concurrent metadata operations should complete safely") - expectation.expectedFulfillmentCount = 4 - - // Simulate concurrent metadata access and updates - DispatchQueue.global(qos: .userInitiated).async { - let _ = self.securePhoto.isDecoy - expectation.fulfill() - } - - DispatchQueue.global(qos: .userInitiated).async { - let _ = self.securePhoto.originalOrientation - expectation.fulfill() - } - - DispatchQueue.global(qos: .userInitiated).async { - self.securePhoto.setDecoyStatus(true) - expectation.fulfill() - } - - DispatchQueue.global(qos: .userInitiated).async { - let _ = self.securePhoto.isLandscape - expectation.fulfill() - } - - wait(for: [expectation], timeout: 3.0) - } - - // MARK: - Helper Methods - - /// Creates a test image for use in tests - private func createTestImage(size: CGSize = CGSize(width: 200, height: 200)) -> UIImage { - let renderer = UIGraphicsImageRenderer(size: size) - return renderer.image { context in - context.cgContext.setFillColor(UIColor.blue.cgColor) - context.cgContext.fill(CGRect(origin: .zero, size: size)) - - context.cgContext.setFillColor(UIColor.white.cgColor) - context.cgContext.fillEllipse(in: CGRect(x: size.width * 0.25, y: size.height * 0.25, - width: size.width * 0.5, height: size.height * 0.5)) - } - } -} diff --git a/SnapSafeTests/SnapSafeTests.swift b/SnapSafeTests/SnapSafeTests.swift index 5df927b..427d512 100644 --- a/SnapSafeTests/SnapSafeTests.swift +++ b/SnapSafeTests/SnapSafeTests.swift @@ -1,16 +1,15 @@ // -// Snap_SafeTests.swift +// SnapSafeTests.swift // SnapSafeTests // // Created by Bill Booth on 5/2/25. // -import XCTest @testable import SnapSafe +import XCTest /// Basic test class to verify test target is working class SnapSafeTests: XCTestCase { - override func setUpWithError() throws { // Put setup code here. This method is called before the invocation of each test method in the class. } @@ -27,9 +26,9 @@ class SnapSafeTests: XCTestCase { func testPerformanceExample() throws { // This is an example of a performance test case. 
- self.measure { + measure { // Put the code you want to measure the time of here. - let _ = Array(0...1000).map { $0 * 2 } + let _ = Array(0 ... 1000).map { $0 * 2 } } } } diff --git a/SnapSafeUITests/SnapSafeUITests.swift b/SnapSafeUITests/SnapSafeUITests.swift index 752add4..0666492 100644 --- a/SnapSafeUITests/SnapSafeUITests.swift +++ b/SnapSafeUITests/SnapSafeUITests.swift @@ -1,5 +1,5 @@ // -// Snap_SafeUITests.swift +// SnapSafeUITests.swift // SnapSafeUITests // // Created by Bill Booth on 5/2/25. @@ -8,7 +8,6 @@ import XCTest final class Snap_SafeUITests: XCTestCase { - override func setUpWithError() throws { // Put setup code here. This method is called before the invocation of each test method in the class. diff --git a/SnapSafeUITests/SnapSafeUITestsLaunchTests.swift b/SnapSafeUITests/SnapSafeUITestsLaunchTests.swift index 67d8d0f..36f86e1 100644 --- a/SnapSafeUITests/SnapSafeUITestsLaunchTests.swift +++ b/SnapSafeUITests/SnapSafeUITestsLaunchTests.swift @@ -1,5 +1,5 @@ // -// Snap_SafeUITestsLaunchTests.swift +// SnapSafeUITestsLaunchTests.swift // SnapSafeUITests // // Created by Bill Booth on 5/2/25. @@ -8,7 +8,6 @@ import XCTest final class Snap_SafeUITestsLaunchTests: XCTestCase { - override class var runsForEachTargetApplicationUIConfiguration: Bool { true } From d63d47a8e13b1c0e51bb19729256d0038872b567 Mon Sep 17 00:00:00 2001 From: Bill Booth Date: Tue, 24 Jun 2025 22:20:26 -0700 Subject: [PATCH 2/5] Drop the photo filter feature This was too much metadata on images, it wasn't needed. - Removed commented filter state: Deleted the selectedFilter state variable - Removed filter functionality: Eliminated the filteredPhotos computed property and related filtering logic - Updated photo display: Changed from filteredPhotos to photos in the LazyVGrid - Removed filter menu: Deleted the commented "Filter Photos" menu from the toolbar - Deleted filter files: Removed PhotoFilter.swift and PhotoFilterTests.swift completely - Updated photo detail navigation: Fixed the navigation to use photos instead of filteredPhotos --- SnapSafe/Data/PhotoFilter.swift | 28 ------ SnapSafe/Models/PhotoMetadata.swift | 32 +++---- SnapSafe/Repositories/EncryptionManager.swift | 95 ------------------- SnapSafe/Repositories/FileManager.swift | 61 ++++++------ .../Repositories/SecurePhotoRepository.swift | 4 - .../Views/PhotoDetail/PhotoDetailView.swift | 4 - SnapSafe/Views/SecureGalleryView.swift | 40 +------- SnapSafeTests/PhotoFilterTests.swift | 37 -------- 8 files changed, 50 insertions(+), 251 deletions(-) delete mode 100644 SnapSafe/Data/PhotoFilter.swift delete mode 100644 SnapSafe/Repositories/EncryptionManager.swift delete mode 100644 SnapSafeTests/PhotoFilterTests.swift diff --git a/SnapSafe/Data/PhotoFilter.swift b/SnapSafe/Data/PhotoFilter.swift deleted file mode 100644 index 562929e..0000000 --- a/SnapSafe/Data/PhotoFilter.swift +++ /dev/null @@ -1,28 +0,0 @@ -// -// PhotoFilter.swift -// SnapSafe -// -// Created by Bill Booth on 5/26/25. 
-// - -import Foundation - -enum PhotoFilter: String, CaseIterable { - case all = "All Photos" - case imported = "Imported Photos" - case edited = "Edited Photos" - case withLocation = "Photos with Location" - - var systemImage: String { - switch self { - case .all: - "photo.stack" - case .imported: - "square.and.arrow.down" - case .edited: - "pencil.circle" - case .withLocation: - "location.circle" - } - } -} diff --git a/SnapSafe/Models/PhotoMetadata.swift b/SnapSafe/Models/PhotoMetadata.swift index 8d65841..6ac3583 100644 --- a/SnapSafe/Models/PhotoMetadata.swift +++ b/SnapSafe/Models/PhotoMetadata.swift @@ -27,20 +27,20 @@ struct PhotoMetadata: Codable, Equatable { } } -struct PhotoPredicate { - let dateRange: ClosedRange? - let hasFaces: Bool? - let maskMode: MaskMode? - - init(dateRange: ClosedRange? = nil, hasFaces: Bool? = nil, maskMode: MaskMode? = nil) { - self.dateRange = dateRange - self.hasFaces = hasFaces - self.maskMode = maskMode - } -} +//struct PhotoPredicate { +// let dateRange: ClosedRange? +// let hasFaces: Bool? +// let maskMode: MaskMode? +// +// init(dateRange: ClosedRange? = nil, hasFaces: Bool? = nil, maskMode: MaskMode? = nil) { +// self.dateRange = dateRange +// self.hasFaces = hasFaces +// self.maskMode = maskMode +// } +//} -enum ExportFormat { - case jpeg(quality: CGFloat) - case png - case heic -} +//enum ExportFormat { +// case jpeg(quality: CGFloat) +// case png +// case heic +//} diff --git a/SnapSafe/Repositories/EncryptionManager.swift b/SnapSafe/Repositories/EncryptionManager.swift deleted file mode 100644 index 311ba40..0000000 --- a/SnapSafe/Repositories/EncryptionManager.swift +++ /dev/null @@ -1,95 +0,0 @@ -// -// EncryptionManager.swift -// SnapSafe -// -// Created by Bill Booth on 5/3/25. -// - -import CryptoKit -import Foundation -import Security - -// class EncryptionManager { -// private let keyManager = KeyManagement() -// -// func encryptData(_ data: Data) throws -> Data { -// // Generate a random symmetric key -// let symmetricKey = SymmetricKey(size: .bits256) -// -// // Encrypt the data using AES-GCM -// let nonce = AES.GCM.Nonce() -// let ciphertext = try AES.GCM.seal(data, using: symmetricKey, nonce: nonce) -// -// // Encrypt the symmetric key with the Secure Enclave key -// let encryptedSymmetricKey = try encryptSymmetricKey(symmetricKey) -// -// // Combine everything into a single encrypted package -// var encryptedData = Data() -// encryptedData.append(nonce.withUnsafeBytes { Data($0) }) -// encryptedData.append(encryptedSymmetricKey) -// encryptedData.append(try ciphertext.combined()) -// -// return encryptedData -// } -// -// func decryptData(_ encryptedData: Data) throws -> Data { -// // Extract components from the encrypted package -// let nonceSize = AES.GCM.Nonce.byteCount -// let encryptedKeySize = 256 // Adjust based on your key encryption method -// -// let nonce = encryptedData.prefix(nonceSize) -// let encryptedKey = encryptedData.subdata(in: nonceSize..<(nonceSize + encryptedKeySize)) -// let sealedBox = encryptedData.suffix(from: nonceSize + encryptedKeySize) -// -// // Decrypt the symmetric key -// let symmetricKey = try decryptSymmetricKey(encryptedKey) -// -// // Create AES-GCM seal box -// let nonceObj = try AES.GCM.Nonce(data: nonce) -// let box = try AES.GCM.SealedBox(combined: sealedBox) -// -// // Decrypt the data -// return try AES.GCM.open(box, using: symmetricKey) -// } -// -// private func encryptSymmetricKey(_ key: SymmetricKey) throws -> Data { -// // Get the public key corresponding to the private key in 
Secure Enclave -// let privateKey = try keyManager.getEncryptionKey() -// guard let publicKey = SecKeyCopyPublicKey(privateKey) else { -// throw NSError(domain: "com.securecamera", code: -1, userInfo: nil) -// } -// -// // Convert SymmetricKey to Data -// let keyData = key.withUnsafeBytes { Data($0) } -// -// // Encrypt the symmetric key with the public key -// guard let encryptedKey = SecKeyCreateEncryptedData( -// publicKey, -// .eciesEncryptionCofactorX963SHA256AESGCM, -// keyData as CFData, -// nil -// ) as Data? else { -// throw NSError(domain: "com.securecamera", code: -2, userInfo: nil) -// } -// -// return encryptedKey -// } -// -// private func decryptSymmetricKey(_ encryptedKey: Data) throws -> SymmetricKey { -// // Get the private key from Secure Enclave -// let privateKey = try keyManager.getEncryptionKey() -// -// // Decrypt the symmetric key -// guard let decryptedKey = SecKeyCreateDecryptedData( -// privateKey, -// .eciesEncryptionCofactorX963SHA256AESGCM, -// encryptedKey as CFData, -// nil -// ) as Data? else { -// throw NSError(domain: "com.securecamera", code: -3, userInfo: nil) -// } -// -// // Convert Data back to SymmetricKey -// return SymmetricKey(data: decryptedKey) -// } -// } diff --git a/SnapSafe/Repositories/FileManager.swift b/SnapSafe/Repositories/FileManager.swift index d583ee5..d7ae5ea 100644 --- a/SnapSafe/Repositories/FileManager.swift +++ b/SnapSafe/Repositories/FileManager.swift @@ -190,37 +190,36 @@ class SecureFileManager { return nil } - // Keep this for compatibility, but don't use it for loading the gallery - func loadAllPhotos() throws -> [(filename: String, data: Data, metadata: [String: Any])] { - let secureDirectory = try getSecureDirectory() - let contents = try fileManager.contentsOfDirectory(at: secureDirectory, includingPropertiesForKeys: nil) - - var photos: [(filename: String, data: Data, metadata: [String: Any])] = [] - - for fileURL in contents { - if fileURL.pathExtension == "photo" { - let filename = fileURL.deletingPathExtension().lastPathComponent - - // Load photo data - let photoData = try Data(contentsOf: fileURL) - - // Try to load metadata if it exists - let metadataURL = secureDirectory.appendingPathComponent("\(filename).metadata") - var metadata: [String: Any] = [:] - - if fileManager.fileExists(atPath: metadataURL.path) { - let metadataData = try Data(contentsOf: metadataURL) - if let loadedMetadata = try JSONSerialization.jsonObject(with: metadataData, options: []) as? 
[String: Any] { - metadata = loadedMetadata - } - } - - photos.append((filename: filename, data: photoData, metadata: metadata)) - } - } - - return photos - } +// func loadAllPhotos() throws -> [(filename: String, data: Data, metadata: [String: Any])] { +// let secureDirectory = try getSecureDirectory() +// let contents = try fileManager.contentsOfDirectory(at: secureDirectory, includingPropertiesForKeys: nil) +// +// var photos: [(filename: String, data: Data, metadata: [String: Any])] = [] +// +// for fileURL in contents { +// if fileURL.pathExtension == "photo" { +// let filename = fileURL.deletingPathExtension().lastPathComponent +// +// // Load photo data +// let photoData = try Data(contentsOf: fileURL) +// +// // Try to load metadata if it exists +// let metadataURL = secureDirectory.appendingPathComponent("\(filename).metadata") +// var metadata: [String: Any] = [:] +// +// if fileManager.fileExists(atPath: metadataURL.path) { +// let metadataData = try Data(contentsOf: metadataURL) +// if let loadedMetadata = try JSONSerialization.jsonObject(with: metadataData, options: []) as? [String: Any] { +// metadata = loadedMetadata +// } +// } +// +// photos.append((filename: filename, data: photoData, metadata: metadata)) +// } +// } +// +// return photos +// } // Load specific photo by filename func loadPhoto(filename: String) throws -> (data: Data, metadata: [String: Any]) { diff --git a/SnapSafe/Repositories/SecurePhotoRepository.swift b/SnapSafe/Repositories/SecurePhotoRepository.swift index 5a66a4d..d46eafe 100644 --- a/SnapSafe/Repositories/SecurePhotoRepository.swift +++ b/SnapSafe/Repositories/SecurePhotoRepository.swift @@ -129,17 +129,14 @@ class SecurePhoto: Identifiable, Equatable { } } - // Computed property for backward compatibility var isDecoy: Bool { metadata.isDecoy } - // Computed property for backward compatibility - returns cached image if available var fullImage: UIImage { cachedImage ?? thumbnail } - // New repository-compatible initializer init(id: String, encryptedData: Data, metadata: PhotoMetadata, cachedImage: UIImage? = nil, cachedThumbnail: UIImage? 
= nil) { self.id = id self.encryptedData = encryptedData @@ -148,7 +145,6 @@ class SecurePhoto: Identifiable, Equatable { self.cachedThumbnail = cachedThumbnail } - // Implement Equatable static func == (lhs: SecurePhoto, rhs: SecurePhoto) -> Bool { lhs.id == rhs.id } diff --git a/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift b/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift index 7f2d6ad..4dd459d 100644 --- a/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift +++ b/SnapSafe/Views/PhotoDetail/PhotoDetailView.swift @@ -10,12 +10,9 @@ import ImageIO import SwiftUI import UIKit -// Use a different name to avoid conflicts with the top-level typealias struct PhotoDetailView_Impl: View { - // ViewModel @StateObject private var viewModel: PhotoDetailViewModel - // Environment @Environment(\.dismiss) private var dismiss // Initialize with a single photo @@ -42,7 +39,6 @@ struct PhotoDetailView_Impl: View { var body: some View { GeometryReader { geometry in ZStack { - // Background color Color.black.opacity(0.05) .edgesIgnoringSafeArea(.all) diff --git a/SnapSafe/Views/SecureGalleryView.swift b/SnapSafe/Views/SecureGalleryView.swift index df33298..42568ee 100644 --- a/SnapSafe/Views/SecureGalleryView.swift +++ b/SnapSafe/Views/SecureGalleryView.swift @@ -33,8 +33,6 @@ struct SecureGalleryView: View { @State private var isImporting: Bool = false @State private var importProgress: Float = 0 - // Filter state - @State private var selectedFilter: PhotoFilter = .all // Decoy selection mode @State private var isSelectingDecoys: Bool = false @@ -69,20 +67,6 @@ struct SecureGalleryView: View { // photos.count(where: { $0.isDecoy }) // } - // Computed property to get filtered photos - private var filteredPhotos: [SecurePhoto] { - switch selectedFilter { - case .all: - photos - case .imported: - photos.filter { _ in false } // TODO: Add imported flag to PhotoMetadata - case .edited: - photos.filter { _ in false } // TODO: Add edited flag to PhotoMetadata - case .withLocation: - photos.filter { _ in false } // TODO: Add location data to PhotoMetadata - } - } - // Get an array of selected photos for sharing private var selectedPhotos: [UIImage] { photos @@ -123,7 +107,7 @@ struct SecureGalleryView: View { ) } } - .navigationTitle(isSelectingDecoys ? "Select Decoy Photos" : (selectedFilter == .all ? "Secure Gallery" : selectedFilter.rawValue)) + .navigationTitle(isSelectingDecoys ? 
"Select Decoy Photos" : "Secure Gallery") .navigationBarTitleDisplayMode(.inline) .navigationBarBackButtonHidden(true) .toolbar { @@ -179,20 +163,6 @@ struct SecureGalleryView: View { isSelecting = true } - Menu("Filter Photos") { - ForEach(PhotoFilter.allCases, id: \.self) { filter in - Button(action: { - selectedFilter = filter - }) { - HStack { - Text(filter.rawValue) - if selectedFilter == filter { - Image(systemName: "checkmark") - } - } - } - } - } } label: { Image(systemName: "ellipsis.circle") .foregroundColor(.blue) @@ -297,9 +267,9 @@ struct SecureGalleryView: View { } .fullScreenCover(item: $selectedPhoto) { photo in // Find the index of the selected photo in the photos array - if let initialIndex = filteredPhotos.firstIndex(where: { $0.id == photo.id }) { + if let initialIndex = photos.firstIndex(where: { $0.id == photo.id }) { EnhancedPhotoDetailView( - allPhotos: filteredPhotos, + allPhotos: photos, initialIndex: initialIndex, showFaceDetection: showFaceDetection, onDelete: { _ in loadPhotos() }, @@ -365,13 +335,11 @@ struct SecureGalleryView: View { ) } -// } - // Photo grid subview private var photosGridView: some View { ScrollView { LazyVGrid(columns: [GridItem(.adaptive(minimum: 100))], spacing: 10) { - ForEach(filteredPhotos) { photo in + ForEach(photos) { photo in PhotoCell( photo: photo, isSelected: selectedPhotoIds.contains(photo.id), diff --git a/SnapSafeTests/PhotoFilterTests.swift b/SnapSafeTests/PhotoFilterTests.swift deleted file mode 100644 index 6e4e876..0000000 --- a/SnapSafeTests/PhotoFilterTests.swift +++ /dev/null @@ -1,37 +0,0 @@ -// -// PhotoFilterTests.swift -// SnapSafeTests -// -// Created by Bill Booth on 5/26/25. -// - -@testable import SnapSafe -import XCTest - -class PhotoFilterTests: XCTestCase { - func testPhotoFilterCases() { - // Test all filter cases exist - let allCases = PhotoFilter.allCases - XCTAssertEqual(allCases.count, 4) - XCTAssertTrue(allCases.contains(.all)) - XCTAssertTrue(allCases.contains(.imported)) - XCTAssertTrue(allCases.contains(.edited)) - XCTAssertTrue(allCases.contains(.withLocation)) - } - - func testPhotoFilterRawValues() { - // Test raw string values - XCTAssertEqual(PhotoFilter.all.rawValue, "All Photos") - XCTAssertEqual(PhotoFilter.imported.rawValue, "Imported Photos") - XCTAssertEqual(PhotoFilter.edited.rawValue, "Edited Photos") - XCTAssertEqual(PhotoFilter.withLocation.rawValue, "Photos with Location") - } - - func testPhotoFilterSystemImages() { - // Test system image names - XCTAssertEqual(PhotoFilter.all.systemImage, "photo.stack") - XCTAssertEqual(PhotoFilter.imported.systemImage, "square.and.arrow.down") - XCTAssertEqual(PhotoFilter.edited.systemImage, "pencil.circle") - XCTAssertEqual(PhotoFilter.withLocation.systemImage, "location.circle") - } -} From a68c489e77d96dcd9154cb2557787454277b27bb Mon Sep 17 00:00:00 2001 From: Bill Booth Date: Thu, 26 Jun 2025 21:13:51 -0700 Subject: [PATCH 3/5] Fix scaling issues in thumbnails --- SnapSafe/Models/PhotoMetadata.swift | 8 ++++---- SnapSafe/Repositories/SecurePhotoRepository.swift | 4 ++-- SnapSafe/Views/PhotoCell.swift | 9 ++------- SnapSafe/Views/SecureGalleryView.swift | 7 +++++-- 4 files changed, 13 insertions(+), 15 deletions(-) diff --git a/SnapSafe/Models/PhotoMetadata.swift b/SnapSafe/Models/PhotoMetadata.swift index 6ac3583..6bfe788 100644 --- a/SnapSafe/Models/PhotoMetadata.swift +++ b/SnapSafe/Models/PhotoMetadata.swift @@ -27,7 +27,7 @@ struct PhotoMetadata: Codable, Equatable { } } -//struct PhotoPredicate { +// struct PhotoPredicate { // let 
dateRange: ClosedRange? // let hasFaces: Bool? // let maskMode: MaskMode? @@ -37,10 +37,10 @@ struct PhotoMetadata: Codable, Equatable { // self.hasFaces = hasFaces // self.maskMode = maskMode // } -//} +// } -//enum ExportFormat { +// enum ExportFormat { // case jpeg(quality: CGFloat) // case png // case heic -//} +// } diff --git a/SnapSafe/Repositories/SecurePhotoRepository.swift b/SnapSafe/Repositories/SecurePhotoRepository.swift index d46eafe..e56b8d6 100644 --- a/SnapSafe/Repositories/SecurePhotoRepository.swift +++ b/SnapSafe/Repositories/SecurePhotoRepository.swift @@ -150,7 +150,7 @@ class SecurePhoto: Identifiable, Equatable { } } -//enum SecurePhotoError: Error, LocalizedError { +// enum SecurePhotoError: Error, LocalizedError { // case invalidImageData // case decryptionFailed // @@ -162,4 +162,4 @@ class SecurePhoto: Identifiable, Equatable { // "Failed to decrypt image" // } // } -//} +// } diff --git a/SnapSafe/Views/PhotoCell.swift b/SnapSafe/Views/PhotoCell.swift index 9b03d59..0b0a7ff 100644 --- a/SnapSafe/Views/PhotoCell.swift +++ b/SnapSafe/Views/PhotoCell.swift @@ -18,17 +18,12 @@ struct PhotoCell: View { // Track whether this cell is visible in the viewport @State private var isVisible: Bool = false - // Cell size - private let cellSize: CGFloat = 100 - var body: some View { ZStack(alignment: .topTrailing) { - // Photo image that fills the entire cell + // Photo image that maintains aspect ratio Image(uiImage: photo.thumbnail) .resizable() - .aspectRatio(contentMode: .fill) // Use .fill to cover the entire cell - .frame(width: cellSize, height: cellSize) - .clipped() // Clip any overflow + .aspectRatio(contentMode: .fit) // Use .fit to maintain aspect ratio .cornerRadius(10) .onTapGesture(perform: onTap) .overlay( diff --git a/SnapSafe/Views/SecureGalleryView.swift b/SnapSafe/Views/SecureGalleryView.swift index 42568ee..3f969c0 100644 --- a/SnapSafe/Views/SecureGalleryView.swift +++ b/SnapSafe/Views/SecureGalleryView.swift @@ -33,7 +33,6 @@ struct SecureGalleryView: View { @State private var isImporting: Bool = false @State private var importProgress: Float = 0 - // Decoy selection mode @State private var isSelectingDecoys: Bool = false @State private var maxDecoys: Int = 10 @@ -338,7 +337,11 @@ struct SecureGalleryView: View { // Photo grid subview private var photosGridView: some View { ScrollView { - LazyVGrid(columns: [GridItem(.adaptive(minimum: 100))], spacing: 10) { + LazyVGrid(columns: [ + GridItem(.flexible(), spacing: 10), + GridItem(.flexible(), spacing: 10), + GridItem(.flexible(), spacing: 10), + ], spacing: 10) { ForEach(photos) { photo in PhotoCell( photo: photo, From 5d36f791dda15bacf8d54b18e4e722bbc2c9b99a Mon Sep 17 00:00:00 2001 From: Bill Booth Date: Thu, 26 Jun 2025 23:34:03 -0700 Subject: [PATCH 4/5] secure gallery and model refactor --- .../Repositories/SecurePhotoRepository.swift | 38 ++ SnapSafe/Views/SecureGalleryView.swift | 575 +++--------------- SnapSafe/Views/SecureGalleryViewModel.swift | 427 +++++++++++++ 3 files changed, 537 insertions(+), 503 deletions(-) create mode 100644 SnapSafe/Views/SecureGalleryViewModel.swift diff --git a/SnapSafe/Repositories/SecurePhotoRepository.swift b/SnapSafe/Repositories/SecurePhotoRepository.swift index e56b8d6..c558046 100644 --- a/SnapSafe/Repositories/SecurePhotoRepository.swift +++ b/SnapSafe/Repositories/SecurePhotoRepository.swift @@ -150,6 +150,44 @@ class SecurePhoto: Identifiable, Equatable { } } +// MARK: - Repository Extensions for Gallery Operations + +extension SecurePhotoRepository { + // 
MARK: - Decoy Management + + func updateDecoyStatus(for photoIds: Set, isDecoy: Bool) -> Bool { + // TODO: Implement decoy status update functionality + // This will update the metadata for specified photos to mark them as decoys + print("Updating decoy status for \(photoIds.count) photos to: \(isDecoy)") + return true + } + + func getDecoyPhotos() -> [SecurePhoto] { + // TODO: Implement method to retrieve only decoy photos + // This will filter photos where metadata.isDecoy == true + [] + } + + func validateDecoyLimit(currentSelection: Set, maxDecoys: Int) -> Bool { + currentSelection.count <= maxDecoys + } +} + +// MARK: - Repository Class + +class SecurePhotoRepository: ObservableObject { + static let shared = SecurePhotoRepository() + + private let secureFileManager = SecureFileManager() + + private init() {} + + // MARK: - Core Repository Methods + + // TODO: Implement core repository methods for encrypted photo management + // This will replace the current SecureFileManager usage in the view model +} + // enum SecurePhotoError: Error, LocalizedError { // case invalidImageData // case decryptionFailed diff --git a/SnapSafe/Views/SecureGalleryView.swift b/SnapSafe/Views/SecureGalleryView.swift index 3f969c0..a8375be 100644 --- a/SnapSafe/Views/SecureGalleryView.swift +++ b/SnapSafe/Views/SecureGalleryView.swift @@ -23,23 +23,15 @@ struct EmptyGalleryView: View { // Gallery view to display the stored photos struct SecureGalleryView: View { - @State private var photos: [SecurePhoto] = [] + @StateObject private var viewModel = SecureGalleryViewModel() @State private var selectedPhoto: SecurePhoto? @AppStorage("showFaceDetection") private var showFaceDetection = true // Using AppStorage to share with Settings - @State private var isSelecting: Bool = false - @State private var selectedPhotoIds = Set() - @State private var showDeleteConfirmation = false @State private var pickerItems: [PhotosPickerItem] = [] - @State private var isImporting: Bool = false - @State private var importProgress: Float = 0 // Decoy selection mode @State private var isSelectingDecoys: Bool = false @State private var maxDecoys: Int = 10 - @State private var showDecoyLimitWarning: Bool = false - @State private var showDecoyConfirmation: Bool = false - private let secureFileManager = SecureFileManager() @Environment(\.dismiss) private var dismiss // Callback for dismissing the gallery @@ -56,27 +48,10 @@ struct SecureGalleryView: View { self.onDismiss = onDismiss } - // Computed properties to simplify the view - private var hasSelection: Bool { - !selectedPhotoIds.isEmpty - } - - // Computed property to get current decoy photo count -// private var currentDecoyCount: Int { -// photos.count(where: { $0.isDecoy }) -// } - - // Get an array of selected photos for sharing - private var selectedPhotos: [UIImage] { - photos - .filter { selectedPhotoIds.contains($0.id) } - .map(\.fullImage) - } - var body: some View { ZStack { Group { - if photos.isEmpty { + if viewModel.photos.isEmpty { EmptyGalleryView(onDismiss: { onDismiss?() dismiss() @@ -87,23 +62,8 @@ struct SecureGalleryView: View { } // Import progress overlay - if isImporting { - VStack { - ProgressView("Importing photos...", value: importProgress, total: 1.0) - .progressViewStyle(LinearProgressViewStyle()) - .padding() - - Text("\(Int(importProgress * 100))%") - .font(.caption) - .foregroundColor(.secondary) - } - .frame(width: 200) - .padding() - .background( - RoundedRectangle(cornerRadius: 10) - .fill(Color(.systemBackground)) - .shadow(radius: 5) - ) + if 
viewModel.isImporting { + importProgressOverlay } } .navigationTitle(isSelectingDecoys ? "Select Decoy Photos" : "Secure Gallery") @@ -116,8 +76,7 @@ struct SecureGalleryView: View { if isSelectingDecoys { // Exit decoy selection mode and return to settings isSelectingDecoys = false - isSelecting = false - selectedPhotoIds.removeAll() + viewModel.cancelSelection() } onDismiss?() dismiss() @@ -135,31 +94,30 @@ struct SecureGalleryView: View { HStack(spacing: 16) { if isSelectingDecoys { // Count label and Save button for decoy selection - Text("\(selectedPhotoIds.count)/\(maxDecoys)") + Text("\(viewModel.selectedPhotoIds.count)/\(maxDecoys)") .font(.caption) - .foregroundColor(selectedPhotoIds.count > maxDecoys ? .red : .secondary) + .foregroundColor(viewModel.selectedPhotoIds.count > maxDecoys ? .red : .secondary) Button("Save") { - if selectedPhotoIds.count > maxDecoys { - showDecoyLimitWarning = true + if !viewModel.validateDecoySelection() { + viewModel.showDecoyLimitWarning = true } else { - showDecoyConfirmation = true + viewModel.showDecoyConfirmation = true } } .foregroundColor(.blue) - .disabled(selectedPhotoIds.isEmpty) - } else if isSelecting { + .disabled(viewModel.selectedPhotoIds.isEmpty) + } else if viewModel.isSelecting { // Cancel selection button Button("Cancel") { - isSelecting = false - selectedPhotoIds.removeAll() + viewModel.cancelSelection() } .foregroundColor(.red) } else { // Context menu with Select and Filter options Menu { Button("Select Photos") { - isSelecting = true + viewModel.startSelection() } } label: { @@ -173,78 +131,22 @@ struct SecureGalleryView: View { .toolbar { // Bottom toolbar with main action buttons ToolbarItemGroup(placement: .bottomBar) { - if !isSelectingDecoys, !isSelecting { - // Normal mode: Import and Refresh buttons + if !isSelectingDecoys, !viewModel.isSelecting { + // Normal mode: Import button PhotosPicker(selection: $pickerItems, matching: .images, photoLibrary: .shared()) { Label("Import", systemImage: "square.and.arrow.down") } .onChange(of: pickerItems) { _, newItems in - // Process selected images from picker - Task { - var hadSuccessfulImport = false - - // Show import progress to user - let importCount = newItems.count - if importCount > 0 { - // Update UI to show import is happening - await MainActor.run { - isImporting = true - importProgress = 0 - } - - print("Importing \(importCount) photos...") - - // Process each selected item with progress tracking - for (index, item) in newItems.enumerated() { - // Update progress - let currentProgress = Float(index) / Float(importCount) - await MainActor.run { - importProgress = currentProgress - } - - // Load and process the image - if let data = try? await item.loadTransferable(type: Data.self) { - // Process this image - await processImportedImageData(data) - hadSuccessfulImport = true - } - } - - // Show 100% progress briefly before hiding - await MainActor.run { - importProgress = 1.0 - } - - // Small delay to show completion - try? 
await Task.sleep(nanoseconds: 300_000_000) // 0.3 seconds - } - - // After importing all items, reset the picker selection and refresh gallery - await MainActor.run { - // Reset picked items - pickerItems = [] - - // Hide progress indicator - isImporting = false - - // Reload the gallery if we imported images - if hadSuccessfulImport { - loadPhotos() - } - } - } + viewModel.processPhotoImport(from: newItems) + pickerItems = [] } Spacer() - -// Button(action: loadPhotos) { -// Label("Refresh", systemImage: "arrow.clockwise") -// } - } else if isSelecting, hasSelection, !isSelectingDecoys { + } else if viewModel.isSelecting, viewModel.hasSelection, !isSelectingDecoys { // Selection mode: Delete and Share buttons Button(action: { - print("Delete button pressed in gallery view, selected photos: \(selectedPhotoIds.count)") - showDeleteConfirmation = true + print("Delete button pressed in gallery view, selected photos: \(viewModel.selectedPhotoIds.count)") + viewModel.showDeleteConfirmation = true }) { Label("Delete", systemImage: "trash") .foregroundColor(.red) @@ -252,41 +154,39 @@ struct SecureGalleryView: View { Spacer() - Button(action: shareSelectedPhotos) { + Button(action: viewModel.shareSelectedPhotos) { Label("Share", systemImage: "square.and.arrow.up") } } } } - .onAppear(perform: loadPhotos) + .onAppear { + viewModel.loadPhotos() + if isSelectingDecoys { + viewModel.enableDecoySelection() + } + } .onChange(of: selectedPhoto) { _, newValue in if newValue == nil { - loadPhotos() + viewModel.loadPhotos() } } .fullScreenCover(item: $selectedPhoto) { photo in // Find the index of the selected photo in the photos array - if let initialIndex = photos.firstIndex(where: { $0.id == photo.id }) { + if let initialIndex = viewModel.photos.firstIndex(where: { $0.id == photo.id }) { EnhancedPhotoDetailView( - allPhotos: photos, + allPhotos: viewModel.photos, initialIndex: initialIndex, showFaceDetection: showFaceDetection, - onDelete: { _ in loadPhotos() }, - onDismiss: { - // Clean up memory for all loaded full-size images when returning to gallery - for photo in photos { - photo.clearMemory(keepThumbnail: true) - } - // Trigger garbage collection - MemoryManager.shared.checkMemoryUsage() - } + onDelete: { _ in viewModel.loadPhotos() }, + onDismiss: viewModel.cleanupMemory ) } else { // Fallback if photo not found in array PhotoDetailView( photo: photo, showFaceDetection: showFaceDetection, - onDelete: { _ in loadPhotos() }, + onDelete: { _ in viewModel.loadPhotos() }, onDismiss: { photo.clearMemory(keepThumbnail: true) // Trigger garbage collection @@ -296,22 +196,22 @@ struct SecureGalleryView: View { } } .alert( - "Delete Photo\(selectedPhotoIds.count > 1 ? "s" : "")", - isPresented: $showDeleteConfirmation, + "Delete Photo\(viewModel.selectedPhotoIds.count > 1 ? "s" : "")", + isPresented: $viewModel.showDeleteConfirmation, actions: { Button("Cancel", role: .cancel) {} Button("Delete", role: .destructive) { - print("Delete confirmation button pressed, deleting \(selectedPhotoIds.count) photos") - deleteSelectedPhotos() + print("Delete confirmation button pressed, deleting \(viewModel.selectedPhotoIds.count) photos") + viewModel.deleteSelectedPhotos() } }, message: { - Text("Are you sure you want to delete \(selectedPhotoIds.count) photo\(selectedPhotoIds.count > 1 ? "s" : "")? This action cannot be undone.") + Text("Are you sure you want to delete \(viewModel.selectedPhotoIds.count) photo\(viewModel.selectedPhotoIds.count > 1 ? "s" : "")? 
This action cannot be undone.") } ) .alert( "Too Many Decoys", - isPresented: $showDecoyLimitWarning, + isPresented: $viewModel.showDecoyLimitWarning, actions: { Button("OK", role: .cancel) {} }, @@ -321,7 +221,7 @@ struct SecureGalleryView: View { ) .alert( "Save Decoy Selection", - isPresented: $showDecoyConfirmation, + isPresented: $viewModel.showDecoyConfirmation, actions: { Button("Cancel", role: .cancel) {} Button("Save") { @@ -329,11 +229,13 @@ struct SecureGalleryView: View { } }, message: { - Text("Are you sure you want to save these \(selectedPhotoIds.count) photos as decoys? These will be shown when the emergency PIN is entered.") + Text("Are you sure you want to save these \(viewModel.selectedPhotoIds.count) photos as decoys? These will be shown when the emergency PIN is entered.") } ) } + // MARK: - View Components + // Photo grid subview private var photosGridView: some View { ScrollView { @@ -342,16 +244,16 @@ struct SecureGalleryView: View { GridItem(.flexible(), spacing: 10), GridItem(.flexible(), spacing: 10), ], spacing: 10) { - ForEach(photos) { photo in + ForEach(viewModel.photos) { photo in PhotoCell( photo: photo, - isSelected: selectedPhotoIds.contains(photo.id), - isSelecting: isSelecting, + isSelected: viewModel.selectedPhotoIds.contains(photo.id), + isSelecting: viewModel.isSelecting, onTap: { handlePhotoTap(photo) }, onDelete: { - prepareToDeleteSinglePhoto(photo) + viewModel.prepareToDeleteSinglePhoto(photo) } ) } @@ -359,379 +261,46 @@ struct SecureGalleryView: View { .padding() } } + + // Import progress overlay + private var importProgressOverlay: some View { + VStack { + ProgressView("Importing photos...", value: viewModel.importProgress, total: 1.0) + .progressViewStyle(LinearProgressViewStyle()) + .padding() - // Process image data from the PhotosPicker and save it to the gallery - private func processImportedImageData(_ imageData: Data) async { - // Create metadata including import timestamp - let metadata: [String: Any] = [ - "imported": true, - "importSource": "PhotosPicker", - "creationDate": Date().timeIntervalSince1970, - ] - - // Save the photo data (runs on background thread) - let filename = await withCheckedContinuation { continuation in - DispatchQueue.global(qos: .userInitiated).async { - do { - let filename = try secureFileManager.savePhoto(imageData, withMetadata: metadata) - continuation.resume(returning: filename) - } catch { - print("Error saving imported photo: \(error.localizedDescription)") - continuation.resume(returning: "") - } - } - } - - if !filename.isEmpty { - print("Successfully imported photo: \(filename)") + Text("\(Int(viewModel.importProgress * 100))%") + .font(.caption) + .foregroundColor(.secondary) } + .frame(width: 200) + .padding() + .background( + RoundedRectangle(cornerRadius: 10) + .fill(Color(.systemBackground)) + .shadow(radius: 5) + ) } - // Legacy method for backward compatibility -// private func handleImportedImage() { -// guard let image = importedImage else { return } -// -// // Convert image to data -// guard let imageData = image.jpegData(compressionQuality: 0.8) else { -// print("Failed to convert image to data") -// return -// } -// -// // Process the image data using the new method -// Task { -// await processImportedImageData(imageData) -// -// // Reload photos to show the new one -// DispatchQueue.main.async { -// importedImage = nil -// loadPhotos() -// } -// } -// } - - // MARK: - Action methods - + // MARK: - Action Methods + private func handlePhotoTap(_ photo: SecurePhoto) { - if isSelecting { - 
togglePhotoSelection(photo) + if viewModel.isSelecting { + viewModel.togglePhotoSelection(photo, isSelectingDecoys: isSelectingDecoys) } else { selectedPhoto = photo } } - private func togglePhotoSelection(_ photo: SecurePhoto) { - if selectedPhotoIds.contains(photo.id) { - selectedPhotoIds.remove(photo.id) - } else { - // If we're selecting decoys and already at the limit, don't allow more selections - if isSelectingDecoys, selectedPhotoIds.count >= maxDecoys { - showDecoyLimitWarning = true - return - } - selectedPhotoIds.insert(photo.id) - } - } - - private func prepareToDeleteSinglePhoto(_ photo: SecurePhoto) { - selectedPhotoIds = [photo.id] - showDeleteConfirmation = true - } - - // Utility function to fix image orientation -// private func fixImageOrientation(_ image: UIImage) -> UIImage { -// // If the orientation is already correct, return the image as is -// if image.imageOrientation == .up { -// return image -// } -// -// // Create a new CGContext with proper orientation -// UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale) -// image.draw(in: CGRect(origin: .zero, size: image.size)) -// let normalizedImage = UIGraphicsGetImageFromCurrentImageContext()! -// UIGraphicsEndImageContext() -// -// return normalizedImage -// } - - private func loadPhotos() { - // Load photos in the background thread to avoid UI blocking - DispatchQueue.global(qos: .userInitiated).async { - do { - // Load metadata and file URLs from legacy system - let photoMetadata = try secureFileManager.loadAllPhotoMetadata() - - // Convert legacy metadata to SecurePhoto objects - var loadedPhotos: [SecurePhoto] = [] - - for (filename, metadataDict, fileURL) in photoMetadata { - do { - // Load the unencrypted photo data from legacy system - let imageData = try Data(contentsOf: fileURL) - - // Convert legacy metadata dictionary to PhotoMetadata struct - let creationDate = Date(timeIntervalSince1970: metadataDict["creationDate"] as? TimeInterval ?? Date().timeIntervalSince1970) - let modificationDate = Date(timeIntervalSince1970: metadataDict["modificationDate"] as? TimeInterval ?? Date().timeIntervalSince1970) - let fileSize = metadataDict["fileSize"] as? Int ?? imageData.count - let isDecoy = metadataDict["isDecoy"] as? Bool ?? 
false - - // Create PhotoMetadata struct - let metadata = PhotoMetadata( - id: filename, - creationDate: creationDate, - modificationDate: modificationDate, - fileSize: fileSize, - faces: [], // TODO: Load faces from metadata if available - maskMode: .none, // TODO: Load mask mode from metadata if available - isDecoy: isDecoy - ) - - // Create UIImage and generate thumbnail - guard let image = UIImage(data: imageData) else { - print("Invalid image data for \(filename)") - continue - } - - // Generate thumbnail - let thumbnailSize = CGSize(width: 200, height: 200) - let renderer = UIGraphicsImageRenderer(size: thumbnailSize) - let thumbnail = renderer.image { _ in - image.draw(in: CGRect(origin: .zero, size: thumbnailSize)) - } - - // Create SecurePhoto object with cached images (legacy system uses unencrypted data) - let securePhoto = SecurePhoto( - id: filename, - encryptedData: Data(), // Empty since legacy system doesn't encrypt - metadata: metadata, - cachedImage: image, - cachedThumbnail: thumbnail - ) - - loadedPhotos.append(securePhoto) - } catch { - print("Error loading photo \(filename): \(error.localizedDescription)") - } - } - - // Sort by creation date (newest first) - loadedPhotos.sort { $0.metadata.creationDate > $1.metadata.creationDate } - - // Update UI on the main thread - DispatchQueue.main.async { - // First clear memory of existing photos if we're refreshing - MemoryManager.shared.freeAllMemory() - - // Update the photos array - photos = loadedPhotos - - // If in decoy selection mode, pre-select existing decoy photos - if isSelectingDecoys { - // Find and select all photos that are already marked as decoys - for photo in loadedPhotos { - if photo.isDecoy { - selectedPhotoIds.insert(photo.id) - } - } - - // Enable selection mode - isSelecting = true - } - - // Register these photos with the memory manager - MemoryManager.shared.registerPhotos(loadedPhotos) - } - } catch { - print("Error loading photos: \(error.localizedDescription)") - } - } - } - -// private func deletePhoto(_ photo: SecurePhoto) { -// // Perform file deletion in background thread -// DispatchQueue.global(qos: .userInitiated).async { -// do { -// try secureFileManager.deletePhoto(filename: photo.id) -// -// // Update UI on main thread -// DispatchQueue.main.async { -// // Remove from the local array -// withAnimation { -// photos.removeAll { $0.id == photo.id } -// if selectedPhotoIds.contains(photo.id) { -// selectedPhotoIds.remove(photo.id) -// } -// } -// } -// } catch { -// print("Error deleting photo: \(error.localizedDescription)") -// } -// } -// } - - private func deleteSelectedPhotos() { - print("deleteSelectedPhotos() called") - - // Create a local copy of the photos to delete - let photosToDelete = selectedPhotoIds.compactMap { id in - photos.first(where: { $0.id == id }) - } - - print("Will delete \(photosToDelete.count) photos: \(photosToDelete.map(\.id).joined(separator: ", "))") - - // Clear selection and exit selection mode immediately - // for better UI responsiveness - DispatchQueue.main.async { - print("Clearing selection UI state") - selectedPhotoIds.removeAll() - isSelecting = false - } - - // Process deletions in a background queue - DispatchQueue.global(qos: .userInitiated).async { - print("Starting background deletion process") - let group = DispatchGroup() - - // Delete each photo - for photo in photosToDelete { - group.enter() - do { - print("Attempting to delete: \(photo.id)") - try secureFileManager.deletePhoto(filename: photo.id) - print("Successfully deleted: \(photo.id)") 
- group.leave() - } catch { - print("Error deleting photo \(photo.id): \(error.localizedDescription)") - group.leave() - } - } - - // After all deletions are complete, update the UI - group.notify(queue: .main) { - print("All deletions complete, updating UI") - - // Count photos before removal - let initialCount = photos.count - - // Remove deleted photos from our array - withAnimation { - photos.removeAll { photo in - let shouldRemove = photosToDelete.contains { $0.id == photo.id } - if shouldRemove { - print("Removing photo \(photo.id) from UI") - } - return shouldRemove - } - } - - // Verify removal - let finalCount = photos.count - let removedCount = initialCount - finalCount - print("UI update complete: removed \(removedCount) photos. Gallery now has \(finalCount) photos.") - } - } - } - - // Share selected photos // Save selected photos as decoys private func saveDecoySelections() { - // First, un-mark any previously tagged decoys that aren't currently selected -// for photo in photos { -// let isCurrentlySelected = selectedPhotoIds.contains(photo.id) - - // TODO: Implement decoy status update with new repository pattern - // if photo.isDecoy && !isCurrentlySelected { - // photo.setDecoyStatus(false) - // } - // else if isCurrentlySelected && !photo.isDecoy { - // photo.setDecoyStatus(true) - // } -// } - + viewModel.saveDecoySelections() + // Reset selection and exit decoy mode isSelectingDecoys = false - isSelecting = false - selectedPhotoIds.removeAll() // Return to settings onDismiss?() dismiss() } - - private func shareSelectedPhotos() { - // Get all the selected photos - let images = selectedPhotos - guard !images.isEmpty else { return } - - // Find the root view controller - guard let windowScene = UIApplication.shared.connectedScenes.first as? 
UIWindowScene, - let window = windowScene.windows.first, - let rootViewController = window.rootViewController - else { - print("Could not find root view controller") - return - } - - // Find the presented view controller to present from - var currentController = rootViewController - while let presented = currentController.presentedViewController { - currentController = presented - } - - // Create and prepare temporary files with UUID filenames - var filesToShare: [URL] = [] - - for image in images { - if let imageData = image.jpegData(compressionQuality: 0.9) { - do { - let fileURL = try secureFileManager.preparePhotoForSharing(imageData: imageData) - filesToShare.append(fileURL) - print("Prepared file for sharing: \(fileURL.lastPathComponent)") - } catch { - print("Error preparing photo for sharing: \(error.localizedDescription)") - } - } - } - - // Share files if any were successfully prepared - if !filesToShare.isEmpty { - // Create a UIActivityViewController to share the files - let activityViewController = UIActivityViewController( - activityItems: filesToShare, - applicationActivities: nil - ) - - // For iPad support - if let popover = activityViewController.popoverPresentationController { - popover.sourceView = window - popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) - popover.permittedArrowDirections = [] - } - - // Present the share sheet - DispatchQueue.main.async { - currentController.present(activityViewController, animated: true) { - print("Share sheet presented successfully for \(filesToShare.count) files") - } - } - } else { - // Fallback to sharing just the images if file preparation failed for all - print("Falling back to sharing images directly") - - let activityViewController = UIActivityViewController( - activityItems: images, - applicationActivities: nil - ) - - // For iPad support - if let popover = activityViewController.popoverPresentationController { - popover.sourceView = window - popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) - popover.permittedArrowDirections = [] - } - - DispatchQueue.main.async { - currentController.present(activityViewController, animated: true, completion: nil) - } - } - } -} +} \ No newline at end of file diff --git a/SnapSafe/Views/SecureGalleryViewModel.swift b/SnapSafe/Views/SecureGalleryViewModel.swift new file mode 100644 index 0000000..27fe7e7 --- /dev/null +++ b/SnapSafe/Views/SecureGalleryViewModel.swift @@ -0,0 +1,427 @@ +// +// SecureGalleryViewModel.swift +// SnapSafe +// +// Created by Bill Booth on 6/27/25. 
+// + +import Foundation +import PhotosUI +import SwiftUI +import UIKit + +@MainActor +class SecureGalleryViewModel: ObservableObject { + // MARK: - Published Properties + + @Published var photos: [SecurePhoto] = [] + @Published var isSelecting: Bool = false + @Published var selectedPhotoIds = Set<String>() + @Published var isImporting: Bool = false + @Published var importProgress: Float = 0 + @Published var showDeleteConfirmation = false + @Published var showDecoyLimitWarning: Bool = false + @Published var showDecoyConfirmation: Bool = false + + // MARK: - Private Properties + + private let secureFileManager = SecureFileManager() + private let maxDecoys: Int = 10 + + // MARK: - Computed Properties + + var hasSelection: Bool { + !selectedPhotoIds.isEmpty + } + + var selectedPhotos: [UIImage] { + photos + .filter { selectedPhotoIds.contains($0.id) } + .map(\.fullImage) + } + + // MARK: - Photo Loading + + func loadPhotos() { + // Load photos in the background thread to avoid UI blocking + DispatchQueue.global(qos: .userInitiated).async { [weak self] in + guard let self else { return } + + do { + // Load metadata and file URLs from legacy system + let photoMetadata = try secureFileManager.loadAllPhotoMetadata() + + // Convert legacy metadata to SecurePhoto objects + var loadedPhotos: [SecurePhoto] = [] + + for (filename, metadataDict, fileURL) in photoMetadata { + do { + // Load the unencrypted photo data from legacy system + let imageData = try Data(contentsOf: fileURL) + + // Convert legacy metadata dictionary to PhotoMetadata struct + let creationDate = Date(timeIntervalSince1970: metadataDict["creationDate"] as? TimeInterval ?? Date().timeIntervalSince1970) + let modificationDate = Date(timeIntervalSince1970: metadataDict["modificationDate"] as? TimeInterval ?? Date().timeIntervalSince1970) + let fileSize = metadataDict["fileSize"] as? Int ?? imageData.count + let isDecoy = metadataDict["isDecoy"] as?
false + + // Create PhotoMetadata struct + let metadata = PhotoMetadata( + id: filename, + creationDate: creationDate, + modificationDate: modificationDate, + fileSize: fileSize, + faces: [], // TODO: Load faces from metadata if available + maskMode: .none, // TODO: Load mask mode from metadata if available + isDecoy: isDecoy + ) + + // Create UIImage and generate thumbnail + guard let image = UIImage(data: imageData) else { + print("Invalid image data for \(filename)") + continue + } + + // Generate thumbnail + let thumbnailSize = CGSize(width: 200, height: 200) + let renderer = UIGraphicsImageRenderer(size: thumbnailSize) + let thumbnail = renderer.image { _ in + image.draw(in: CGRect(origin: .zero, size: thumbnailSize)) + } + + // Create SecurePhoto object with cached images (legacy system uses unencrypted data) + let securePhoto = SecurePhoto( + id: filename, + encryptedData: Data(), // Empty since legacy system doesn't encrypt + metadata: metadata, + cachedImage: image, + cachedThumbnail: thumbnail + ) + + loadedPhotos.append(securePhoto) + } catch { + print("Error loading photo \(filename): \(error.localizedDescription)") + } + } + + // Sort by creation date (newest first) + loadedPhotos.sort { $0.metadata.creationDate > $1.metadata.creationDate } + + // Update UI on the main thread + DispatchQueue.main.async { + // First clear memory of existing photos if we're refreshing + MemoryManager.shared.freeAllMemory() + + // Update the photos array + self.photos = loadedPhotos + + // Register these photos with the memory manager + MemoryManager.shared.registerPhotos(loadedPhotos) + } + } catch { + print("Error loading photos: \(error.localizedDescription)") + } + } + } + + // MARK: - Selection Management + + func togglePhotoSelection(_ photo: SecurePhoto, isSelectingDecoys: Bool) { + if selectedPhotoIds.contains(photo.id) { + selectedPhotoIds.remove(photo.id) + } else { + // If we're selecting decoys and already at the limit, don't allow more selections + if isSelectingDecoys, selectedPhotoIds.count >= maxDecoys { + showDecoyLimitWarning = true + return + } + selectedPhotoIds.insert(photo.id) + } + } + + func startSelection() { + isSelecting = true + } + + func cancelSelection() { + isSelecting = false + selectedPhotoIds.removeAll() + } + + func prepareToDeleteSinglePhoto(_ photo: SecurePhoto) { + selectedPhotoIds = [photo.id] + showDeleteConfirmation = true + } + + func enableDecoySelection() { + // Find and select all photos that are already marked as decoys + for photo in photos { + if photo.isDecoy { + selectedPhotoIds.insert(photo.id) + } + } + + // Enable selection mode + isSelecting = true + } + + // MARK: - Import Operations + + func processPhotoImport(from pickerItems: [PhotosPickerItem]) { + Task { + var hadSuccessfulImport = false + + // Show import progress to user + let importCount = pickerItems.count + if importCount > 0 { + // Update UI to show import is happening + await MainActor.run { + isImporting = true + importProgress = 0 + } + + print("Importing \(importCount) photos...") + + // Process each selected item with progress tracking + for (index, item) in pickerItems.enumerated() { + // Update progress + let currentProgress = Float(index) / Float(importCount) + await MainActor.run { + importProgress = currentProgress + } + + // Load and process the image + if let data = try? 
await item.loadTransferable(type: Data.self) { + // Process this image + await processImportedImageData(data) + hadSuccessfulImport = true + } + } + + // Show 100% progress briefly before hiding + await MainActor.run { + importProgress = 1.0 + } + + // Small delay to show completion + try? await Task.sleep(nanoseconds: 300_000_000) // 0.3 seconds + } + + // After importing all items, reset the import state and refresh gallery + await MainActor.run { + // Hide progress indicator + isImporting = false + + // Reload the gallery if we imported images + if hadSuccessfulImport { + loadPhotos() + } + } + } + } + + private func processImportedImageData(_ imageData: Data) async { + // Create metadata including import timestamp + let metadata: [String: Any] = [ + "imported": true, + "importSource": "PhotosPicker", + "creationDate": Date().timeIntervalSince1970, + ] + + // Save the photo data (runs on background thread) + let filename = await withCheckedContinuation { continuation in + DispatchQueue.global(qos: .userInitiated).async { [weak self] in + guard let self else { + continuation.resume(returning: "") + return + } + + do { + let filename = try secureFileManager.savePhoto(imageData, withMetadata: metadata) + continuation.resume(returning: filename) + } catch { + print("Error saving imported photo: \(error.localizedDescription)") + continuation.resume(returning: "") + } + } + } + + if !filename.isEmpty { + print("Successfully imported photo: \(filename)") + } + } + + // MARK: - Deletion Operations + + func deleteSelectedPhotos() { + print("deleteSelectedPhotos() called") + + // Create a local copy of the photos to delete + let photosToDelete = selectedPhotoIds.compactMap { id in + photos.first(where: { $0.id == id }) + } + + print("Will delete \(photosToDelete.count) photos: \(photosToDelete.map(\.id).joined(separator: ", "))") + + // Clear selection and exit selection mode immediately + // for better UI responsiveness + DispatchQueue.main.async { [weak self] in + print("Clearing selection UI state") + self?.selectedPhotoIds.removeAll() + self?.isSelecting = false + } + + // Process deletions in a background queue + DispatchQueue.global(qos: .userInitiated).async { [weak self] in + guard let self else { return } + + print("Starting background deletion process") + let group = DispatchGroup() + + // Delete each photo + for photo in photosToDelete { + group.enter() + do { + print("Attempting to delete: \(photo.id)") + try secureFileManager.deletePhoto(filename: photo.id) + print("Successfully deleted: \(photo.id)") + group.leave() + } catch { + print("Error deleting photo \(photo.id): \(error.localizedDescription)") + group.leave() + } + } + + // After all deletions are complete, update the UI + group.notify(queue: .main) { [weak self] in + guard let self else { return } + + print("All deletions complete, updating UI") + + // Count photos before removal + let initialCount = photos.count + + // Remove deleted photos from our array + withAnimation { + self.photos.removeAll { photo in + let shouldRemove = photosToDelete.contains { $0.id == photo.id } + if shouldRemove { + print("Removing photo \(photo.id) from UI") + } + return shouldRemove + } + } + + // Verify removal + let finalCount = photos.count + let removedCount = initialCount - finalCount + print("UI update complete: removed \(removedCount) photos. 
Gallery now has \(finalCount) photos.") + } + } + } + + // MARK: - Sharing Operations + + func shareSelectedPhotos() { + // Get all the selected photos + let images = selectedPhotos + guard !images.isEmpty else { return } + + // Find the root view controller + guard let windowScene = UIApplication.shared.connectedScenes.first as? UIWindowScene, + let window = windowScene.windows.first, + let rootViewController = window.rootViewController + else { + print("Could not find root view controller") + return + } + + // Find the presented view controller to present from + var currentController = rootViewController + while let presented = currentController.presentedViewController { + currentController = presented + } + + // Create and prepare temporary files with UUID filenames + var filesToShare: [URL] = [] + + for image in images { + if let imageData = image.jpegData(compressionQuality: 0.9) { + do { + let fileURL = try secureFileManager.preparePhotoForSharing(imageData: imageData) + filesToShare.append(fileURL) + print("Prepared file for sharing: \(fileURL.lastPathComponent)") + } catch { + print("Error preparing photo for sharing: \(error.localizedDescription)") + } + } + } + + // Share files if any were successfully prepared + if !filesToShare.isEmpty { + // Create a UIActivityViewController to share the files + let activityViewController = UIActivityViewController( + activityItems: filesToShare, + applicationActivities: nil + ) + + // For iPad support + if let popover = activityViewController.popoverPresentationController { + popover.sourceView = window + popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) + popover.permittedArrowDirections = [] + } + + // Present the share sheet + DispatchQueue.main.async { + currentController.present(activityViewController, animated: true) { + print("Share sheet presented successfully for \(filesToShare.count) files") + } + } + } else { + // Fallback to sharing just the images if file preparation failed for all + print("Falling back to sharing images directly") + + let activityViewController = UIActivityViewController( + activityItems: images, + applicationActivities: nil + ) + + // For iPad support + if let popover = activityViewController.popoverPresentationController { + popover.sourceView = window + popover.sourceRect = CGRect(x: window.bounds.midX, y: window.bounds.midY, width: 0, height: 0) + popover.permittedArrowDirections = [] + } + + DispatchQueue.main.async { + currentController.present(activityViewController, animated: true, completion: nil) + } + } + } + + // MARK: - Decoy Management + + func validateDecoySelection() -> Bool { + selectedPhotoIds.count <= maxDecoys + } + + func saveDecoySelections() { + // TODO: Implement decoy status update with new repository pattern + // This will be implemented when we extend SecurePhotoRepository + + // For now, just reset selection state + selectedPhotoIds.removeAll() + } + + // MARK: - Cleanup + + func cleanupMemory() { + // Clean up memory for all loaded full-size images when returning to gallery + for photo in photos { + photo.clearMemory(keepThumbnail: true) + } + // Trigger garbage collection + MemoryManager.shared.checkMemoryUsage() + } +} From dcdace1b699504a93821dfde31c203b28c403891 Mon Sep 17 00:00:00 2001 From: Bill Booth Date: Mon, 1 Sep 2025 14:41:57 -0700 Subject: [PATCH 5/5] dumping ground This was a huge in-progress changeset, not sure if it's working but there are a lot of good little changes here. 
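
The most substantial piece is the new PrivacyOverlayManager, which replaces the SwiftUI `.obscuredWhenInactive()` modifier on ContentView with a separate overlay UIWindow that is put up before iOS captures the task-switcher snapshot. Roughly, the pattern is the sketch below, condensed from the full PrivacyOverlayManager in this patch; the type name here is illustrative, and the real manager also listens for willResignActive / protectedDataDidBecomeAvailable as backups and installs a dedicated shield view controller:

```swift
import Combine
import UIKit

// Condensed sketch of the overlay-window approach (see PrivacyOverlayManager.swift).
final class OverlaySketch {
    private var overlayWindow: UIWindow?
    private var cancellables = Set<AnyCancellable>()

    init() {
        // Fires before iOS takes the task-switcher snapshot.
        NotificationCenter.default.publisher(for: UIApplication.protectedDataWillBecomeUnavailableNotification)
            .sink { [weak self] _ in self?.show() }
            .store(in: &cancellables)

        // Tear the overlay down once the app is frontmost again.
        NotificationCenter.default.publisher(for: UIApplication.didBecomeActiveNotification)
            .sink { [weak self] _ in self?.hide() }
            .store(in: &cancellables)
    }

    private func show() {
        guard overlayWindow == nil,
              let scene = UIApplication.shared.connectedScenes.first as? UIWindowScene else { return }
        let window = UIWindow(windowScene: scene)
        window.windowLevel = .alert + 1000        // above all app content
        window.rootViewController = UIViewController() // real manager installs the shield UI here
        window.isHidden = false
        overlayWindow = window
    }

    private func hide() {
        overlayWindow?.isHidden = true
        overlayWindow = nil
    }
}
```

Driving the shield from a separate window keyed off protectedDataWillBecomeUnavailable sidesteps the scene-phase timing races that the old view modifier had to debounce.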
--- README.md | 14 ++ SnapSafe.xcodeproj/project.pbxproj | 167 +++++++++------ .../xcshareddata/xcschemes/SnapSafe.xcscheme | 6 +- .../xcschemes/xcschememanagement.plist | 5 + ...AuthManager.swift => AuthRepository.swift} | 2 +- .../{PINManager.swift => PINRepository.swift} | 2 +- .../Repositories/PrivacyOverlayManager.swift | 176 ++++++++++++++++ .../Repositories/SecurePhotoRepository.swift | 190 ++++++++++-------- SnapSafe/SnapSafeApp.swift | 3 + SnapSafe/Views/ContentView.swift | 4 - SnapSafe/Views/PhotoCell.swift | 5 +- .../PhotoDetail/PhotoDetailViewModel.swift | 2 +- SnapSafe/Views/PrivacyShieldView.swift | 53 ++++- SnapSafe/Views/SecureGalleryView.swift | 10 +- SnapSafe/Views/SecureGalleryViewModel.swift | 19 +- .../Shared/MockSecureImageRepository.swift | 95 +++++++++ SnapSafeTests/SnapSafeTests.swift | 62 ++++++ 17 files changed, 632 insertions(+), 183 deletions(-) rename SnapSafe/Repositories/{AuthManager.swift => AuthRepository.swift} (99%) rename SnapSafe/Repositories/{PINManager.swift => PINRepository.swift} (99%) create mode 100644 SnapSafe/Repositories/PrivacyOverlayManager.swift create mode 100644 SnapSafeTests/Shared/MockSecureImageRepository.swift diff --git a/README.md b/README.md index ae8cc01..89dfc28 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,20 @@ The camera app that minds its own business. [![CodeQL Advanced](https://github.com/SecureCamera/SecureCameraIos/actions/workflows/codeql.yml/badge.svg)](https://github.com/SecureCamera/SecureCameraIos/actions/workflows/codeql.yml) [![codebeat badge](https://codebeat.co/badges/98126bc1-7ae9-4aed-be5c-21875c1999a1)](https://codebeat.co/projects/github-com-securecamera-securecameraios-main) +# Build Info + +Run periphery: + +``` +periphery scan --format xcode --strict --project SnapSafe/SnapSafe.xcodeproj --schemes SnapSafe +``` + +Resolve package graph: + +``` +xcodebuild -list +``` + # Recommended iOS Settings Apple provides a number of security features we can use on our devices to ensure the device is as secure as possible. This section outlines settings you can use to protect your device. 
diff --git a/SnapSafe.xcodeproj/project.pbxproj b/SnapSafe.xcodeproj/project.pbxproj index c25330c..82c2c50 100644 --- a/SnapSafe.xcodeproj/project.pbxproj +++ b/SnapSafe.xcodeproj/project.pbxproj @@ -6,12 +6,26 @@ objectVersion = 77; objects = { +/* Begin PBXAggregateTarget section */ + A93DD0432DF7F3CB00D5DB42 /* Periphery */ = { + isa = PBXAggregateTarget; + buildConfigurationList = A93DD0442DF7F3CB00D5DB42 /* Build configuration list for PBXAggregateTarget "Periphery" */; + buildPhases = ( + A93DD0472DF7F3FA00D5DB42 /* ShellScript */, + ); + dependencies = ( + ); + name = Periphery; + packageProductDependencies = ( + ); + productName = Periphery; + }; +/* End PBXAggregateTarget section */ + /* Begin PBXBuildFile section */ A91DBC542DE58191001F42ED /* AppearanceMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC252DE58191001F42ED /* AppearanceMode.swift */; }; A91DBC552DE58191001F42ED /* DetectedFace.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC262DE58191001F42ED /* DetectedFace.swift */; }; A91DBC562DE58191001F42ED /* MaskMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC272DE58191001F42ED /* MaskMode.swift */; }; - A91DBC572DE58191001F42ED /* MemoryManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC282DE58191001F42ED /* MemoryManager.swift */; }; - A91DBC592DE58191001F42ED /* SecurePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC2A2DE58191001F42ED /* SecurePhoto.swift */; }; A91DBC5A2DE58191001F42ED /* FaceBoxView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC2E2DE58191001F42ED /* FaceBoxView.swift */; }; A91DBC5B2DE58191001F42ED /* FaceDetectionControlsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC2F2DE58191001F42ED /* FaceDetectionControlsView.swift */; }; A91DBC5C2DE58191001F42ED /* FaceDetectionOverlay.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC302DE58191001F42ED /* FaceDetectionOverlay.swift */; }; @@ -26,29 +40,26 @@ A91DBC652DE58191001F42ED /* PhotoDetailViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC3B2DE58191001F42ED /* PhotoDetailViewModel.swift */; }; A91DBC662DE58191001F42ED /* AppStateCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC3E2DE58191001F42ED /* AppStateCoordinator.swift */; }; A91DBC672DE58191001F42ED /* AuthenticationOverlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC402DE58191001F42ED /* AuthenticationOverlayView.swift */; }; - A91DBC682DE58191001F42ED /* AuthManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC412DE58191001F42ED /* AuthManager.swift */; }; - A91DBC692DE58191001F42ED /* CamControl.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC422DE58191001F42ED /* CamControl.swift */; }; A91DBC6A2DE58191001F42ED /* CameraModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC432DE58191001F42ED /* CameraModel.swift */; }; A91DBC6B2DE58191001F42ED /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC442DE58191001F42ED /* ContentView.swift */; }; - A91DBC6C2DE58191001F42ED /* EncryptionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC452DE58191001F42ED /* EncryptionManager.swift */; }; - A91DBC6D2DE58191001F42ED /* FaceDetector.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC462DE58191001F42ED /* FaceDetector.swift */; }; - A91DBC6E2DE58191001F42ED /* FileManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC472DE58191001F42ED /* FileManager.swift */; }; - A91DBC6F2DE58191001F42ED /* 
KeyManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC482DE58191001F42ED /* KeyManager.swift */; }; - A91DBC702DE58191001F42ED /* LocationManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC492DE58191001F42ED /* LocationManager.swift */; }; A91DBC712DE58191001F42ED /* PhotoDetailViewImpl.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC4A2DE58191001F42ED /* PhotoDetailViewImpl.swift */; }; - A91DBC722DE58191001F42ED /* PINManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC4B2DE58191001F42ED /* PINManager.swift */; }; A91DBC732DE58191001F42ED /* PINSetupView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC4C2DE58191001F42ED /* PINSetupView.swift */; }; A91DBC742DE58191001F42ED /* PINVerificationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC4D2DE58191001F42ED /* PINVerificationView.swift */; }; - A91DBC752DE58191001F42ED /* PrivacyShield.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC4E2DE58191001F42ED /* PrivacyShield.swift */; }; - A91DBC762DE58191001F42ED /* ScreenCaptureManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC4F2DE58191001F42ED /* ScreenCaptureManager.swift */; }; + A91DBC752DE58191001F42ED /* PrivacyShieldView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC4E2DE58191001F42ED /* PrivacyShieldView.swift */; }; A91DBC772DE58191001F42ED /* SecureGalleryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC502DE58191001F42ED /* SecureGalleryView.swift */; }; A91DBC782DE58191001F42ED /* SettingsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC512DE58191001F42ED /* SettingsView.swift */; }; A91DBC792DE58191001F42ED /* SnapSafeApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = A91DBC522DE58191001F42ED /* SnapSafeApp.swift */; }; A91DBC7A2DE58191001F42ED /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A91DBC2C2DE58191001F42ED /* Preview Assets.xcassets */; }; A91DBC7B2DE58191001F42ED /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A91DBC3F2DE58191001F42ED /* Assets.xcassets */; }; - A9445CBB2DE7DD7B0038119B /* PhotoMetadata.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9445CBA2DE7DD7B0038119B /* PhotoMetadata.swift */; }; + A93DD0422DF7EFA900D5DB42 /* PhotoMetadata.swift in Sources */ = {isa = PBXBuildFile; fileRef = A93DD0412DF7EFA900D5DB42 /* PhotoMetadata.swift */; }; A9B3E0882DCF1D3C003F1ED3 /* Dependencies in Frameworks */ = {isa = PBXBuildFile; productRef = A9B3E0872DCF1D3C003F1ED3 /* Dependencies */; }; A9B3E08A2DCF1D3C003F1ED3 /* DependenciesTestSupport in Frameworks */ = {isa = PBXBuildFile; productRef = A9B3E0892DCF1D3C003F1ED3 /* DependenciesTestSupport */; }; + A9DEA9AD2DF94EAD0064A95F /* ScreenshotTakenView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9DEA9AC2DF94EAD0064A95F /* ScreenshotTakenView.swift */; }; + A9DEA9AF2DF94EE50064A95F /* ScreenRecordingBlockerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9DEA9AE2DF94EE50064A95F /* ScreenRecordingBlockerView.swift */; }; + A9DEA9B12DF956610064A95F /* PhotoCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9DEA9B02DF956610064A95F /* PhotoCell.swift */; }; + A9DEA9B52DF9579D0064A95F /* FocusIndicatorView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9DEA9B42DF9579D0064A95F /* FocusIndicatorView.swift */; }; + A9DEA9B72DF957B50064A95F /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9DEA9B62DF957B50064A95F /* CameraView.swift */; }; + A9DEAA932E0E71FB0064A95F 
/* SecureGalleryViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9DEAA922E0E71FB0064A95F /* SecureGalleryViewModel.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -73,8 +84,6 @@ A91DBC252DE58191001F42ED /* AppearanceMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppearanceMode.swift; sourceTree = ""; }; A91DBC262DE58191001F42ED /* DetectedFace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DetectedFace.swift; sourceTree = ""; }; A91DBC272DE58191001F42ED /* MaskMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MaskMode.swift; sourceTree = ""; }; - A91DBC282DE58191001F42ED /* MemoryManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MemoryManager.swift; sourceTree = ""; }; - A91DBC2A2DE58191001F42ED /* SecurePhoto.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecurePhoto.swift; sourceTree = ""; }; A91DBC2C2DE58191001F42ED /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; A91DBC2E2DE58191001F42ED /* FaceBoxView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceBoxView.swift; sourceTree = ""; }; A91DBC2F2DE58191001F42ED /* FaceDetectionControlsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceDetectionControlsView.swift; sourceTree = ""; }; @@ -91,28 +100,25 @@ A91DBC3E2DE58191001F42ED /* AppStateCoordinator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppStateCoordinator.swift; sourceTree = ""; }; A91DBC3F2DE58191001F42ED /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; A91DBC402DE58191001F42ED /* AuthenticationOverlayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AuthenticationOverlayView.swift; sourceTree = ""; }; - A91DBC412DE58191001F42ED /* AuthManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AuthManager.swift; sourceTree = ""; }; - A91DBC422DE58191001F42ED /* CamControl.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CamControl.swift; sourceTree = ""; }; A91DBC432DE58191001F42ED /* CameraModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraModel.swift; sourceTree = ""; }; A91DBC442DE58191001F42ED /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; - A91DBC452DE58191001F42ED /* EncryptionManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EncryptionManager.swift; sourceTree = ""; }; - A91DBC462DE58191001F42ED /* FaceDetector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceDetector.swift; sourceTree = ""; }; - A91DBC472DE58191001F42ED /* FileManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FileManager.swift; sourceTree = ""; }; - A91DBC482DE58191001F42ED /* KeyManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = KeyManager.swift; sourceTree = ""; }; - A91DBC492DE58191001F42ED /* LocationManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
LocationManager.swift; sourceTree = ""; }; A91DBC4A2DE58191001F42ED /* PhotoDetailViewImpl.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoDetailViewImpl.swift; sourceTree = ""; }; - A91DBC4B2DE58191001F42ED /* PINManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PINManager.swift; sourceTree = ""; }; A91DBC4C2DE58191001F42ED /* PINSetupView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PINSetupView.swift; sourceTree = ""; }; A91DBC4D2DE58191001F42ED /* PINVerificationView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PINVerificationView.swift; sourceTree = ""; }; - A91DBC4E2DE58191001F42ED /* PrivacyShield.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PrivacyShield.swift; sourceTree = ""; }; - A91DBC4F2DE58191001F42ED /* ScreenCaptureManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenCaptureManager.swift; sourceTree = ""; }; + A91DBC4E2DE58191001F42ED /* PrivacyShieldView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PrivacyShieldView.swift; sourceTree = ""; }; A91DBC502DE58191001F42ED /* SecureGalleryView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecureGalleryView.swift; sourceTree = ""; }; A91DBC512DE58191001F42ED /* SettingsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsView.swift; sourceTree = ""; }; A91DBC522DE58191001F42ED /* SnapSafeApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SnapSafeApp.swift; sourceTree = ""; }; - A9445CBA2DE7DD7B0038119B /* PhotoMetadata.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoMetadata.swift; sourceTree = ""; }; + A93DD0412DF7EFA900D5DB42 /* PhotoMetadata.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoMetadata.swift; sourceTree = ""; }; A9DE37472DC5F34400679C2C /* SnapSafe.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SnapSafe.app; sourceTree = BUILT_PRODUCTS_DIR; }; A9DE37572DC5F34600679C2C /* SnapSafeTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SnapSafeTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; A9DE37612DC5F34600679C2C /* SnapSafeUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SnapSafeUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + A9DEA9AC2DF94EAD0064A95F /* ScreenshotTakenView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenshotTakenView.swift; sourceTree = ""; }; + A9DEA9AE2DF94EE50064A95F /* ScreenRecordingBlockerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenRecordingBlockerView.swift; sourceTree = ""; }; + A9DEA9B02DF956610064A95F /* PhotoCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoCell.swift; sourceTree = ""; }; + A9DEA9B42DF9579D0064A95F /* FocusIndicatorView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FocusIndicatorView.swift; sourceTree = ""; }; + A9DEA9B62DF957B50064A95F /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = ""; }; + A9DEAA922E0E71FB0064A95F /* 
SecureGalleryViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = SecureGalleryViewModel.swift; path = ../Views/SecureGalleryViewModel.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFileSystemSynchronizedRootGroup section */ @@ -168,12 +174,12 @@ A91DBC2B2DE58191001F42ED /* Models */ = { isa = PBXGroup; children = ( + A9DEAA922E0E71FB0064A95F /* SecureGalleryViewModel.swift */, A91DBC252DE58191001F42ED /* AppearanceMode.swift */, A91DBC262DE58191001F42ED /* DetectedFace.swift */, A91DBC272DE58191001F42ED /* MaskMode.swift */, A91DBC432DE58191001F42ED /* CameraModel.swift */, - A91DBC282DE58191001F42ED /* MemoryManager.swift */, - A91DBC2A2DE58191001F42ED /* SecurePhoto.swift */, + A93DD0412DF7EFA900D5DB42 /* PhotoMetadata.swift */, ); path = Models; sourceTree = ""; @@ -225,6 +231,19 @@ isa = PBXGroup; children = ( A91DBC3C2DE58191001F42ED /* PhotoDetail */, + A91DBC402DE58191001F42ED /* AuthenticationOverlayView.swift */, + A91DBC442DE58191001F42ED /* ContentView.swift */, + A9DEA9B62DF957B50064A95F /* CameraView.swift */, + A9DEA9B42DF9579D0064A95F /* FocusIndicatorView.swift */, + A9DEA9B02DF956610064A95F /* PhotoCell.swift */, + A91DBC4A2DE58191001F42ED /* PhotoDetailViewImpl.swift */, + A91DBC4C2DE58191001F42ED /* PINSetupView.swift */, + A91DBC4D2DE58191001F42ED /* PINVerificationView.swift */, + A9DEA9AC2DF94EAD0064A95F /* ScreenshotTakenView.swift */, + A9DEA9AE2DF94EE50064A95F /* ScreenRecordingBlockerView.swift */, + A91DBC502DE58191001F42ED /* SecureGalleryView.swift */, + A91DBC512DE58191001F42ED /* SettingsView.swift */, + A91DBC4E2DE58191001F42ED /* PrivacyShieldView.swift */, ); path = Views; sourceTree = ""; @@ -239,23 +258,6 @@ A91DBC3D2DE58191001F42ED /* Views */, A91DBC3E2DE58191001F42ED /* AppStateCoordinator.swift */, A91DBC3F2DE58191001F42ED /* Assets.xcassets */, - A91DBC402DE58191001F42ED /* AuthenticationOverlayView.swift */, - A91DBC412DE58191001F42ED /* AuthManager.swift */, - A91DBC422DE58191001F42ED /* CamControl.swift */, - A91DBC442DE58191001F42ED /* ContentView.swift */, - A91DBC452DE58191001F42ED /* EncryptionManager.swift */, - A91DBC462DE58191001F42ED /* FaceDetector.swift */, - A91DBC472DE58191001F42ED /* FileManager.swift */, - A91DBC482DE58191001F42ED /* KeyManager.swift */, - A91DBC492DE58191001F42ED /* LocationManager.swift */, - A91DBC4A2DE58191001F42ED /* PhotoDetailViewImpl.swift */, - A91DBC4B2DE58191001F42ED /* PINManager.swift */, - A91DBC4C2DE58191001F42ED /* PINSetupView.swift */, - A91DBC4D2DE58191001F42ED /* PINVerificationView.swift */, - A91DBC4E2DE58191001F42ED /* PrivacyShield.swift */, - A91DBC4F2DE58191001F42ED /* ScreenCaptureManager.swift */, - A91DBC502DE58191001F42ED /* SecureGalleryView.swift */, - A91DBC512DE58191001F42ED /* SettingsView.swift */, A91DBC522DE58191001F42ED /* SnapSafeApp.swift */, ); path = SnapSafe; @@ -264,7 +266,6 @@ A9DE373E2DC5F34400679C2C = { isa = PBXGroup; children = ( - A9445CBA2DE7DD7B0038119B /* PhotoMetadata.swift */, A91DBB422DE41BAE001F42ED /* SnapSafe.xctestplan */, A91DBC532DE58191001F42ED /* SnapSafe */, A9DE375A2DC5F34600679C2C /* SnapSafeTests */, @@ -367,6 +368,9 @@ LastSwiftUpdateCheck = 1620; LastUpgradeCheck = 1620; TargetAttributes = { + A93DD0432DF7F3CB00D5DB42 = { + CreatedOnToolsVersion = 16.2; + }; A9DE37462DC5F34400679C2C = { CreatedOnToolsVersion = 16.2; }; @@ -400,6 +404,7 @@ A9DE37462DC5F34400679C2C /* SnapSafe */, A9DE37562DC5F34600679C2C /* SnapSafeTests */, A9DE37602DC5F34600679C2C /* SnapSafeUITests */, + 
A93DD0432DF7F3CB00D5DB42 /* Periphery */, ); }; /* End PBXProject section */ @@ -430,46 +435,61 @@ }; /* End PBXResourcesBuildPhase section */ +/* Begin PBXShellScriptBuildPhase section */ + A93DD0472DF7F3FA00D5DB42 /* ShellScript */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 12; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + ); + outputFileListPaths = ( + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "periphery scan --format xcode --strict --project SnapSafe.xcodeproj --schemes SnapSafe\n"; + }; +/* End PBXShellScriptBuildPhase section */ + /* Begin PBXSourcesBuildPhase section */ A9DE37432DC5F34400679C2C /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( A91DBC542DE58191001F42ED /* AppearanceMode.swift in Sources */, + A93DD0422DF7EFA900D5DB42 /* PhotoMetadata.swift in Sources */, + A9DEA9AF2DF94EE50064A95F /* ScreenRecordingBlockerView.swift in Sources */, A91DBC552DE58191001F42ED /* DetectedFace.swift in Sources */, A91DBC562DE58191001F42ED /* MaskMode.swift in Sources */, - A91DBC572DE58191001F42ED /* MemoryManager.swift in Sources */, - A91DBC592DE58191001F42ED /* SecurePhoto.swift in Sources */, A91DBC5A2DE58191001F42ED /* FaceBoxView.swift in Sources */, + A9DEA9B72DF957B50064A95F /* CameraView.swift in Sources */, A91DBC5B2DE58191001F42ED /* FaceDetectionControlsView.swift in Sources */, A91DBC5C2DE58191001F42ED /* FaceDetectionOverlay.swift in Sources */, A91DBC5D2DE58191001F42ED /* PhotoControlsView.swift in Sources */, A91DBC5E2DE58191001F42ED /* ZoomableImageView.swift in Sources */, A91DBC5F2DE58191001F42ED /* ZoomLevelIndicator.swift in Sources */, A91DBC602DE58191001F42ED /* ZoomableModifier.swift in Sources */, + A9DEAA932E0E71FB0064A95F /* SecureGalleryViewModel.swift in Sources */, A91DBC612DE58191001F42ED /* EnhancedPhotoDetailView.swift in Sources */, - A9445CBB2DE7DD7B0038119B /* PhotoMetadata.swift in Sources */, A91DBC622DE58191001F42ED /* ImageInfoView.swift in Sources */, A91DBC632DE58191001F42ED /* PhotoDetail.swift in Sources */, A91DBC642DE58191001F42ED /* PhotoDetailView.swift in Sources */, A91DBC652DE58191001F42ED /* PhotoDetailViewModel.swift in Sources */, A91DBC662DE58191001F42ED /* AppStateCoordinator.swift in Sources */, A91DBC672DE58191001F42ED /* AuthenticationOverlayView.swift in Sources */, - A91DBC682DE58191001F42ED /* AuthManager.swift in Sources */, - A91DBC692DE58191001F42ED /* CamControl.swift in Sources */, + A9DEA9AD2DF94EAD0064A95F /* ScreenshotTakenView.swift in Sources */, A91DBC6A2DE58191001F42ED /* CameraModel.swift in Sources */, A91DBC6B2DE58191001F42ED /* ContentView.swift in Sources */, - A91DBC6C2DE58191001F42ED /* EncryptionManager.swift in Sources */, - A91DBC6D2DE58191001F42ED /* FaceDetector.swift in Sources */, - A91DBC6E2DE58191001F42ED /* FileManager.swift in Sources */, - A91DBC6F2DE58191001F42ED /* KeyManager.swift in Sources */, - A91DBC702DE58191001F42ED /* LocationManager.swift in Sources */, + A9DEA9B12DF956610064A95F /* PhotoCell.swift in Sources */, + A9DEA9B52DF9579D0064A95F /* FocusIndicatorView.swift in Sources */, A91DBC712DE58191001F42ED /* PhotoDetailViewImpl.swift in Sources */, - A91DBC722DE58191001F42ED /* PINManager.swift in Sources */, A91DBC732DE58191001F42ED /* PINSetupView.swift in Sources */, A91DBC742DE58191001F42ED /* PINVerificationView.swift in Sources */, - A91DBC752DE58191001F42ED /* PrivacyShield.swift in Sources */, - A91DBC762DE58191001F42ED /* 
ScreenCaptureManager.swift in Sources */, + A91DBC752DE58191001F42ED /* PrivacyShieldView.swift in Sources */, A91DBC772DE58191001F42ED /* SecureGalleryView.swift in Sources */, A91DBC782DE58191001F42ED /* SettingsView.swift in Sources */, A91DBC792DE58191001F42ED /* SnapSafeApp.swift in Sources */, @@ -506,6 +526,26 @@ /* End PBXTargetDependency section */ /* Begin XCBuildConfiguration section */ + A93DD0452DF7F3CB00D5DB42 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = BP75F4S5N3; + ENABLE_USER_SCRIPT_SANDBOXING = NO; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Debug; + }; + A93DD0462DF7F3CB00D5DB42 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = BP75F4S5N3; + ENABLE_USER_SCRIPT_SANDBOXING = NO; + PRODUCT_NAME = "$(TARGET_NAME)"; + }; + name = Release; + }; A9DE37692DC5F34600679C2C /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { @@ -770,6 +810,15 @@ /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ + A93DD0442DF7F3CB00D5DB42 /* Build configuration list for PBXAggregateTarget "Periphery" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + A93DD0452DF7F3CB00D5DB42 /* Debug */, + A93DD0462DF7F3CB00D5DB42 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Debug; + }; A9DE37422DC5F34400679C2C /* Build configuration list for PBXProject "SnapSafe" */ = { isa = XCConfigurationList; buildConfigurations = ( diff --git a/SnapSafe.xcodeproj/xcshareddata/xcschemes/SnapSafe.xcscheme b/SnapSafe.xcodeproj/xcshareddata/xcschemes/SnapSafe.xcscheme index 2b0f432..ec0ac6e 100644 --- a/SnapSafe.xcodeproj/xcshareddata/xcschemes/SnapSafe.xcscheme +++ b/SnapSafe.xcodeproj/xcshareddata/xcschemes/SnapSafe.xcscheme @@ -61,8 +61,10 @@ SchemeUserState + Periphery.xcscheme_^#shared#^_ + + orderHint + 1 + SnapSafe.xcscheme_^#shared#^_ orderHint diff --git a/SnapSafe/Repositories/AuthManager.swift b/SnapSafe/Repositories/AuthRepository.swift similarity index 99% rename from SnapSafe/Repositories/AuthManager.swift rename to SnapSafe/Repositories/AuthRepository.swift index ac12118..e29431a 100644 --- a/SnapSafe/Repositories/AuthManager.swift +++ b/SnapSafe/Repositories/AuthRepository.swift @@ -1,5 +1,5 @@ // -// AuthManager.swift +// AuthRepository.swift // SnapSafe // // Created by Bill Booth on 5/3/25. diff --git a/SnapSafe/Repositories/PINManager.swift b/SnapSafe/Repositories/PINRepository.swift similarity index 99% rename from SnapSafe/Repositories/PINManager.swift rename to SnapSafe/Repositories/PINRepository.swift index 1f4b5e3..3656bfb 100644 --- a/SnapSafe/Repositories/PINManager.swift +++ b/SnapSafe/Repositories/PINRepository.swift @@ -1,5 +1,5 @@ // -// PINManager.swift +// PINRepository.swift // SnapSafe // // Created by Bill Booth on 5/22/25. diff --git a/SnapSafe/Repositories/PrivacyOverlayManager.swift b/SnapSafe/Repositories/PrivacyOverlayManager.swift new file mode 100644 index 0000000..0d81a81 --- /dev/null +++ b/SnapSafe/Repositories/PrivacyOverlayManager.swift @@ -0,0 +1,176 @@ +// +// PrivacyOverlayManager.swift +// SnapSafe +// +// Created by Claude Code on 6/25/25. +// + +import Combine +import SwiftUI +import UIKit + +/// Manager for privacy overlay window that prevents app content from appearing in task switcher +final class PrivacyOverlayManager: ObservableObject { + static let shared = PrivacyOverlayManager() + + private var overlayWindow: UIWindow? 
+ private var cancellables = Set() + + private init() { + setupNotificationObservers() + } + + private func setupNotificationObservers() { + // Use the earliest possible notification to show privacy shield + // This fires BEFORE iOS takes the snapshot for task switcher + NotificationCenter.default.publisher(for: UIApplication.protectedDataWillBecomeUnavailableNotification) + .sink { [weak self] _ in + print("PrivacyOverlayManager: protectedDataWillBecomeUnavailable - showing overlay") + self?.showPrivacyOverlay() + } + .store(in: &cancellables) + + // Backup trigger in case protectedDataWillBecomeUnavailable doesn't fire + NotificationCenter.default.publisher(for: UIApplication.willResignActiveNotification) + .sink { [weak self] _ in + print("PrivacyOverlayManager: willResignActive - showing overlay (backup)") + self?.showPrivacyOverlay() + } + .store(in: &cancellables) + + // Hide overlay when app becomes active + NotificationCenter.default.publisher(for: UIApplication.didBecomeActiveNotification) + .sink { [weak self] _ in + print("PrivacyOverlayManager: didBecomeActive - hiding overlay") + self?.hidePrivacyOverlay() + } + .store(in: &cancellables) + + // Also hide on protectedDataDidBecomeAvailable + NotificationCenter.default.publisher(for: UIApplication.protectedDataDidBecomeAvailableNotification) + .sink { [weak self] _ in + print("PrivacyOverlayManager: protectedDataDidBecomeAvailable - hiding overlay") + self?.hidePrivacyOverlay() + } + .store(in: &cancellables) + } + + private func showPrivacyOverlay() { + DispatchQueue.main.async { [weak self] in + self?.createAndShowOverlayWindow() + } + } + + private func hidePrivacyOverlay() { + DispatchQueue.main.async { [weak self] in + self?.destroyOverlayWindow() + } + } + + private func createAndShowOverlayWindow() { + guard overlayWindow == nil else { + print("PrivacyOverlayManager: Overlay already showing") + return + } + + print("PrivacyOverlayManager: Creating privacy overlay window") + + // Create window that covers entire screen and stays on top + if let windowScene = UIApplication.shared.connectedScenes.first as? 
UIWindowScene { + overlayWindow = UIWindow(windowScene: windowScene) + overlayWindow?.windowLevel = UIWindow.Level.alert + 1000 // Ensure it's above everything + overlayWindow?.isHidden = false + overlayWindow?.backgroundColor = .black + + // Create the privacy shield view controller + let privacyVC = PrivacyShieldViewController() + overlayWindow?.rootViewController = privacyVC + + print("PrivacyOverlayManager: Privacy overlay window created and shown") + } else { + print("PrivacyOverlayManager: ERROR - Could not find window scene") + } + } + + private func destroyOverlayWindow() { + guard overlayWindow != nil else { + print("PrivacyOverlayManager: No overlay to hide") + return + } + + print("PrivacyOverlayManager: Destroying privacy overlay window") + overlayWindow?.isHidden = true + overlayWindow?.rootViewController = nil + overlayWindow = nil + } +} + +/// UIViewController that displays the privacy shield +private class PrivacyShieldViewController: UIViewController { + override func viewDidLoad() { + super.viewDidLoad() + setupPrivacyShieldView() + } + + private func setupPrivacyShieldView() { + view.backgroundColor = UIColor.black.withAlphaComponent(0.98) + + // Create the privacy shield content + let containerView = UIView() + containerView.translatesAutoresizingMaskIntoConstraints = false + view.addSubview(containerView) + + // App icon + let iconImageView = UIImageView() + iconImageView.image = UIImage(systemName: "lock.shield.fill") + iconImageView.tintColor = .white + iconImageView.contentMode = .scaleAspectFit + iconImageView.translatesAutoresizingMaskIntoConstraints = false + containerView.addSubview(iconImageView) + + // App name + let appNameLabel = UILabel() + appNameLabel.text = "SnapSafe" + appNameLabel.font = UIFont.systemFont(ofSize: 32, weight: .bold) + appNameLabel.textColor = .white + appNameLabel.textAlignment = .center + appNameLabel.translatesAutoresizingMaskIntoConstraints = false + containerView.addSubview(appNameLabel) + + // Privacy message + let messageLabel = UILabel() + messageLabel.text = "The camera app that minds its own business." 
+ messageLabel.font = UIFont.systemFont(ofSize: 20, weight: .medium) + messageLabel.textColor = .gray + messageLabel.textAlignment = .center + messageLabel.numberOfLines = 0 + messageLabel.translatesAutoresizingMaskIntoConstraints = false + containerView.addSubview(messageLabel) + + // Setup constraints + NSLayoutConstraint.activate([ + // Container centered + containerView.centerXAnchor.constraint(equalTo: view.centerXAnchor), + containerView.centerYAnchor.constraint(equalTo: view.centerYAnchor), + containerView.leadingAnchor.constraint(greaterThanOrEqualTo: view.leadingAnchor, constant: 40), + containerView.trailingAnchor.constraint(lessThanOrEqualTo: view.trailingAnchor, constant: -40), + + // Icon + iconImageView.topAnchor.constraint(equalTo: containerView.topAnchor), + iconImageView.centerXAnchor.constraint(equalTo: containerView.centerXAnchor), + iconImageView.widthAnchor.constraint(equalToConstant: 100), + iconImageView.heightAnchor.constraint(equalToConstant: 100), + + // App name + appNameLabel.topAnchor.constraint(equalTo: iconImageView.bottomAnchor, constant: 30), + appNameLabel.leadingAnchor.constraint(equalTo: containerView.leadingAnchor), + appNameLabel.trailingAnchor.constraint(equalTo: containerView.trailingAnchor), + + // Message + messageLabel.topAnchor.constraint(equalTo: appNameLabel.bottomAnchor, constant: 20), + messageLabel.leadingAnchor.constraint(equalTo: containerView.leadingAnchor), + messageLabel.trailingAnchor.constraint(equalTo: containerView.trailingAnchor), + messageLabel.bottomAnchor.constraint(equalTo: containerView.bottomAnchor), + ]) + } +} diff --git a/SnapSafe/Repositories/SecurePhotoRepository.swift b/SnapSafe/Repositories/SecurePhotoRepository.swift index c558046..779f734 100644 --- a/SnapSafe/Repositories/SecurePhotoRepository.swift +++ b/SnapSafe/Repositories/SecurePhotoRepository.swift @@ -11,88 +11,94 @@ import UIKit class SecurePhoto: Identifiable, Equatable { let id: String - let encryptedData: Data + let rawPhotoData: Data // Store original photo data for binary fidelity let metadata: PhotoMetadata // Memory tracking var isVisible: Bool = false private var lastAccessTime: Date = .init() - // Use lazy loading for images to reduce memory usage - private var _thumbnail: UIImage? - private var _fullImage: UIImage? + // Lazy-loaded image caches - generated from rawPhotoData on demand + private var _cachedFullImage: UIImage? + private var _cachedThumbnail: UIImage? - // Cache for decrypted images - var cachedImage: UIImage? - var cachedThumbnail: UIImage? + // MARK: - Image Access Properties - // Thumbnail is loaded on demand and cached + /// Full-size image loaded lazily from raw photo data + var fullImage: UIImage { + // Update last access time and mark as visible + lastAccessTime = Date() + isVisible = true + + // Return cached image if available + if let cached = _cachedFullImage { + return cached + } + + // Generate full image from raw data + guard let image = UIImage(data: rawPhotoData) else { + print("Failed to create UIImage from rawPhotoData for photo \(id)") + return UIImage(systemName: "photo") ?? 
UIImage() + } + + // Cache the generated image + _cachedFullImage = image + MemoryManager.shared.reportFullImageLoaded() + + return image + } + + /// Thumbnail image loaded lazily and cached var thumbnail: UIImage { // Update last access time and mark as visible lastAccessTime = Date() isVisible = true - // Check for cached thumbnail first - if let cachedThumbnail { - return cachedThumbnail + // Return cached thumbnail if available + if let cached = _cachedThumbnail { + return cached + } + + // Generate thumbnail from raw data + guard let fullSizeImage = UIImage(data: rawPhotoData) else { + print("Failed to create thumbnail from rawPhotoData for photo \(id)") + return UIImage(systemName: "photo") ?? UIImage() } - if let legacyThumbnail = _thumbnail { - return legacyThumbnail + // Generate thumbnail with proper aspect ratio preservation + let maxThumbnailSize: CGFloat = 200 + let originalSize = fullSizeImage.size + + // Calculate scale factor to fit within max size while preserving aspect ratio + let scale = min(maxThumbnailSize / originalSize.width, maxThumbnailSize / originalSize.height) + let thumbnailSize = CGSize( + width: originalSize.width * scale, + height: originalSize.height * scale + ) + + let renderer = UIGraphicsImageRenderer(size: thumbnailSize) + let thumbnail = renderer.image { _ in + fullSizeImage.draw(in: CGRect(origin: .zero, size: thumbnailSize)) } - // Fallback to placeholder - return UIImage(systemName: "photo") ?? UIImage() + // Cache the generated thumbnail + _cachedThumbnail = thumbnail + MemoryManager.shared.reportThumbnailLoaded() + + return thumbnail } - // Method to get thumbnail using decrypted data -// func thumbnail(from decryptedData: Data) -> UIImage? { -// // Check cache first -// if let cached = cachedThumbnail { -// return cached -// } -// -// // Generate thumbnail from decrypted data -// guard let fullImage = UIImage(data: decryptedData) else { -// return nil -// } -// -// // Generate thumbnail -// let thumbnailSize = CGSize(width: 200, height: 200) -// let renderer = UIGraphicsImageRenderer(size: thumbnailSize) -// let thumbnail = renderer.image { _ in -// fullImage.draw(in: CGRect(origin: .zero, size: thumbnailSize)) -// } -// -// // Cache the thumbnail -// cachedThumbnail = thumbnail -// return thumbnail -// } + // MARK: - Utility Methods - // Store decrypted image in cache -// func cacheDecryptedImage(_ decryptedData: Data) -> UIImage? 
{ -// // Update last access time and mark as visible -// lastAccessTime = Date() -// isVisible = true -// -// // Check cache first -// if let cached = cachedImage { -// return cached -// } -// -// // Create image from decrypted data -// guard let image = UIImage(data: decryptedData) else { -// return nil -// } -// -// // Cache the image -// cachedImage = image -// -// // Notify memory manager -// MemoryManager.shared.reportFullImageLoaded() -// -// return image -// } + /// Get the size of the raw photo data in bytes + var dataSizeBytes: Int { + rawPhotoData.count + } + + /// Get a formatted string representation of the photo data size + var formattedDataSize: String { + ByteCountFormatter.string(fromByteCount: Int64(dataSizeBytes), countStyle: .file) + } // Mark as no longer visible in the UI func markAsInvisible() { @@ -104,45 +110,59 @@ class SecurePhoto: Identifiable, Equatable { Date().timeIntervalSince(lastAccessTime) } - // Clear memory when no longer needed + // MARK: - Memory Management + + /// Clear cached images to free memory func clearMemory(keepThumbnail: Bool = true) { - if cachedImage != nil { - cachedImage = nil + // Clear full image cache + if _cachedFullImage != nil { + _cachedFullImage = nil MemoryManager.shared.reportFullImageUnloaded() } - if _fullImage != nil { - _fullImage = nil - MemoryManager.shared.reportFullImageUnloaded() + // Clear thumbnail cache if requested + if !keepThumbnail, _cachedThumbnail != nil { + _cachedThumbnail = nil + MemoryManager.shared.reportThumbnailUnloaded() } + } - if !keepThumbnail { - if cachedThumbnail != nil { - cachedThumbnail = nil - MemoryManager.shared.reportThumbnailUnloaded() - } + /// Force regenerate thumbnail from raw data (useful after photo edits) + func regenerateThumbnail() { + _cachedThumbnail = nil + // Next access to thumbnail property will regenerate it + } - if _thumbnail != nil { - _thumbnail = nil - MemoryManager.shared.reportThumbnailUnloaded() - } - } + /// Force regenerate full image from raw data + func regenerateFullImage() { + _cachedFullImage = nil + // Next access to fullImage property will regenerate it } + // MARK: - Computed Properties + var isDecoy: Bool { metadata.isDecoy } - var fullImage: UIImage { - cachedImage ?? thumbnail - } + // MARK: - Initialization - init(id: String, encryptedData: Data, metadata: PhotoMetadata, cachedImage: UIImage? = nil, cachedThumbnail: UIImage? = nil) { + /// Initialize SecurePhoto with raw photo data + /// - Parameters: + /// - id: Unique identifier for the photo + /// - rawPhotoData: Original photo data for binary fidelity + /// - metadata: Photo metadata including creation date, faces, etc. + init(id: String, rawPhotoData: Data, metadata: PhotoMetadata) { self.id = id - self.encryptedData = encryptedData + self.rawPhotoData = rawPhotoData self.metadata = metadata - self.cachedImage = cachedImage - self.cachedThumbnail = cachedThumbnail + } + + /// Legacy initializer for backward compatibility during migration + /// - Note: This converts UIImage back to Data, prefer using rawPhotoData directly + convenience init(id: String, legacyImage: UIImage, metadata: PhotoMetadata) { + let imageData = legacyImage.jpegData(compressionQuality: 0.95) ?? 
Data() + self.init(id: id, rawPhotoData: imageData, metadata: metadata) } static func == (lhs: SecurePhoto, rhs: SecurePhoto) -> Bool { diff --git a/SnapSafe/SnapSafeApp.swift b/SnapSafe/SnapSafeApp.swift index 9f7a8ba..2205f62 100644 --- a/SnapSafe/SnapSafeApp.swift +++ b/SnapSafe/SnapSafeApp.swift @@ -11,6 +11,9 @@ import SwiftUI struct SnapSafeApp: App { @AppStorage("appearanceMode") private var appearanceMode: AppearanceMode = .system + // Initialize the privacy overlay manager on app launch + private let privacyOverlayManager = PrivacyOverlayManager.shared + var body: some Scene { WindowGroup { ContentView() diff --git a/SnapSafe/Views/ContentView.swift b/SnapSafe/Views/ContentView.swift index 570be9a..ad3a908 100644 --- a/SnapSafe/Views/ContentView.swift +++ b/SnapSafe/Views/ContentView.swift @@ -159,7 +159,6 @@ struct ContentView: View { .animation(.easeInOut(duration: 0.1), value: isShutterAnimating) .sheet(isPresented: $isShowingSettings) { SettingsView() - .obscuredWhenInactive() .screenCaptureProtected() .handleAppState(isPresented: $isShowingSettings) .withAuthenticationOverlay() @@ -169,14 +168,11 @@ struct ContentView: View { SecureGalleryView(onDismiss: { isShowingGallery = false }) - .obscuredWhenInactive() .screenCaptureProtected() .handleAppState(isPresented: $isShowingGallery) .withAuthenticationOverlay() } } - // Apply privacy shield when app is inactive (task switcher, control center, etc.) - .obscuredWhenInactive() // Protect against screen recording and screenshots .screenCaptureProtected() // Monitor PIN setup completion diff --git a/SnapSafe/Views/PhotoCell.swift b/SnapSafe/Views/PhotoCell.swift index 0b0a7ff..1f2417a 100644 --- a/SnapSafe/Views/PhotoCell.swift +++ b/SnapSafe/Views/PhotoCell.swift @@ -20,10 +20,11 @@ struct PhotoCell: View { var body: some View { ZStack(alignment: .topTrailing) { - // Photo image that maintains aspect ratio + // Photo image that fills the available space Image(uiImage: photo.thumbnail) .resizable() - .aspectRatio(contentMode: .fit) // Use .fit to maintain aspect ratio + .aspectRatio(1.0, contentMode: .fill) // Square aspect ratio, fill to avoid gaps + .clipped() .cornerRadius(10) .onTapGesture(perform: onTap) .overlay( diff --git a/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift b/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift index 21bcbc8..c23da38 100644 --- a/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift +++ b/SnapSafe/Views/PhotoDetail/PhotoDetailViewModel.swift @@ -88,7 +88,7 @@ class PhotoDetailViewModel: ObservableObject { } else { // Should never happen but just in case let emptyMetadata = PhotoMetadata(id: UUID().uuidString, fileSize: 0) - return SecurePhoto(id: UUID().uuidString, encryptedData: Data(), metadata: emptyMetadata) + return SecurePhoto(id: UUID().uuidString, rawPhotoData: Data(), metadata: emptyMetadata) } } diff --git a/SnapSafe/Views/PrivacyShieldView.swift b/SnapSafe/Views/PrivacyShieldView.swift index 371bae3..dbe16cc 100644 --- a/SnapSafe/Views/PrivacyShieldView.swift +++ b/SnapSafe/Views/PrivacyShieldView.swift @@ -5,6 +5,7 @@ // Created by Bill Booth on 5/22/25. 
// +import Combine import SwiftUI /// Privacy shield to cover content when app is inactive @@ -44,6 +45,7 @@ struct PrivacyShield: View { struct ObscureWhenInactive: ViewModifier { @Environment(\.scenePhase) private var phase @State private var obscured = false + @State private var lastStateChange = Date() func body(content: Content) -> some View { ZStack { @@ -51,20 +53,53 @@ struct ObscureWhenInactive: ViewModifier { content .blur(radius: obscured ? 20 : 0) - // Privacy shield overlay - if obscured { - PrivacyShield() - .transition(.opacity) + // Privacy shield overlay - always present but conditionally opaque + PrivacyShield() + .opacity(obscured ? 1.0 : 0.0) + .allowsHitTesting(obscured) + .onChange(of: obscured) { _, newValue in + print("PrivacyShield opacity changed - obscured: \(newValue), opacity: \(newValue ? 1.0 : 0.0)") + } + } + // Use system notifications as primary trigger - they're more reliable than scene phase + .onReceive(NotificationCenter.default.publisher(for: UIApplication.willResignActiveNotification)) { _ in + print("willResignActiveNotification received - setting obscured to true") + setObscuredState(true, source: "willResignActive") + } + .onReceive(NotificationCenter.default.publisher(for: UIApplication.didBecomeActiveNotification)) { _ in + print("didBecomeActiveNotification received - setting obscured to false") + // Add small delay to prevent flicker when transitioning back to active + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + setObscuredState(false, source: "didBecomeActive") } } + // Keep scene phase as backup but with debouncing .onChange(of: phase) { _, newPhase in - // .inactive fires while the task-switcher is animating - // .background fires a moment later print("Scene phase changed to: \(newPhase)") - obscured = (newPhase != .active) + let shouldObscure = (newPhase != .active) + + // Only update if enough time has passed since last change (debouncing) + let timeSinceLastChange = Date().timeIntervalSince(lastStateChange) + if timeSinceLastChange > 0.2 { + print("Scene phase backup trigger - setting obscured to: \(shouldObscure)") + setObscuredState(shouldObscure, source: "scenePhase") + } else { + print("Scene phase change ignored due to debouncing (last change \(timeSinceLastChange)s ago)") + } + } + } + + private func setObscuredState(_ newState: Bool, source: String) { + Task { @MainActor in + if obscured != newState { + print("[\(source)] Changing obscured from \(obscured) to \(newState)") + obscured = newState + lastStateChange = Date() + print("[\(source)] Obscured state updated to: \(obscured)") + } else { + print("[\(source)] Obscured state already \(newState), no change needed") + } } - // Use quick animation for immediate shield appearance - .animation(.easeInOut(duration: 0.15), value: obscured) } } diff --git a/SnapSafe/Views/SecureGalleryView.swift b/SnapSafe/Views/SecureGalleryView.swift index a8375be..24aad2b 100644 --- a/SnapSafe/Views/SecureGalleryView.swift +++ b/SnapSafe/Views/SecureGalleryView.swift @@ -235,7 +235,7 @@ struct SecureGalleryView: View { } // MARK: - View Components - + // Photo grid subview private var photosGridView: some View { ScrollView { @@ -261,7 +261,7 @@ struct SecureGalleryView: View { .padding() } } - + // Import progress overlay private var importProgressOverlay: some View { VStack { @@ -283,7 +283,7 @@ struct SecureGalleryView: View { } // MARK: - Action Methods - + private func handlePhotoTap(_ photo: SecurePhoto) { if viewModel.isSelecting { viewModel.togglePhotoSelection(photo, 
isSelectingDecoys: isSelectingDecoys) @@ -295,7 +295,7 @@ struct SecureGalleryView: View { // Save selected photos as decoys private func saveDecoySelections() { viewModel.saveDecoySelections() - + // Reset selection and exit decoy mode isSelectingDecoys = false @@ -303,4 +303,4 @@ struct SecureGalleryView: View { onDismiss?() dismiss() } -} \ No newline at end of file +} diff --git a/SnapSafe/Views/SecureGalleryViewModel.swift b/SnapSafe/Views/SecureGalleryViewModel.swift index 27fe7e7..8caaa8f 100644 --- a/SnapSafe/Views/SecureGalleryViewModel.swift +++ b/SnapSafe/Views/SecureGalleryViewModel.swift @@ -76,26 +76,17 @@ class SecureGalleryViewModel: ObservableObject { isDecoy: isDecoy ) - // Create UIImage and generate thumbnail - guard let image = UIImage(data: imageData) else { + // Validate that the image data is valid before creating SecurePhoto + guard UIImage(data: imageData) != nil else { print("Invalid image data for \(filename)") continue } - // Generate thumbnail - let thumbnailSize = CGSize(width: 200, height: 200) - let renderer = UIGraphicsImageRenderer(size: thumbnailSize) - let thumbnail = renderer.image { _ in - image.draw(in: CGRect(origin: .zero, size: thumbnailSize)) - } - - // Create SecurePhoto object with cached images (legacy system uses unencrypted data) + // Create SecurePhoto object with raw data for binary fidelity let securePhoto = SecurePhoto( id: filename, - encryptedData: Data(), // Empty since legacy system doesn't encrypt - metadata: metadata, - cachedImage: image, - cachedThumbnail: thumbnail + rawPhotoData: imageData, // Store raw data directly + metadata: metadata ) loadedPhotos.append(securePhoto) diff --git a/SnapSafeTests/Shared/MockSecureImageRepository.swift b/SnapSafeTests/Shared/MockSecureImageRepository.swift new file mode 100644 index 0000000..bae4685 --- /dev/null +++ b/SnapSafeTests/Shared/MockSecureImageRepository.swift @@ -0,0 +1,95 @@ +// +// MockSecureImageRepository.swift +// SnapSafeTests +// +// Created by Bill Booth on 5/28/25. +// + +import Foundation +@testable import SnapSafe + +// MARK: - Mock Repository + +// class MockSecureImageRepository: SecureImageRepositoryProtocol { +// var mockPhotos: [SecurePhoto] = [] +// var mockUpdatedPhoto: SecurePhoto? +// var loadAllPhotosCalled = false +// var loadPhotoCalled = false +// var deletePhotoCalled = false +// var loadPhotosWithPredicateCalled = false +// var preloadAdjacentPhotosCalled = false +// var updateFaceDetectionResultsCalled = false +// +// var lastLoadedPhotoId: String? +// var lastDeletedPhotoId: String? +// var lastPredicate: PhotoPredicate? +// var lastPreloadCurrentId: String? +// var lastPreloadAdjacentCount: Int? +// var lastUpdatedPhotoId: String? +// var lastUpdatedFaces: [DetectedFace]? 
+// +// func savePhoto(_ imageData: Data, metadata: PhotoMetadata) async throws -> SecurePhoto { +// let photo = SecurePhoto(id: metadata.id, encryptedData: imageData, metadata: metadata) +// mockPhotos.append(photo) +// return photo +// } +// +// func loadPhoto(withId id: String) async throws -> SecurePhoto { +// loadPhotoCalled = true +// lastLoadedPhotoId = id +// guard let photo = mockPhotos.first(where: { $0.id == id }) else { +// throw SecureImageRepositoryError.photoNotFound(id: id) +// } +// return photo +// } +// +// func loadAllPhotos() async throws -> [SecurePhoto] { +// loadAllPhotosCalled = true +// return mockPhotos +// } +// +// func deletePhoto(withId id: String) async throws { +// deletePhotoCalled = true +// lastDeletedPhotoId = id +// mockPhotos.removeAll { $0.id == id } +// } +// +// func loadPhotosWithPredicate(_ predicate: PhotoPredicate) async throws -> [SecurePhoto] { +// loadPhotosWithPredicateCalled = true +// lastPredicate = predicate +// return mockPhotos +// } +// +// func preloadAdjacentPhotos(currentId: String, adjacentCount: Int) async { +// preloadAdjacentPhotosCalled = true +// lastPreloadCurrentId = currentId +// lastPreloadAdjacentCount = adjacentCount +// } +// +// func importFromCamera(_ imageData: Data) async throws -> SecurePhoto { +// let id = UUID().uuidString +// let metadata = PhotoMetadata(id: id, fileSize: imageData.count) +// return try await savePhoto(imageData, metadata: metadata) +// } +// +// func importFromLibrary(_ imageData: Data) async throws -> SecurePhoto { +// let id = UUID().uuidString +// let metadata = PhotoMetadata(id: id, fileSize: imageData.count) +// return try await savePhoto(imageData, metadata: metadata) +// } +// +// func exportPhoto(_: SecurePhoto, format _: ExportFormat) async throws -> Data { +// return Data("exported".utf8) +// } +// +// func updateFaceDetectionResults(_ photoId: String, faces: [DetectedFace]) async throws -> SecurePhoto { +// updateFaceDetectionResultsCalled = true +// lastUpdatedPhotoId = photoId +// lastUpdatedFaces = faces +// return mockUpdatedPhoto ?? mockPhotos.first(where: { $0.id == photoId })! +// } +// +// func preloadThumbnails(for _: [String]) async {} +// +// func clearCache() {} +// } diff --git a/SnapSafeTests/SnapSafeTests.swift b/SnapSafeTests/SnapSafeTests.swift index 427d512..7050da1 100644 --- a/SnapSafeTests/SnapSafeTests.swift +++ b/SnapSafeTests/SnapSafeTests.swift @@ -24,6 +24,68 @@ class SnapSafeTests: XCTestCase { XCTAssertTrue(true, "Basic test should pass") } + func testThumbnailAspectRatioPreservation() throws { + // Test that thumbnails preserve aspect ratio for various image dimensions + let testCases = [ + (width: 800, height: 600), // 4:3 landscape + (width: 600, height: 800), // 3:4 portrait + (width: 1000, height: 1000), // 1:1 square + (width: 1920, height: 1080), // 16:9 widescreen + (width: 300, height: 1200), // 1:4 tall portrait + ] + + for testCase in testCases { + // Create a test UIImage with specific dimensions + let testImage = createTestImage(width: testCase.width, height: testCase.height) + let originalAspectRatio = Double(testCase.width) / Double(testCase.height) + + // Create metadata for the photo + let metadata = PhotoMetadata( + creationDate: Date(), + location: nil, + cameraModel: "Test Camera", + isDecoy: false, + faces: [] + ) + + // Create SecurePhoto with test data + let imageData = testImage.jpegData(compressionQuality: 1.0) ?? 
Data() + let securePhoto = SecurePhoto(id: "test-\(testCase.width)x\(testCase.height)", rawPhotoData: imageData, metadata: metadata) + + // Get the thumbnail + let thumbnail = securePhoto.thumbnail + let thumbnailAspectRatio = Double(thumbnail.size.width) / Double(thumbnail.size.height) + + // Verify the aspect ratio is preserved (within a small tolerance for floating point comparison) + let tolerance = 0.01 + XCTAssertEqual(thumbnailAspectRatio, originalAspectRatio, accuracy: tolerance, + "Thumbnail aspect ratio \(thumbnailAspectRatio) should match original \(originalAspectRatio) for \(testCase.width)x\(testCase.height) image") + + // Verify thumbnail is properly scaled (should fit within 200x200) + XCTAssertLessThanOrEqual(thumbnail.size.width, 200, "Thumbnail width should not exceed 200px for \(testCase.width)x\(testCase.height) image") + XCTAssertLessThanOrEqual(thumbnail.size.height, 200, "Thumbnail height should not exceed 200px for \(testCase.width)x\(testCase.height) image") + + // At least one dimension should be at or near the maximum (200px) + let maxDimension = max(thumbnail.size.width, thumbnail.size.height) + XCTAssertGreaterThanOrEqual(maxDimension, 199, "At least one thumbnail dimension should be near maximum (200px) for \(testCase.width)x\(testCase.height) image") + } + } + + private func createTestImage(width: Int, height: Int) -> UIImage { + let size = CGSize(width: width, height: height) + let renderer = UIGraphicsImageRenderer(size: size) + + return renderer.image { context in + // Fill with blue color to make it a visible test image + UIColor.blue.setFill() + context.fill(CGRect(origin: .zero, size: size)) + + // Add a white border to make aspect ratio distortion more obvious + UIColor.white.setStroke() + context.stroke(CGRect(x: 5, y: 5, width: size.width - 10, height: size.height - 10), width: 10) + } + } + func testPerformanceExample() throws { // This is an example of a performance test case. measure {