import SwiftUI
import UIKit
import Vision
import AVFoundation
import AudioToolbox
import Combine
import os
import ImageIO
import CoreMedia
import CoreImage

// MARK: - SCANNER VIEW MODEL
// Focused exclusively on camera management, frame processing, and the scanning pipeline.
// Collection state and persistence are delegated to CollectionViewModel.
@MainActor
class ScannerViewModel: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    // MARK: Scanner State
    @Published var statusText = "Initializing Database..."
    @Published var isDatabaseLoading = true
    @Published var detectedCard: CardMetadata?
    @Published var detectedDamages: [DamageObservation] = []
    @Published var isProcessing = false
    @Published var isFound = false
    @Published var isPermissionDenied = false
    @Published var showDatabaseAlert = false
    @Published var databaseAlertTitle = ""
    @Published var databaseAlertMessage = ""
    @Published var isSaving = false
    @Published var isTorchOn = false
    @Published var isAutoScanEnabled: Bool = UserDefaults.standard.bool(forKey: "isAutoScanEnabled") {
        didSet { UserDefaults.standard.set(isAutoScanEnabled, forKey: "isAutoScanEnabled") }
    }
    @Published var currentFoilType: String = AppConfig.Defaults.defaultFoil

    // MARK: Collection Forwarding (read-only convenience for ScannerView & CardDetailView)
    // Changes to collectionVM's @Published properties propagate via objectWillChange forwarding.
    var isCollectionLoading: Bool { collectionVM.isCollectionLoading }
    var isConnected: Bool { collectionVM.isConnected }
    var collections: [String] { collectionVM.collections }
    var boxes: [String] { collectionVM.boxes }
    var selectedCurrency: CurrencyCode { collectionVM.selectedCurrency }
    var currentCollection: String {
        get { collectionVM.currentCollection }
        set { collectionVM.currentCollection = newValue }
    }
    var currentBox: String {
        get { collectionVM.currentBox }
        set { collectionVM.currentBox = newValue }
    }

    // MARK: Collection Method Forwarding (for CardDetailView backward compatibility)
    func saveManualCard(_ card: SavedCard) { collectionVM.saveManualCard(card) }
    func updateCardDetails(_ card: SavedCard) { collectionVM.updateCardDetails(card) }

    // MARK: Dependencies
    let collectionVM: CollectionViewModel
    var currentFrameImage: CGImage?
    private var collectionVMObservation: AnyCancellable?
    private var cancellables = Set<AnyCancellable>()
    private var processingTask: Task<Void, Never>?
    private let analyzer = AnalysisActor()
    public let session = AVCaptureSession()
    private let sessionQueue = DispatchQueue(label: "com.iymtg.sessionQueue")
    private let foilEngine = FoilEngine()
    private var lastFrameTime = Date.distantPast
    private var lastSaveTime = Date.distantPast
    private let successHaptics = UINotificationFeedbackGenerator()
    private let detectHaptics = UIImpactFeedbackGenerator(style: .light)
    private var saveTask: Task<Void, Never>?
    private let processingLock = OSAllocatedUnfairLock(initialState: false)
    private var isScanningActive = false
    private var isSessionConfigured = false
    private var focusResetTask: Task<Void, Never>?
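
    // Concurrency overview (descriptive summary of the members above and the methods below):
    //  - All @Published UI state is MainActor-isolated through the class-level @MainActor attribute.
    //  - `sessionQueue` serializes AVCaptureSession configuration, start/stop, torch, and focus changes.
    //  - `processingLock` keeps frame analysis single-flight: captureOutput drops incoming frames
    //    while a previous frame is still in the pipeline.
    //  - `analyzer` (an actor) and the ML engines run off the main actor; results hop back to the
    //    main actor via MainActor.run before being published.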

    // MARK: Init
    init(collectionVM: CollectionViewModel) {
        self.collectionVM = collectionVM
        super.init()

        // Forward collectionVM changes so ScannerView re-renders on collection state updates
        collectionVMObservation = collectionVM.objectWillChange
            .sink { [weak self] _ in self?.objectWillChange.send() }

        UIDevice.current.beginGeneratingDeviceOrientationNotifications()

        // Pre-warm ML models in background
        Task(priority: .background) {
            let _ = ConditionEngine.model
            let _ = FoilEngine.model
        }

        // Load card fingerprint database
        Task.detached(priority: .userInitiated) { [weak self] in
            guard let self = self else { return }
            if let url = Bundle.main.url(forResource: "cards", withExtension: "json") {
                do {
                    try await self.analyzer.loadDatabase(from: url)
                    await MainActor.run { [weak self] in
                        guard let self = self else { return }
                        self.isDatabaseLoading = false
                        self.statusText = "Ready to Scan"
                        self.checkCameraPermissions()
                    }
                } catch {
                    await MainActor.run { [weak self] in
                        guard let self = self else { return }
                        self.isDatabaseLoading = false
                        self.databaseAlertTitle = "Database Error"
                        self.databaseAlertMessage = "Failed to load card database. Please try restarting the app."
                        self.showDatabaseAlert = true
                    }
                }
            } else {
                await MainActor.run { [weak self] in
                    guard let self = self else { return }
                    self.isDatabaseLoading = false
                    self.databaseAlertTitle = "Database Missing"
                    self.databaseAlertMessage = "The card database could not be found. Please reinstall the app."
                    self.showDatabaseAlert = true
                }
            }
        }

        DevEngine.activateIfCompiled()
        ModelManager.shared.checkForUpdates()
    }

    deinit {
        UIDevice.current.endGeneratingDeviceOrientationNotifications()
        processingTask?.cancel()
        saveTask?.cancel()
        focusResetTask?.cancel()
    }

    // MARK: - Camera Permissions & Setup
    func checkCameraPermissions() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            self.isPermissionDenied = false
            if !isSessionConfigured { self.setupCamera() } else { self.startSession() }
        case .notDetermined:
            Task { [weak self] in
                if await AVCaptureDevice.requestAccess(for: .video) {
                    self?.isPermissionDenied = false
                    self?.setupCamera()
                } else {
                    self?.isPermissionDenied = true
                    self?.statusText = "Camera Access Denied"
                }
            }
        case .denied, .restricted:
            self.isPermissionDenied = true
            self.statusText = "Camera Access Denied"
        @unknown default:
            break
        }
    }

    func setupCamera() {
        if self.isPermissionDenied { return }
        DispatchQueue.global(qos: .background).async {
            try? AVAudioSession.sharedInstance().setCategory(.ambient, mode: .default, options: .mixWithOthers)
            try? AVAudioSession.sharedInstance().setActive(true)
        }
        sessionQueue.async { [weak self] in
            guard let self = self else { return }
            guard let dev = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
                  let input = try? AVCaptureDeviceInput(device: dev) else { return }
            self.session.beginConfiguration()
            for i in self.session.inputs { self.session.removeInput(i) }
            for o in self.session.outputs { self.session.removeOutput(o) }
            if self.session.canSetSessionPreset(.hd1920x1080) { self.session.sessionPreset = .hd1920x1080 }
            if self.session.canAddInput(input) { self.session.addInput(input) }
            let out = AVCaptureVideoDataOutput()
            out.setSampleBufferDelegate(self, queue: DispatchQueue(label: "video"))
            if self.session.canAddOutput(out) { self.session.addOutput(out) }
            do {
                try dev.lockForConfiguration()
                if dev.isFocusModeSupported(.continuousAutoFocus) { dev.focusMode = .continuousAutoFocus }
                if dev.isSmoothAutoFocusSupported { dev.isSmoothAutoFocusEnabled = true }
                // OPTIMIZATION: Cap at 30 FPS to reduce thermal load and battery usage
                dev.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 30)
                let zoomFactor = min(dev.maxAvailableVideoZoomFactor, max(1.5, dev.minAvailableVideoZoomFactor))
                dev.videoZoomFactor = zoomFactor
                dev.unlockForConfiguration()
            } catch {}
            self.session.commitConfiguration()
            Task { @MainActor [weak self] in self?.isSessionConfigured = true }
            self.session.startRunning()
            if dev.hasTorch {
                let actualState = dev.torchMode == .on
                Task { @MainActor [weak self] in self?.isTorchOn = actualState }
            }
        }
    }

    func toggleTorch() {
        guard let dev = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back), dev.hasTorch else { return }
        sessionQueue.async { [weak self] in
            try? dev.lockForConfiguration()
            dev.torchMode = dev.torchMode == .on ? .off : .on
            dev.unlockForConfiguration()
            let actualState = dev.torchMode == .on
            Task { @MainActor [weak self] in self?.isTorchOn = actualState }
        }
    }

    func focusCamera(at point: CGPoint) {
        focusResetTask?.cancel()
        sessionQueue.async {
            guard let dev = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else { return }
            do {
                try dev.lockForConfiguration()
                if dev.isFocusPointOfInterestSupported {
                    dev.focusPointOfInterest = point
                    dev.focusMode = .autoFocus
                }
                if dev.isExposurePointOfInterestSupported {
                    dev.exposurePointOfInterest = point
                    dev.exposureMode = .autoExpose
                }
                dev.unlockForConfiguration()
            } catch {}
        }
        focusResetTask = Task { [weak self] in
            do {
                try await Task.sleep(nanoseconds: 4_000_000_000)
                self?.sessionQueue.async {
                    guard let dev = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else { return }
                    do {
                        try dev.lockForConfiguration()
                        if dev.isFocusModeSupported(.continuousAutoFocus) { dev.focusMode = .continuousAutoFocus }
                        dev.unlockForConfiguration()
                    } catch {}
                }
            } catch {
                // Task cancelled, do not reset focus
            }
        }
    }

    func startSession() {
        self.isScanningActive = true
        sessionQueue.async { [weak self] in
            guard let self = self else { return }
            if !self.session.isRunning { self.session.startRunning() }
        }
    }

    func stopSession() {
        self.isScanningActive = false
        focusResetTask?.cancel()
        processingTask?.cancel()
        collectionVM.forceSave()
        sessionQueue.async { [weak self] in
            if let dev = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back), dev.hasTorch {
                try? dev.lockForConfiguration()
                dev.torchMode = .off
                dev.unlockForConfiguration()
            }
            guard let self = self else { return }
            if self.session.isRunning { self.session.stopRunning() }
        }
        Task { @MainActor [weak self] in
            guard let self = self else { return }
            if !self.isFound { self.cancelScan() }
            self.isProcessing = false
            self.isTorchOn = false
        }
    }

    // MARK: - Frame Processing
    nonisolated func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let cvBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        guard processingLock.withLock({ if $0 { return false } else { $0 = true; return true } }) else { return }
        let ciImage = CIImage(cvPixelBuffer: cvBuffer)
        Task.detached(priority: .userInitiated) { [weak self] in
            guard let self = self else { return }
            defer { self.processingLock.withLock { $0 = false } }
            let (shouldProcess, orientation, isBusy) = await MainActor.run {
                let now = Date()
                // OPTIMIZATION: Throttle analysis pipeline to ~15 FPS
                if now.timeIntervalSince(self.lastFrameTime) < 0.06 {
                    return (false, CGImagePropertyOrientation.up, false)
                }
                self.lastFrameTime = now
                return (self.isScanningActive && !self.isFound && !self.isSaving && !self.isDatabaseLoading
                            && !self.isCollectionLoading && now.timeIntervalSince(self.lastSaveTime) > 2.0,
                        self.getCurrentOrientations().1,
                        self.isProcessing)
            }
            guard shouldProcess else { return }
            guard let cg = SharedEngineResources.context.createCGImage(ciImage, from: ciImage.extent) else { return }
            let foilType = await self.foilEngine.addFrame(cg, orientation: orientation)
            await MainActor.run { if let foilType { self.currentFoilType = foilType } }
            guard !isBusy else { return }
            let handler = VNImageRequestHandler(cgImage: cg, orientation: .up)
            let req = VNDetectRectanglesRequest()
            try? handler.perform([req])
            if let obs = req.results?.first as? VNRectangleObservation {
                Task { @MainActor [weak self] in
                    guard let self = self, self.isScanningActive else { return }
                    self.processingTask = Task { [weak self] in
                        await self?.processCrop(obs, from: cg, orientation: orientation)
                    }
                }
            }
        }
    }
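
    // Illustrative sketch only: captureOutput above runs VNDetectRectanglesRequest with Vision's
    // default parameters. If stray rectangles become a problem, the request can be constrained to
    // card-shaped regions. This helper is an assumption (it is not called by the pipeline), and the
    // thresholds are rough guesses rather than tuned values from this project.
    nonisolated private func makeCardRectangleRequest() -> VNDetectRectanglesRequest {
        let request = VNDetectRectanglesRequest()
        // A standard trading card measures roughly 63 x 88 mm, i.e. an aspect ratio near 0.72.
        request.minimumAspectRatio = 0.65
        request.maximumAspectRatio = 0.80
        request.minimumSize = 0.25        // Ignore rectangles smaller than 25% of the smaller image dimension.
        request.maximumObservations = 1   // Only the most prominent candidate is needed.
        request.minimumConfidence = 0.8
        return request
    }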

    func processCrop(_ obs: VNRectangleObservation, from fullImage: CGImage, orientation: CGImagePropertyOrientation) async {
        if Task.isCancelled { return }
        let shouldProceed = await MainActor.run {
            if self.isFound || self.isProcessing { return false }
            self.isProcessing = true
            self.detectHaptics.prepare()
            return true
        }
        guard shouldProceed else { return }
        let croppedImage: CGImage? = await Task.detached {
            let width = CGFloat(fullImage.width)
            let height = CGFloat(fullImage.height)
            let bbox = obs.boundingBox
            let rect = CGRect(x: bbox.origin.x * width,
                              y: (1 - bbox.origin.y - bbox.height) * height,
                              width: bbox.width * width,
                              height: bbox.height * height)
            return fullImage.cropping(to: rect)
        }.value
        guard let cropped = croppedImage else {
            await MainActor.run { self.isProcessing = false }
            return
        }
        if DevEngine.isDevMode { DevEngine.saveRaw(image: UIImage(cgImage: cropped), label: "AutoCrop") }
        if Task.isCancelled {
            await MainActor.run { self.isProcessing = false }
            return
        }
        // OPTIMIZATION: Run identification and damage grading in parallel
        async let analysis = self.analyzer.analyze(croppedImage: cropped, orientation: orientation)
        async let damageCheck = Task.detached(priority: .userInitiated) {
            ConditionEngine.detectDamage(image: cropped, orientation: orientation)
        }.value
        let ((finalCard, detectedSerialized), damages) = await (analysis, damageCheck)
        if Task.isCancelled {
            await MainActor.run { self.isProcessing = false }
            return
        }
        await MainActor.run {
            guard !self.isFound && self.isScanningActive else {
                self.isProcessing = false
                return
            }
            if var card = finalCard {
                self.isFound = true
                self.successHaptics.notificationOccurred(.success)
                AudioServicesPlaySystemSound(1108)
                card.isSerialized = detectedSerialized
                self.detectedDamages = damages
                self.detectedCard = card
                self.currentFrameImage = cropped
                if self.isAutoScanEnabled { self.saveCurrentCard() } else { self.isProcessing = false }
            } else {
                self.isProcessing = false
            }
        }
    }

    // MARK: - Scan Actions
    func cancelScan() {
        self.isFound = false
        self.detectedCard = nil
        self.currentFrameImage = nil
        self.isProcessing = false
        self.currentFoilType = AppConfig.Defaults.defaultFoil
        self.statusText = "Ready to Scan"
        self.processingTask?.cancel()
    }

    func saveCurrentCard() {
        guard let card = detectedCard, let cgImage = currentFrameImage else { return }
        self.isSaving = true
        self.lastSaveTime = Date()
        let imageName = "\(UUID().uuidString).jpg"
        let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: getCurrentOrientations().0)
        var entry = SavedCard(from: card, imageName: imageName, collection: collectionVM.currentCollection, location: collectionVM.currentBox)
        entry.foilType = self.currentFoilType
        entry.condition = ConditionEngine.overallGrade(damages: self.detectedDamages)
        let autoScan = self.isAutoScanEnabled
        // Delegate persistence and cloud sync to CollectionViewModel
        collectionVM.addCard(entry, cgImage: cgImage, uiImage: uiImage, autoScan: autoScan)
        if autoScan {
            DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
                self?.cancelScan()
                self?.isSaving = false
            }
        } else {
            self.cancelScan()
            self.isSaving = false
        }
    }

    // MARK: - Training Uploads
    func uploadTrainingImage(label: String) {
        guard let cg = currentFrameImage else { return }
        let orientation = getCurrentOrientations().0
        let img = UIImage(cgImage: cg, scale: 1.0, orientation: orientation)
        Task.detached { TrainingUploader.upload(image: img, label: label, force: true) }
    }

    func uploadCorrection(image: UIImage?, card: SavedCard, original: SavedCard?) {
        guard let img = image ?? ImageManager.load(name: card.imageFileName) else { return }
        var labels: [String] = []
        if card.name != original?.name || card.setCode != original?.setCode {
            labels.append("Identity_\(card.setCode)_\(card.collectorNumber)")
        }
        if card.condition != original?.condition { labels.append("Condition_\(card.condition)") }
        if card.foilType != original?.foilType { labels.append("Foil_\(card.foilType)") }
        if labels.isEmpty { return }
        Task.detached {
            for label in labels { TrainingUploader.upload(image: img, label: label, force: true) }
        }
    }

    // MARK: - Helpers
    private func getCurrentOrientations() -> (UIImage.Orientation, CGImagePropertyOrientation) {
        switch UIDevice.current.orientation {
        case .portrait: return (.right, .right)
        case .portraitUpsideDown: return (.left, .left)
        case .landscapeLeft: return (.up, .up)
        case .landscapeRight: return (.down, .down)
        default: return (.right, .right)
        }
    }
}
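
// Usage sketch: how a host view might own and drive this view model. This is illustrative only;
// `ScannerHostView` and its body are assumptions, not the app's real ScannerView. The real UI
// presumably also embeds a camera preview layer backed by `viewModel.session`.
#if DEBUG
@MainActor
private struct ScannerHostView: View {
    @StateObject private var viewModel: ScannerViewModel

    init(collectionVM: CollectionViewModel) {
        // The scanner forwards collection state from the shared CollectionViewModel.
        _viewModel = StateObject(wrappedValue: ScannerViewModel(collectionVM: collectionVM))
    }

    var body: some View {
        Text(viewModel.statusText)
            // checkCameraPermissions() configures or restarts the capture session as needed.
            .onAppear { viewModel.checkCameraPermissions() }
            .onDisappear { viewModel.stopSession() }
    }
}
#endif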