Implement storage architecture from ai_blueprint.md
Primary sync: replace PersistenceActor JSON file with SwiftData + CloudKit - Add SavedCardModel (@Model class) and PersistenceController (ModelContainer with .automatic CloudKit, fallback to local). BackgroundPersistenceActor (@ModelActor) handles all DB I/O off the main thread. - One-time migration imports user_collection.json into SwiftData and renames the original file to prevent re-import. - Inject modelContainer into SwiftUI environment in IYmtgApp. Image storage: Documents/UserContent/ subfolder (blueprint requirement) - ImageManager.dir now targets iCloud Documents/UserContent/ (or local equiv). - migrateImagesToUserContent() moves existing JPGs to the new subfolder on first launch; called during the SwiftData migration. Firebase: demoted to optional manual backup (metadata only, no images) - Remove all automatic CloudEngine.save/delete/batchUpdatePrices calls from CollectionViewModel mutations. - Add backupAllToFirebase() for user-triggered metadata sync. - Add isFirebaseBackupEnabled to AppConfig (default false). - Add Cloud Backup section in Library settings with iCloud vs Firebase explanation and "Backup Metadata to Firebase Now" button. Also: full modular refactor (Data/, Features/, Services/ directories) and README updated with CloudKit setup steps and revised release checklist. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
38
IYmtg_App_iOS/Services/Vision/FeatureMatcher.swift
Normal file
38
IYmtg_App_iOS/Services/Vision/FeatureMatcher.swift
Normal file
@@ -0,0 +1,38 @@
|
||||
import Vision
|
||||
import CoreGraphics
|
||||
import ImageIO
|
||||
|
||||
/// Perceptual card identification using Vision feature prints.
///
/// A scanned card image is reduced to a `VNFeaturePrintObservation` fingerprint
/// and compared against a pre-computed cache of fingerprints for the known-card
/// database. Matching is distance-based: smaller feature-print distances mean
/// more visually similar images.
final class FeatureMatcher {

    /// Feature-print request revision, pinned so fingerprints generated on
    /// different OS versions stay comparable (`computeDistance` is only
    /// meaningful between observations produced by the same revision).
    ///
    /// NOTE(review): Vision exposes revisions as global `Int` constants
    /// (`VNGenerateImageFeaturePrintRequestRevision1`), not as a nested
    /// `Revision` type, and `VNRequest.revision` is `Int` — the previous
    /// `VNGenerateImageFeaturePrintRequest.Revision.revision1` could not
    /// compile. Confirm against the deployment-target SDK.
    static let revision = VNGenerateImageFeaturePrintRequestRevision1

    // Distance thresholds (empirically tuned for card artwork; units are
    // Vision feature-print distances, smaller = more similar).
    private static let candidateThreshold: Float = 18.0 // beyond this: not a plausible match at all
    private static let exactThreshold: Float = 6.0      // below this: confident single match
    private static let ambiguityMargin: Float = 3.0     // runners-up within this of the best are "close"

    /// Generates a feature-print fingerprint for `image`.
    ///
    /// - Parameters:
    ///   - image: Source image (typically a cropped card scan).
    ///   - orientation: EXIF orientation to apply before analysis. Defaults to `.up`.
    /// - Returns: The feature-print observation for the image.
    /// - Throws: Vision errors propagated from `perform(_:)`, or an `NSError`
    ///   (domain `"FeatureMatcher"`, code `-1`) when the request produces no
    ///   feature-print observation.
    static func generateFingerprint(from image: CGImage, orientation: CGImagePropertyOrientation = .up) async throws -> VNFeaturePrintObservation {
        let request = VNGenerateImageFeaturePrintRequest()
        request.revision = revision
        // scaleFill ignores aspect ratio; acceptable here because all card
        // scans share one aspect ratio, so any distortion is consistent
        // between the scan and the cached database fingerprints.
        request.imageCropAndScaleOption = .scaleFill

        let handler = VNImageRequestHandler(cgImage: image, orientation: orientation, options: [:])
        try handler.perform([request])

        guard let observation = request.results?.first as? VNFeaturePrintObservation else {
            throw NSError(domain: "FeatureMatcher", code: -1, userInfo: [NSLocalizedDescriptionKey: "No features detected"])
        }
        return observation
    }

    /// Identifies the database card whose cached fingerprint best matches `scan`.
    ///
    /// - Parameters:
    ///   - scan: Fingerprint of the freshly scanned card.
    ///   - database: Candidate card metadata to match against.
    ///   - cache: Pre-computed fingerprints keyed by card ID; cards with no
    ///     cached fingerprint are skipped.
    /// - Returns: `.exact` for a confident match, `.ambiguous` when several
    ///   candidates are within `ambiguityMargin` of the best (and the best is
    ///   not under `exactThreshold`), or `.unknown` when nothing is within
    ///   `candidateThreshold`.
    static func identify(scan: VNFeaturePrintObservation, database: [CardMetadata], cache: [UUID: VNFeaturePrintObservation]) -> MatchResult {
        var candidates: [(card: CardMetadata, distance: Float)] = []
        for card in database {
            guard let cached = cache[card.id] else { continue }
            var distance: Float = 0
            // computeDistance can throw (e.g. revision mismatch); treat a
            // failure as "no match for this card" rather than aborting the scan.
            guard (try? scan.computeDistance(&distance, to: cached)) != nil else { continue }
            if distance < candidateThreshold {
                candidates.append((card, distance))
            }
        }

        let ranked = candidates.sorted { $0.distance < $1.distance }
        guard let best = ranked.first else { return .unknown }

        // A sufficiently small distance is an unambiguous hit regardless of
        // how close any runners-up are.
        if best.distance < exactThreshold { return .exact(best.card) }

        // Otherwise, if several candidates sit within the ambiguity margin of
        // the best, let the caller disambiguate (e.g. via a user prompt).
        let close = ranked.filter { $0.distance < best.distance + ambiguityMargin }
        if close.count > 1 { return .ambiguous(name: best.card.name, candidates: close.map(\.card)) }
        return .exact(best.card)
    }
}
|
||||
Reference in New Issue
Block a user