Implement storage architecture from ai_blueprint.md

Primary sync: replace PersistenceActor JSON file with SwiftData + CloudKit
- Add SavedCardModel (@Model class) and PersistenceController (ModelContainer
  with .automatic CloudKit, fallback to local). BackgroundPersistenceActor
  (@ModelActor) handles all DB I/O off the main thread.
- One-time migration imports user_collection.json into SwiftData and renames
  the original file to prevent re-import.
- Inject modelContainer into SwiftUI environment in IYmtgApp.

Image storage: Documents/UserContent/ subfolder (blueprint requirement)
- ImageManager.dir now targets iCloud Documents/UserContent/ (or local equiv).
- migrateImagesToUserContent() moves existing JPGs to the new subfolder on
  first launch; called during the SwiftData migration.

Firebase: demoted to optional manual backup (metadata only, no images)
- Remove all automatic CloudEngine.save/delete/batchUpdatePrices calls from
  CollectionViewModel mutations.
- Add backupAllToFirebase() for user-triggered metadata sync.
- Add isFirebaseBackupEnabled to AppConfig (default false).
- Add Cloud Backup section in Library settings with iCloud vs Firebase
  explanation and "Backup Metadata to Firebase Now" button.

Also: full modular refactor (Data/, Features/, Services/ directories) and
README updated with CloudKit setup steps and revised release checklist.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-05 12:13:17 -05:00
parent b993ef4020
commit 24dcb44af4
38 changed files with 2786 additions and 2105 deletions

View File

@@ -0,0 +1,131 @@
import Vision
import CoreML
// MARK: - ANALYSIS ACTOR (Core Card Recognition)
/// Owns the fingerprint database and performs card identification.
/// Actor isolation serializes `loadDatabase`/`analyze`, so the mutable
/// `database`/`fingerprintCache` state needs no additional locking.
actor AnalysisActor {
    // Metadata for every card in the loaded database, in decode order.
    private var database: [CardMetadata] = []
    // Unarchived Vision feature prints keyed by card id. A card whose
    // featureData fails to unarchive stays in `database` but is absent here,
    // so FeatureMatcher.identify can never return it as a match.
    private var fingerprintCache: [UUID: VNFeaturePrintObservation] = [:]

    /// Replaces the in-memory database with the contents of the JSON file at `url`.
    /// - Parameter url: File containing an encoded `[CardFingerprint]`.
    /// - Throws: File-read or JSON-decoding errors. Per-card unarchiving failures
    ///   are deliberately swallowed (`try?`) so one corrupt entry does not abort
    ///   the whole load — the card is kept, just without a fingerprint.
    func loadDatabase(from url: URL) throws {
        let data = try Data(contentsOf: url, options: .mappedIfSafe)
        let loaded = try JSONDecoder().decode([CardFingerprint].self, from: data)
        self.fingerprintCache.removeAll()
        self.database.removeAll()
        for card in loaded {
            if let obs = try? NSKeyedUnarchiver.unarchivedObject(ofClass: VNFeaturePrintObservation.self, from: card.featureData) {
                self.fingerprintCache[card.id] = obs
            }
            self.database.append(CardMetadata(id: card.id, name: card.name, setCode: card.setCode, collectorNumber: card.collectorNumber, hasFoilPrinting: card.hasFoilPrinting, hasSerializedPrinting: card.hasSerializedPrinting ?? false, priceScanned: card.priceScanned))
        }
    }

    /// Identifies the card shown in `croppedImage`.
    /// - Returns: `(card, isSerialized)` — the resolved metadata (nil when nothing
    ///   matched) and whether a serial number was detected on the card.
    func analyze(croppedImage: CGImage, orientation: CGImagePropertyOrientation) async -> (CardMetadata?, Bool) {
        // A fingerprint is mandatory; without one we cannot match at all.
        guard let print = try? await FeatureMatcher.generateFingerprint(from: croppedImage, orientation: orientation) else { return (nil, false) }
        let result = FeatureMatcher.identify(scan: print, database: self.database, cache: self.fingerprintCache)
        var resolvedCard: CardMetadata?
        var detectedSerialized = false
        switch result {
        case .exact(let card):
            resolvedCard = card
        case .unknown:
            return (nil, false)
        case .ambiguous(_, let candidates):
            // Visual match alone was inconclusive: gather extra signals (OCR text,
            // corner shape, saturation, border color, list symbol, promo stamp)
            // and narrow the candidate list step by step. NOTE: the heuristics
            // below are order-dependent — each gate tests the ORIGINAL
            // `candidates` set but filters the running `filtered` set.
            let (ocrSet, ocrNum, ocrYear, isSerialized) = OCREngine.readCardDetails(image: croppedImage, orientation: orientation)
            detectedSerialized = isSerialized
            // Run Heuristics to resolve specific ambiguities
            let isAlpha = CornerDetector.isAlphaCorner(image: croppedImage, orientation: orientation)
            let saturation = SaturationDetector.analyze(image: croppedImage, orientation: orientation)
            let borderColor = BorderDetector.detect(image: croppedImage, orientation: orientation)
            let hasListSymbol = ListSymbolDetector.hasListSymbol(image: croppedImage, orientation: orientation)
            let hasStamp = StampDetector.hasStamp(image: croppedImage, orientation: orientation)
            var filtered = candidates
            // 1. Alpha (LEA) vs Beta (LEB): distinguished by corner roundness.
            if candidates.contains(where: { $0.setCode == "LEA" }) && candidates.contains(where: { $0.setCode == "LEB" }) {
                if isAlpha { filtered = filtered.filter { $0.setCode == "LEA" } }
                else { filtered = filtered.filter { $0.setCode == "LEB" } }
            }
            // 2. Unlimited (2ED) vs Revised (3ED): higher saturation is treated as
            // Unlimited; low saturation keeps Revised AND Summer Magic (SUM),
            // which heuristic 7 later disambiguates via the copyright year.
            if candidates.contains(where: { $0.setCode == "2ED" }) && candidates.contains(where: { $0.setCode == "3ED" }) {
                if saturation > 0.25 { filtered = filtered.filter { $0.setCode == "2ED" } }
                else { filtered = filtered.filter { $0.setCode == "3ED" || $0.setCode == "SUM" } }
            }
            // 3. The List / Mystery Booster: keyed off the bottom-left symbol.
            // Only narrows when at least one list-set candidate survives.
            if hasListSymbol {
                let listSets = ["PLIST", "MB1", "UPLIST", "H1R"]
                let listCandidates = filtered.filter { listSets.contains($0.setCode) }
                if !listCandidates.isEmpty { filtered = listCandidates }
            }
            // 4. World Championship decks: gold border, set codes prefixed "WC".
            if borderColor == .gold {
                let wcCandidates = filtered.filter { $0.setCode.hasPrefix("WC") }
                if !wcCandidates.isEmpty { filtered = wcCandidates }
            }
            // 5. Promo stamps: promo set codes conventionally start with "p".
            if hasStamp {
                let promoCandidates = filtered.filter { $0.setCode.lowercased().hasPrefix("p") }
                if !promoCandidates.isEmpty { filtered = promoCandidates }
            }
            // 6. Chronicles (white border) vs the original printings (black border).
            let chroniclesOriginals = ["ARN", "ATQ", "LEG", "DRK"]
            if candidates.contains(where: { $0.setCode == "CHR" }) && candidates.contains(where: { chroniclesOriginals.contains($0.setCode) }) {
                if borderColor == .white { filtered = filtered.filter { $0.setCode == "CHR" } }
                else if borderColor == .black { filtered = filtered.filter { chroniclesOriginals.contains($0.setCode) } }
            }
            // 7. Summer Magic (Edgar) — a Revised sheet with a 1994 copyright.
            // Only an OCR'd 1994 year keeps SUM; otherwise SUM is dropped so it
            // cannot shadow genuine Revised cards.
            if let year = ocrYear, year == "1994", candidates.contains(where: { $0.setCode == "3ED" }) {
                let sumCandidates = filtered.filter { $0.setCode == "SUM" }
                if !sumCandidates.isEmpty { filtered = sumCandidates }
            } else if candidates.contains(where: { $0.setCode == "3ED" }) {
                filtered = filtered.filter { $0.setCode != "SUM" }
            }
            // Final pick, strongest evidence first: OCR set+number, OCR set only,
            // recognized set symbol, cluster vote, then plain best visual match.
            var resolved: CardMetadata?
            if let set = ocrSet, let num = ocrNum, let match = filtered.first(where: { $0.setCode.uppercased() == set && $0.collectorNumber == num }) { resolved = match }
            else if let set = ocrSet, let match = filtered.first(where: { $0.setCode.uppercased() == set }) { resolved = match }
            else if let set = SetSymbolEngine.recognizeSet(image: croppedImage, orientation: orientation), let match = filtered.first(where: { $0.setCode.caseInsensitiveCompare(set) == .orderedSame }) { resolved = match }
            else if let set = ClusterEngine.refine(candidates: filtered), let match = filtered.first(where: { $0.setCode == set }) { resolved = match }
            else { resolved = filtered.first ?? candidates.first }
            resolvedCard = resolved
        }
        guard let card = resolvedCard else { return (nil, false) }
        // DB CHECK: Only run/trust OCR serialization if the card is known to have
        // a serialized printing; otherwise force the flag off.
        if card.hasSerializedPrinting {
            if case .exact = result {
                // Exact matches skipped the ambiguous branch above, so OCR has
                // not run yet for this image — run it now for the serial check.
                let (_, _, _, isSer) = OCREngine.readCardDetails(image: croppedImage, orientation: orientation)
                detectedSerialized = isSer
            }
        } else {
            detectedSerialized = false
        }
        return (card, detectedSerialized)
    }
}
// MARK: - CLUSTER ENGINE (Ambiguity Resolution)
/// Resolves set-code ambiguity by weighted voting over the top visual candidates.
class ClusterEngine {
    /// Returns the set code with the highest vote total, or nil when
    /// `candidates` is empty. Only the first five candidates vote:
    /// candidate 0 contributes 3 points, candidate 1 contributes 2,
    /// candidates 2-4 contribute 1 each. Tie order is unspecified.
    static func refine(candidates: [CardMetadata]) -> String? {
        let tally = candidates.prefix(5).enumerated().reduce(into: [String: Int]()) { acc, entry in
            acc[entry.element.setCode, default: 0] += max(1, 3 - entry.offset)
        }
        return tally.max { $0.value < $1.value }?.key
    }
}

View File

@@ -0,0 +1,38 @@
import Vision
import CoreGraphics
import ImageIO
// MARK: - FEATURE MATCHER (Vision feature prints)
class FeatureMatcher {
    /// Feature-print revision pinned so fingerprints generated at scan time stay
    /// comparable with the prebuilt database (distances are only meaningful
    /// between prints produced by the same revision).
    /// FIX: `VNGenerateImageFeaturePrintRequest` exposes its revision as a plain
    /// `Int`; the Swift symbol is the global constant
    /// `VNGenerateImageFeaturePrintRequestRevision1` — there is no nested
    /// `Revision.revision1` type in the Vision API.
    static let revision = VNGenerateImageFeaturePrintRequestRevision1

    /// Generates a Vision feature print for `image`.
    /// - Parameters:
    ///   - image: The cropped card image to fingerprint.
    ///   - orientation: EXIF orientation applied before analysis.
    /// - Returns: The first `VNFeaturePrintObservation` produced by the request.
    /// - Throws: Vision errors from `perform`, or an `NSError` (domain
    ///   "FeatureMatcher", code -1) when the request yields no observation.
    /// - Note: `perform(_:)` runs synchronously; the `async` signature lets
    ///   callers hop off their executor — NOTE(review): confirm this is intended.
    static func generateFingerprint(from image: CGImage, orientation: CGImagePropertyOrientation = .up) async throws -> VNFeaturePrintObservation {
        let req = VNGenerateImageFeaturePrintRequest()
        req.revision = revision
        req.imageCropAndScaleOption = .scaleFill
        let handler = VNImageRequestHandler(cgImage: image, orientation: orientation, options: [:])
        try handler.perform([req])
        guard let result = req.results?.first as? VNFeaturePrintObservation else {
            throw NSError(domain: "FeatureMatcher", code: -1, userInfo: [NSLocalizedDescriptionKey: "No features detected"])
        }
        return result
    }

    /// Matches `scan` against every cached fingerprint.
    /// Distance thresholds: below 18.0 to be considered a candidate at all,
    /// below 6.0 for an immediate exact match; otherwise every candidate within
    /// +3.0 of the best distance is reported as ambiguous.
    /// - Returns: `.exact` for a confident single match, `.ambiguous` with the
    ///   close contenders, or `.unknown` when nothing is within range.
    static func identify(scan: VNFeaturePrintObservation, database: [CardMetadata], cache: [UUID: VNFeaturePrintObservation]) -> MatchResult {
        var candidates: [(CardMetadata, Float)] = []
        for card in database {
            // Cards whose fingerprint failed to unarchive are silently unmatchable.
            guard let obs = cache[card.id] else { continue }
            var dist: Float = 0
            if (try? scan.computeDistance(&dist, to: obs)) != nil && dist < 18.0 {
                candidates.append((card, dist))
            }
        }
        let sorted = candidates.sorted { $0.1 < $1.1 }
        guard let best = sorted.first else { return .unknown }
        if best.1 < 6.0 { return .exact(best.0) }
        let close = sorted.filter { $0.1 < (best.1 + 3.0) }
        if close.count > 1 { return .ambiguous(name: best.0.name, candidates: close.map { $0.0 }) }
        return .exact(best.0)
    }
}

View File

@@ -0,0 +1,35 @@
import CoreImage
import CoreGraphics
// MARK: - BORDER DETECTOR
/// Classifies a card's border color by averaging a small strip along the left edge.
class BorderDetector {
    enum BorderColor { case black, white, gold, other }

    /// Returns the detected border color for `image`, honoring `orientation`.
    /// Falls back to `.other` when the average filter cannot be built or the
    /// sampled color matches none of the thresholds.
    static func detect(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> BorderColor {
        let oriented = CIImage(cgImage: image).oriented(orientation)
        let w = oriented.extent.width
        let h = oriented.extent.height
        // Narrow vertical strip just inside the left edge, vertically centered,
        // so only border pixels (not artwork) contribute to the average.
        let strip = CGRect(x: w * 0.02, y: h * 0.4, width: w * 0.05, height: h * 0.2)
        let params: [String: Any] = [
            kCIInputImageKey: oriented,
            kCIInputExtentKey: CIVector(cgRect: strip),
        ]
        guard let averaged = CIFilter(name: "CIAreaAverage", parameters: params)?.outputImage else {
            return .other
        }
        var pixel = [UInt8](repeating: 0, count: 4)
        SharedEngineResources.context.render(
            averaged, toBitmap: &pixel, rowBytes: 4,
            bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
            format: .RGBA8, colorSpace: nil
        )
        let (r, g, b) = (Int(pixel[0]), Int(pixel[1]), Int(pixel[2]))
        let luma = (r + g + b) / 3
        // Gold/Yellow (World Championship decks): high red/green, low blue.
        if r > 140 && g > 120 && b < 100 && r > b + 40 { return .gold }
        if luma < 60 { return .black }
        if luma > 180 { return .white }
        return .other
    }
}

View File

@@ -0,0 +1,39 @@
import CoreImage
import CoreGraphics
// MARK: - CORNER DETECTOR (Alpha vs Beta)
class CornerDetector {
    /// Heuristically distinguishes Alpha from Beta printings by corner roundness.
    /// Alpha corners are cut at ~2mm radius (very round) vs Beta's ~1mm, so a
    /// tight crop of the top-left corner shows noticeably more bright background
    /// around an Alpha card's black border.
    /// - Returns: true when more than 25% of the sampled corner square is
    ///   background (Alpha typically shows 30-40%, Beta under 20%).
    static func isAlphaCorner(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Bool {
        // Alpha corners are 2mm radius (very round). Beta are 1mm (standard).
        // We analyze the top-left corner (4% of width).
        // If significantly more "background" (non-black) pixels exist in the corner square, it's Alpha.
        let context = SharedEngineResources.context
        let ciImage = CIImage(cgImage: image).oriented(orientation)
        let width = ciImage.extent.width
        let height = ciImage.extent.height
        // Square sample, 4% of the card width on each side.
        let cornerSize = Int(Double(width) * 0.04)
        // FIX: Analyze Top-Left corner. CIImage's origin is bottom-left, so the
        // top edge is at y near `height`.
        let cropRect = CGRect(x: 0, y: CGFloat(height) - CGFloat(cornerSize), width: CGFloat(cornerSize), height: CGFloat(cornerSize))
        let cropped = ciImage.cropped(to: cropRect)
        var bitmap = [UInt8](repeating: 0, count: cornerSize * cornerSize * 4)
        // `bounds` is expressed in image coordinates (cropRect), rendering the
        // corner into a cornerSize x cornerSize RGBA8 buffer.
        context.render(cropped, toBitmap: &bitmap, rowBytes: cornerSize * 4, bounds: cropRect, format: .RGBA8, colorSpace: nil)
        var backgroundPixelCount = 0
        let totalPixels = cornerSize * cornerSize
        // Count "background" pixels: anything clearly brighter than the black border.
        for i in stride(from: 0, to: bitmap.count, by: 4) {
            let r = Int(bitmap[i])
            let g = Int(bitmap[i + 1])
            let b = Int(bitmap[i + 2])
            let brightness = (r + g + b) / 3
            if brightness > 80 { backgroundPixelCount += 1 }
        }
        // Alpha corners reveal roughly 30-40% background in a tight corner crop. Beta reveals < 20%.
        return Double(backgroundPixelCount) / Double(totalPixels) > 0.25
    }
}

View File

@@ -0,0 +1,30 @@
import CoreImage
import CoreGraphics
// MARK: - LIST SYMBOL DETECTOR
/// Detects the small white "The List" / Mystery Booster icon printed in the
/// bottom-left corner of the card frame.
class ListSymbolDetector {
    /// Returns true when the average brightness of the bottom-left sample window
    /// exceeds 60 — a white icon on the black border lifts the average well
    /// above pure black (~0-20); with the symbol it lands around 80-150.
    static func hasListSymbol(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Bool {
        let oriented = CIImage(cgImage: image).oriented(orientation)
        let w = oriented.extent.width
        let h = oriented.extent.height
        // The symbol sits near the bottom-left of the frame; CIImage's origin is
        // bottom-left, so the window is anchored close to (0, 0).
        let window = CGRect(x: w * 0.03, y: h * 0.03, width: w * 0.05, height: h * 0.04)
        let params: [String: Any] = [
            kCIInputImageKey: oriented,
            kCIInputExtentKey: CIVector(cgRect: window),
        ]
        guard let averaged = CIFilter(name: "CIAreaAverage", parameters: params)?.outputImage else {
            return false
        }
        var pixel = [UInt8](repeating: 0, count: 4)
        SharedEngineResources.context.render(
            averaged, toBitmap: &pixel, rowBytes: 4,
            bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
            format: .RGBA8, colorSpace: nil
        )
        let brightness = (Int(pixel[0]) + Int(pixel[1]) + Int(pixel[2])) / 3
        return brightness > 60
    }
}

View File

@@ -0,0 +1,30 @@
import CoreImage
import CoreGraphics
// MARK: - SATURATION DETECTOR (Unlimited vs Revised)
/// Measures the average color saturation of the card's central region.
class SaturationDetector {
    /// Returns an approximate saturation in [0, 1] for the middle 50% of the
    /// image (borders excluded), using (max - min) / max on the averaged color.
    /// Returns 0 when the average filter fails or the sample is pure black.
    static func analyze(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Double {
        let oriented = CIImage(cgImage: image).oriented(orientation)
        let w = oriented.extent.width
        let h = oriented.extent.height
        // Center crop keeps artwork in, borders out.
        let center = CGRect(x: w * 0.25, y: h * 0.25, width: w * 0.5, height: h * 0.5)
        let params: [String: Any] = [
            kCIInputImageKey: oriented,
            kCIInputExtentKey: CIVector(cgRect: center),
        ]
        guard let averaged = CIFilter(name: "CIAreaAverage", parameters: params)?.outputImage else {
            return 0
        }
        var pixel = [UInt8](repeating: 0, count: 4)
        SharedEngineResources.context.render(
            averaged, toBitmap: &pixel, rowBytes: 4,
            bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
            format: .RGBA8, colorSpace: nil
        )
        // Simple saturation approximation on the normalized RGB channels.
        let channels = (0..<3).map { Double(pixel[$0]) / 255.0 }
        guard let maxC = channels.max(), let minC = channels.min(), maxC > 0 else { return 0 }
        return (maxC - minC) / maxC
    }
}

View File

@@ -0,0 +1,21 @@
import Vision
import CoreML
import CoreGraphics
// MARK: - STAMP DETECTOR (Promos)
/// Detects promo stamps using a bundled Core ML image classifier.
class StampDetector {
    // Evaluated lazily on first access; nil when stamp detection is disabled
    // in AppConfig or the classifier model cannot be obtained.
    static var model: VNCoreMLModel? = {
        guard AppConfig.enableStampDetection else { return nil }
        return ModelManager.shared.getModel(name: "IYmtgStampClassifier")
    }()

    /// Returns true when the classifier labels the image "Stamped" with
    /// confidence above 0.8; false on any failure or when detection is disabled.
    static func hasStamp(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Bool {
        guard let classifier = model else { return false }
        let request = VNCoreMLRequest(model: classifier)
        request.imageCropAndScaleOption = .scaleFill
        let handler = VNImageRequestHandler(cgImage: image, orientation: orientation, options: [:])
        // Best effort: a failed perform simply leaves `results` empty.
        try? handler.perform([request])
        guard let observations = request.results as? [VNClassificationObservation],
              let best = observations.first else { return false }
        return best.identifier == "Stamped" && best.confidence > 0.8
    }
}

View File

@@ -0,0 +1,69 @@
import Vision
import CoreGraphics
// MARK: - OCR ENGINE
class OCREngine {
    /// Reads collector-line details from a card image via Vision text recognition.
    /// - Returns: `(setCode, number, year, isSerialized)` — any of the first three
    ///   may be nil when not found. All bounding-box checks below use Vision's
    ///   normalized coordinate space, where (0,0) is the image's bottom-left.
    static func readCardDetails(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> (setCode: String?, number: String?, year: String?, isSerialized: Bool) {
        let request = VNRecognizeTextRequest()
        request.recognitionLevel = .accurate
        // Set codes and collector numbers are not dictionary words.
        request.usesLanguageCorrection = false
        let handler = VNImageRequestHandler(cgImage: image, orientation: orientation)
        // Best effort: a failed perform simply yields no observations.
        try? handler.perform([request])
        guard let obs = request.results as? [VNRecognizedTextObservation] else { return (nil, nil, nil, false) }
        var possibleSetCode: String?
        var possibleNumber: String?
        var possibleYear: String?
        var isSerialized = false
        for observation in obs {
            guard let candidate = observation.topCandidates(1).first else { continue }
            let text = candidate.string.trimmingCharacters(in: .whitespacesAndNewlines)
            // Set Code: 3-5 chars, uppercase; first hit wins.
            // FIX: Ensure it's in the bottom half to avoid reading Card Name (e.g. "FOG") as Set Code
            // FIX: Must contain at least one letter to avoid reading years or numbers as Set Codes
            if text.count >= 3 && text.count <= 5 && text == text.uppercased() && possibleSetCode == nil && text.rangeOfCharacter(from: .letters) != nil {
                // Vision coordinates: (0,0) is Bottom-Left. y < 0.5 is Bottom Half.
                // FIX: Tighten to y < 0.2 to match Collector Number and fully exclude Text Box (e.g. "FLY")
                if observation.boundingBox.origin.y < 0.2 { possibleSetCode = text }
            }
            // Collector Number ("123/456") & Serialized stamps ("045/500").
            if text.contains("/") && text.count <= 10 {
                // FILTER: Ignore Power/Toughness in bottom-right corner (x > 0.75, y < 0.2)
                if observation.boundingBox.origin.x > 0.75 && observation.boundingBox.origin.y < 0.2 { continue }
                // FIX: Tighten standard location to bottom 20% to avoid text box stats (e.g. "10/10" token)
                let isStandardLocation = observation.boundingBox.origin.y < 0.2
                let parts = text.split(separator: "/")
                if parts.count == 2 {
                    let numStr = parts[0].trimmingCharacters(in: .whitespacesAndNewlines)
                    let denomStr = parts[1].trimmingCharacters(in: .whitespacesAndNewlines)
                    // Both halves must parse as integers for the token to count.
                    if let num = Int(numStr), let denom = Int(denomStr) {
                        if isStandardLocation {
                            // Tiny denominators are stats (e.g. "1/1"), not print runs.
                            if denom < 10 { continue }
                            if possibleNumber == nil { possibleNumber = numStr }
                        } else if observation.boundingBox.origin.y > 0.5 {
                            // FIX: Only consider Top Half (Art) as Serialized to avoid Text Box false positives
                            isSerialized = true
                        }
                    }
                }
            } else if text.count >= 1 && text.count <= 5, let first = text.first, first.isNumber, possibleNumber == nil {
                // FIX: Only accept simple numbers if they are at the very bottom (Collector Number location)
                // AND not in the bottom-right corner (Power/Toughness zone)
                if observation.boundingBox.origin.y < 0.2 && observation.boundingBox.origin.x < 0.75 { possibleNumber = text }
            }
            // Copyright Year (for Summer Magic detection): any 19xx/20xx match,
            // but only from text in the bottom 15% of the card.
            if let range = text.range(of: #"(19|20)\d{2}"#, options: .regularExpression) {
                if observation.boundingBox.origin.y < 0.15 {
                    possibleYear = String(text[range])
                }
            }
        }
        return (possibleSetCode, possibleNumber, possibleYear, isSerialized)
    }
}