Implement storage architecture from ai_blueprint.md

Primary sync: replace PersistenceActor JSON file with SwiftData + CloudKit
- Add SavedCardModel (@Model class) and PersistenceController (ModelContainer
  with .automatic CloudKit, fallback to local). BackgroundPersistenceActor
  (@ModelActor) handles all DB I/O off the main thread; see the sketch below.
- One-time migration imports user_collection.json into SwiftData and renames
  the original file to prevent re-import.
- Inject modelContainer into SwiftUI environment in IYmtgApp.
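
A minimal sketch of the setup described above. The type names
(SavedCardModel, PersistenceController, BackgroundPersistenceActor) are from
this commit; the model's fields and the exact fallback logic are illustrative
assumptions:

    import SwiftData

    @Model
    final class SavedCardModel {
        // CloudKit-backed SwiftData models need defaults and no unique constraints.
        var id: UUID = UUID()
        var name: String = ""       // assumed field, for illustration only
        var setCode: String = ""    // assumed field, for illustration only
        init(name: String, setCode: String) {
            self.name = name
            self.setCode = setCode
        }
    }

    struct PersistenceController {
        static let shared = PersistenceController()
        let container: ModelContainer

        init() {
            let schema = Schema([SavedCardModel.self])
            do {
                // Prefer the CloudKit-backed store (.automatic uses the app's default container).
                let cloud = ModelConfiguration(schema: schema, cloudKitDatabase: .automatic)
                container = try ModelContainer(for: schema, configurations: [cloud])
            } catch {
                // Fall back to a purely local store when CloudKit is unavailable.
                let local = ModelConfiguration(schema: schema, cloudKitDatabase: .none)
                container = try! ModelContainer(for: schema, configurations: [local])
            }
        }
    }

    // @ModelActor synthesizes an executor bound to the actor's own ModelContext,
    // so inserts and saves here never touch the main thread.
    @ModelActor
    actor BackgroundPersistenceActor {
        func insert(_ card: SavedCardModel) throws {
            modelContext.insert(card)
            try modelContext.save()
        }
    }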

Image storage: Documents/UserContent/ subfolder (blueprint requirement)
- ImageManager.dir now targets iCloud Documents/UserContent/ (or local equiv).
- migrateImagesToUserContent() moves existing JPGs to the new subfolder on
  first launch; called during the SwiftData migration (sketch below).
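
A sketch of that migration helper. Only the helper's name appears in this
commit; the signature and the legacy-directory parameter are assumptions:

    import Foundation

    extension ImageManager {
        // One-time, idempotent move of legacy JPGs into Documents/UserContent/.
        static func migrateImagesToUserContent(from legacyDir: URL, to userContentDir: URL) throws {
            let fm = FileManager.default
            try fm.createDirectory(at: userContentDir, withIntermediateDirectories: true)
            let jpgs = try fm.contentsOfDirectory(at: legacyDir, includingPropertiesForKeys: nil)
                .filter { $0.pathExtension.lowercased() == "jpg" }
            for src in jpgs {
                let dst = userContentDir.appendingPathComponent(src.lastPathComponent)
                // Skip files that already migrated so a re-launch can't clobber them.
                guard !fm.fileExists(atPath: dst.path) else { continue }
                try fm.moveItem(at: src, to: dst)
            }
        }
    }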

Firebase: demoted to optional manual backup (metadata only, no images)
- Remove all automatic CloudEngine.save/delete/batchUpdatePrices calls from
  CollectionViewModel mutations.
- Add backupAllToFirebase() for user-triggered metadata sync (sketch after
  this list).
- Add isFirebaseBackupEnabled to AppConfig (default false).
- Add Cloud Backup section in Library settings with iCloud vs Firebase
  explanation and "Backup Metadata to Firebase Now" button.
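
A sketch of the manual backup path. backupAllToFirebase() and
isFirebaseBackupEnabled are named in this commit; the Firestore document
layout and the savedCards/userID properties are illustrative assumptions:

    import FirebaseFirestore

    extension CollectionViewModel {
        // User-triggered and metadata-only: card images never leave device/iCloud.
        func backupAllToFirebase() async throws {
            guard AppConfig.isFirebaseBackupEnabled else { return }
            let db = Firestore.firestore()
            let batch = db.batch()  // note: Firestore batches cap at 500 writes
            for card in savedCards {
                let doc = db.collection("users").document(userID)
                    .collection("cards").document(card.id.uuidString)
                batch.setData(["name": card.name, "setCode": card.setCode], forDocument: doc)
            }
            try await batch.commit()
        }
    }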

Also: full modular refactor (Data/, Features/, Services/ directories) and
README updated with CloudKit setup steps and revised release checklist.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-05 12:13:17 -05:00
parent b993ef4020
commit 24dcb44af4
38 changed files with 2786 additions and 2105 deletions


@@ -0,0 +1,35 @@
import CoreImage
import CoreGraphics

// MARK: - BORDER DETECTOR

class BorderDetector {
    enum BorderColor { case black, white, gold, other }

    static func detect(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> BorderColor {
        let context = SharedEngineResources.context
        let ciImage = CIImage(cgImage: image).oriented(orientation)
        let width = ciImage.extent.width
        let height = ciImage.extent.height
        // Crop a small strip from the left edge
        let cropRect = CGRect(x: width * 0.02, y: height * 0.4, width: width * 0.05, height: height * 0.2)
        let vector = CIVector(cgRect: cropRect)
        let filter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: ciImage, kCIInputExtentKey: vector])
        guard let output = filter?.outputImage else { return .other }
        var bitmap = [UInt8](repeating: 0, count: 4)
        context.render(output, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
        let r = Int(bitmap[0])
        let g = Int(bitmap[1])
        let b = Int(bitmap[2])
        let brightness = (r + g + b) / 3
        // Gold/yellow detection (World Championship Decks): high red/green, low blue
        if r > 140 && g > 120 && b < 100 && r > b + 40 { return .gold }
        if brightness < 60 { return .black }
        if brightness > 180 { return .white }
        return .other
    }
}


@@ -0,0 +1,39 @@
import CoreImage
import CoreGraphics

// MARK: - CORNER DETECTOR (Alpha vs Beta)

class CornerDetector {
    static func isAlphaCorner(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Bool {
        // Alpha corners are cut with a ~2mm radius (very round); Beta uses ~1mm (standard).
        // We analyze the top-left corner square (4% of the card width): the rounder the
        // corner, the more "background" (non-black) pixels appear inside that square.
        let context = SharedEngineResources.context
        let ciImage = CIImage(cgImage: image).oriented(orientation)
        let width = ciImage.extent.width
        let height = ciImage.extent.height
        let cornerSize = Int(width * 0.04)
        // CIImage uses a bottom-left origin, so the top-left corner sits at y = height - cornerSize.
        let cropRect = CGRect(x: 0, y: height - CGFloat(cornerSize), width: CGFloat(cornerSize), height: CGFloat(cornerSize))
        let cropped = ciImage.cropped(to: cropRect)
        var bitmap = [UInt8](repeating: 0, count: cornerSize * cornerSize * 4)
        context.render(cropped, toBitmap: &bitmap, rowBytes: cornerSize * 4, bounds: cropRect, format: .RGBA8, colorSpace: nil)
        var backgroundPixelCount = 0
        let totalPixels = cornerSize * cornerSize
        for i in stride(from: 0, to: bitmap.count, by: 4) {
            let r = Int(bitmap[i])
            let g = Int(bitmap[i + 1])
            let b = Int(bitmap[i + 2])
            let brightness = (r + g + b) / 3
            if brightness > 80 { backgroundPixelCount += 1 }
        }
        // Alpha corners reveal roughly 30-40% background in a tight corner crop; Beta reveals under 20%.
        return Double(backgroundPixelCount) / Double(totalPixels) > 0.25
    }
}


@@ -0,0 +1,30 @@
import CoreImage
import CoreGraphics

// MARK: - LIST SYMBOL DETECTOR

class ListSymbolDetector {
    static func hasListSymbol(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Bool {
        // The "List" / "Mystery" symbol is a small white icon in the bottom-left corner.
        // It sits roughly at x: 3-7%, y: 93-97% of the card frame (measured top-down).
        let context = SharedEngineResources.context
        let ciImage = CIImage(cgImage: image).oriented(orientation)
        let width = ciImage.extent.width
        let height = ciImage.extent.height
        // CIImage uses a bottom-left origin, so the bottom-left symbol sits near y = 3% of height.
        let cropRect = CGRect(x: width * 0.03, y: height * 0.03, width: width * 0.05, height: height * 0.04)
        let vector = CIVector(cgRect: cropRect)
        guard let filter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: ciImage, kCIInputExtentKey: vector]),
              let output = filter.outputImage else { return false }
        var bitmap = [UInt8](repeating: 0, count: 4)
        context.render(output, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
        // A white symbol on a black border significantly raises the average brightness:
        // pure black border ~ 0-20; with symbol ~ 80-150.
        let brightness = (Int(bitmap[0]) + Int(bitmap[1]) + Int(bitmap[2])) / 3
        return brightness > 60
    }
}


@@ -0,0 +1,30 @@
import CoreImage
import CoreGraphics

// MARK: - SATURATION DETECTOR (Unlimited vs Revised)

class SaturationDetector {
    static func analyze(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Double {
        // Crop to the center 50% to analyze artwork saturation, ignoring borders.
        let ciImage = CIImage(cgImage: image).oriented(orientation)
        let width = ciImage.extent.width
        let height = ciImage.extent.height
        let cropRect = CGRect(x: width * 0.25, y: height * 0.25, width: width * 0.5, height: height * 0.5)
        let vector = CIVector(cgRect: cropRect)
        guard let filter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: ciImage, kCIInputExtentKey: vector]),
              let output = filter.outputImage else { return 0 }
        var bitmap = [UInt8](repeating: 0, count: 4)
        let context = SharedEngineResources.context
        context.render(output, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
        // HSV-style saturation approximation: (max - min) / max
        let r = Double(bitmap[0]) / 255.0
        let g = Double(bitmap[1]) / 255.0
        let b = Double(bitmap[2]) / 255.0
        let maxC = max(r, max(g, b))
        let minC = min(r, min(g, b))
        return maxC == 0 ? 0 : (maxC - minC) / maxC
    }
}


@@ -0,0 +1,21 @@
import Vision
import CoreML
import CoreGraphics

// MARK: - STAMP DETECTOR (Promos)

class StampDetector {
    static var model: VNCoreMLModel? = {
        guard AppConfig.enableStampDetection else { return nil }
        return ModelManager.shared.getModel(name: "IYmtgStampClassifier")
    }()

    static func hasStamp(image: CGImage, orientation: CGImagePropertyOrientation = .up) -> Bool {
        guard let model = model else { return false }
        let request = VNCoreMLRequest(model: model)
        request.imageCropAndScaleOption = .scaleFill
        let handler = VNImageRequestHandler(cgImage: image, orientation: orientation, options: [:])
        try? handler.perform([request])
        guard let results = request.results as? [VNClassificationObservation], let top = results.first else { return false }
        return top.identifier == "Stamped" && top.confidence > 0.8
    }
}
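
Taken together, the five detectors narrow a card to a printing. A hypothetical
caller, for illustration only: the routing below and the 0.5 saturation
threshold are assumptions, not code from this commit.

    func classifyEarlyPrinting(of image: CGImage) -> String {
        switch BorderDetector.detect(image: image) {
        case .gold:
            return "World Championship Deck"
        case .white:
            // White border: Unlimited artwork is noticeably more saturated than Revised.
            return SaturationDetector.analyze(image: image) > 0.5 ? "Unlimited" : "Revised"
        case .black:
            if ListSymbolDetector.hasListSymbol(image: image) { return "The List reprint" }
            if StampDetector.hasStamp(image: image) { return "Stamped promo" }
            // Black border without symbols: Alpha vs Beta comes down to corner radius.
            return CornerDetector.isAlphaCorner(image: image) ? "Alpha" : "Beta"
        case .other:
            return "Unknown"
        }
    }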