Skip to content

Instantly share code, notes, and snippets.

@jacobsapps
jacobsapps / accelerate-normalize.swift
Created April 6, 2026 12:42
L2 Normalization with Accelerate from Apple Photos, Face Tagging, and Embeddings
// L2-normalize `embedding` in place so that later dot products between
// embeddings equal cosine similarity.
// NOTE(review): `embedding` and `count` are declared outside this snippet —
// presumably a [Float] and its length; confirm against the caller.
var norm: Float = 0
// Sum of squares of `count` elements (stride 1) accumulated into `norm`.
vDSP_svesq(embedding, 1, &norm, vDSP_Length(count))
norm = sqrt(norm)
// Skip the divide for an all-zero vector to avoid division by zero.
if norm > 0 {
// Scalar divide, writing back into `embedding` (vDSP supports in-place use).
// NOTE(review): passing `embedding` and `&embedding` in one call may trip
// Swift's exclusive-access checking on some toolchains — verify it compiles
// cleanly; otherwise divide into a separate output buffer.
vDSP_vsdiv(embedding, 1, &norm, &embedding, 1, vDSP_Length(count))
}
@jacobsapps
jacobsapps / cluster-faces.swift
Created April 6, 2026 12:38
Face Clustering from Apple Photos, Face Tagging, and Embeddings
/// Groups face embeddings into clusters of the same person by building a
/// similarity graph: two faces are linked when `dot` of their embeddings
/// meets `threshold`.
/// NOTE(review): this snippet is truncated in the capture — the traversal
/// that turns `graph` into the returned `[[Int]]` clusters and the function's
/// closing braces are not visible here.
func clusterFaces(_ embeddings: [[Float]], threshold: Float) -> [[Int]] {
// Adjacency list: graph[i] holds the indices judged similar to embedding i.
var graph = Array(repeating: [Int](), count: embeddings.count)
for i in embeddings.indices {
// Compare each unordered pair exactly once (j strictly greater than i).
for j in (i + 1)..<embeddings.count {
if dot(embeddings[i], embeddings[j]) >= threshold {
// Record the edge in both directions for an undirected graph.
graph[i].append(j)
graph[j].append(i)
}
}
@jacobsapps
jacobsapps / dot-product.swift
Created April 6, 2026 12:38
Dot Product Similarity from Apple Photos, Face Tagging, and Embeddings
/// Dot product of two embedding vectors — the cosine-similarity numerator
/// for already L2-normalized embeddings.
///
/// Pairs elements positionally; iteration stops at the shorter vector's
/// length (same `zip` semantics as before), so mismatched lengths silently
/// truncate rather than trap.
func dot(_ a: [Float], _ b: [Float]) -> Float {
    var sum: Float = 0
    for (lhs, rhs) in zip(a, b) {
        sum += lhs * rhs
    }
    return sum
}
@jacobsapps
jacobsapps / compute-embedding.swift
Created April 6, 2026 12:38
Compute Embedding from Apple Photos, Face Tagging, and Embeddings
/// Produces an embedding vector for one detected face.
/// - Parameters:
///   - face: Vision observation whose bounding box locates the face.
///   - image: the full source image the face was detected in.
///   - model: the Core ML model that maps a face crop to an embedding.
/// - Returns: the embedding, or `nil` when the face cannot be cropped.
/// NOTE(review): truncated in this capture — the model inference and the
/// extraction of the output `[Float]` are not visible here.
func computeEmbedding(
face: VNFaceObservation,
image: CGImage,
model: MLModel
) async throws -> [Float]? {
// 1. crop the face out of the original image
guard let crop = cropFace(observation: face, from: image) else { return nil }
// 2. resize the face to the model's input size
// 160 presumably matches the model's square input side — TODO confirm.
let pixelBuffer = try makePixelBuffer(from: crop, size: 160)
@jacobsapps
jacobsapps / quality-filter.swift
Created April 6, 2026 12:38
Quality Filter from Apple Photos, Face Tagging, and Embeddings
// Keep only detections worth embedding: the face must occupy at least
// 40×40 pixels of the original image and the detector must report at
// least 0.5 confidence. boundingBox is normalized [0, 1], so scale by
// the image's pixel dimensions to get real sizes.
let qualityFaces = faces.filter { observation in
    let pixelWidth = CGFloat(cgImage.width) * observation.boundingBox.width
    let pixelHeight = CGFloat(cgImage.height) * observation.boundingBox.height
    let isLargeEnough = pixelWidth >= 40 && pixelHeight >= 40
    return isLargeEnough && observation.confidence >= 0.5
}
@jacobsapps
jacobsapps / detect-faces.swift
Created April 6, 2026 12:38
Face Detection with Vision from Apple Photos, Face Tagging, and Embeddings
/// Detects face rectangles in `image`, bridging Vision's callback-based
/// request into async/await with a checked continuation.
/// NOTE(review): truncated in this capture — the VNImageRequestHandler
/// `perform` call that actually runs `request` (and the function's closing
/// braces) are not visible here. The continuation must be resumed exactly
/// once across all paths; both visible paths return after resuming.
func detectFaces(in image: CGImage) async throws -> [VNFaceObservation] {
try await withCheckedThrowingContinuation { continuation in
let request = VNDetectFaceRectanglesRequest { request, error in
if let error {
continuation.resume(throwing: error)
return
}
// A successful request with no detections yields an empty array.
let faces = request.results as? [VNFaceObservation] ?? []
continuation.resume(returning: faces)
}
// App entry point: wires the SwiftData container for persisted upload jobs
// to the upload engine.
// NOTE(review): truncated in this capture — the rest of `init` and the
// `body` scene are not visible here.
@main
struct FakeTwitterApp: App {
@UIApplicationDelegateAdaptor(FakeTwitterAppDelegate.self) private var appDelegate
private let modelContainer: ModelContainer
private let level4Engine: UploadJobEngine
init() {
// On-disk store (isStoredInMemoryOnly: false) so queued jobs survive launches.
let schema = Schema([PersistedUploadJob.self])
let configuration = ModelConfiguration(schema: schema, isStoredInMemoryOnly: false)
/// Drains outstanding upload jobs one at a time.
/// `isProcessing` is a re-entrancy guard so only one drain loop runs at once.
/// NOTE(review): truncated in this capture — the actual upload call, the
/// `catch` handling, and the loop/function close are not visible here.
private func processQueueIfNeeded() async {
guard !isProcessing else { return }
isProcessing = true
// defer guarantees the guard is cleared on every exit path, including throws.
defer { isProcessing = false }
while let nextJob = fetchNextOutstandingJobSnapshot() {
do {
try withJob(nextJob.id) { _, job in
// Mark the job in-flight and count the attempt before uploading.
job.state = .uploading
job.attempts += 1
/// Hands a tweet off to the background upload engine.
/// `strategy` and `retryOptions` are accepted for signature compatibility
/// with other tiers but are deliberately unused here (hence the `_`
/// bindings) — retry behavior is owned by the engine itself.
func postTweet(
text: String,
strategy _: RetryStrategy,
retryOptions _: RetryOptions
) async throws {
await engine.enqueue(text: text)
}
/// Persists a new upload job for `text`.
/// NOTE(review): truncated in this capture — job creation, insert, and save
/// are not visible here.
func enqueue(text: String) async {
// Fresh ModelContext per enqueue, backed by the shared container.
let context = ModelContext(container)
/// Lifecycle states of a persisted upload job. String raw values keep the
/// state storable via Codable and human-readable when debugging.
enum UploadJobState: String, Codable {
case pending    // queued, not yet attempted
case uploading  // currently in flight
case succeeded  // upload completed
case failed     // failed — terminal or awaiting retry; TODO confirm which
}
// SwiftData model backing one durable upload job; `id` is enforced unique
// by the store.
// NOTE(review): truncated in this capture — the remaining stored properties
// and initializer are not visible here.
@Model
final class PersistedUploadJob {
@Attribute(.unique) var id: UUID