debug flag added

fguzman82 committed Jul 8, 2024
1 parent 6802909 commit 5b4419f
Showing 12 changed files with 118 additions and 143 deletions.
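
Every logging change below follows one pattern: a print statement is wrapped in an #if DEBUG conditional-compilation block, so it is compiled only when the DEBUG condition is set (the default for Xcode Debug builds) and is stripped entirely from Release binaries. A minimal sketch of the pattern, assuming a hypothetical report(_:) function that is not part of this repository:

    import Foundation

    // Sketch of the pattern this commit applies throughout.
    // The name `report` is hypothetical, not CLIP-Finder2 code.
    func report(_ error: Error) {
        #if DEBUG
        // Present in Debug builds only; stripped from Release.
        print("Failed to load model: \(error)")
        #endif
    }
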
4 changes: 0 additions & 4 deletions CLIP-Finder2.xcodeproj/project.pbxproj
@@ -24,7 +24,6 @@
1C62E8EE2C2A211000C1C637 /* PhotosDB.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8EC2C2A211000C1C637 /* PhotosDB.xcdatamodeld */; };
1C62E8F02C2A219D00C1C637 /* Preprocessing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8EF2C2A219D00C1C637 /* Preprocessing.swift */; };
1C62E8F42C2A221F00C1C637 /* CoreDataManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8F32C2A221F00C1C637 /* CoreDataManager.swift */; };
- 1C62E8F82C2A225E00C1C637 /* ImageViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8F72C2A225E00C1C637 /* ImageViewController.swift */; };
1C62E8FC2C2A238500C1C637 /* MLMultiArrayExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8FB2C2A238500C1C637 /* MLMultiArrayExtension.swift */; };
1C62E8FE2C2A24C200C1C637 /* MLMultiArrayTransformer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8FD2C2A24C200C1C637 /* MLMultiArrayTransformer.swift */; };
1C62E9002C2A259F00C1C637 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1C62E8FF2C2A259F00C1C637 /* AppDelegate.swift */; };
@@ -52,7 +51,6 @@
1C62E8ED2C2A211000C1C637 /* PhotosDB.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = PhotosDB.xcdatamodel; sourceTree = "<group>"; };
1C62E8EF2C2A219D00C1C637 /* Preprocessing.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Preprocessing.swift; sourceTree = "<group>"; };
1C62E8F32C2A221F00C1C637 /* CoreDataManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoreDataManager.swift; sourceTree = "<group>"; };
- 1C62E8F72C2A225E00C1C637 /* ImageViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageViewController.swift; sourceTree = "<group>"; };
1C62E8FB2C2A238500C1C637 /* MLMultiArrayExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MLMultiArrayExtension.swift; sourceTree = "<group>"; };
1C62E8FD2C2A24C200C1C637 /* MLMultiArrayTransformer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MLMultiArrayTransformer.swift; sourceTree = "<group>"; };
1C62E8FF2C2A259F00C1C637 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
@@ -111,7 +109,6 @@
1C62E8FB2C2A238500C1C637 /* MLMultiArrayExtension.swift */,
1C62E9072C2B556400C1C637 /* MPSGraphPostProcessing.swift */,
1C0105DC2C341F66005870B5 /* clip_mci_image.mlpackage */,
- 1C62E8F72C2A225E00C1C637 /* ImageViewController.swift */,
1C62E90D2C2C9EC300C1C637 /* CLIP_Tokenizer.swift */,
1C0F0CC62C2E02520007191B /* clip_text.mlpackage */,
1C0F0CC82C2E03FF0007191B /* CLIPTextModel.swift */,
@@ -203,7 +200,6 @@
buildActionMask = 2147483647;
files = (
1C0105DF2C345433005870B5 /* ProfilerPerformanceStats.swift in Sources */,
- 1C62E8F82C2A225E00C1C637 /* ImageViewController.swift in Sources */,
1C0F0CCB2C2F12E40007191B /* CameraPreviewView.swift in Sources */,
1C0F0CC72C2E02520007191B /* clip_text.mlpackage in Sources */,
1C62E9082C2B556400C1C637 /* MPSGraphPostProcessing.swift in Sources */,
12 changes: 10 additions & 2 deletions CLIP-Finder2/CLIPImageModel.swift
@@ -24,7 +24,9 @@ final class CLIPImageModel
do {
try await loadModel()
} catch {
+ #if DEBUG
print("Failed to load model: \(error)")
+ #endif
}
}
@@ -37,19 +39,23 @@
do {
try await loadModel()
} catch {
+ #if DEBUG
print("Failed to reload model: \(error)")
+ #endif
}
}

private func loadModel() async throws {
guard let modelURL = Bundle.main.url(forResource: "clip_mci_image", withExtension: "mlmodelc") else {
- print("Current bundle URL: \(Bundle.main.bundleURL)")
+ // print("Current bundle URL: \(Bundle.main.bundleURL)")
throw DataModelError.modelFileNotFound
}

// let compiledURL = try await MLModel.compileModel(at: modelURL)
model = try await MLModel.load(contentsOf: modelURL, configuration: configuration)
- print("Model loaded successfully.")
+ #if DEBUG
+ print("CLIP image model loaded successfully.")
+ #endif
}


@@ -69,7 +75,9 @@
throw NSError(domain: "DataModel", code: 3, userInfo: [NSLocalizedDescriptionKey: "Failed to retrieve MLMultiArray from prediction"])
}
} catch {
+ #if DEBUG
print("Failed to perform inference: \(error)")
+ #endif
throw error
}
}
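
The same three-line guard now repeats around every print in the file. One way to cut that repetition would be a small helper that hides the conditional; a sketch under that assumption follows (debugLog is hypothetical and not part of this commit):

    import Foundation

    // Hypothetical helper, not part of CLIP-Finder2: wraps the
    // #if DEBUG guard so call sites stay single-line. The
    // autoclosure defers building the message string unless it
    // is actually printed.
    func debugLog(_ message: @autoclosure () -> String) {
        #if DEBUG
        print(message())
        #endif
    }

    // Example call site:
    // debugLog("Failed to load model: \(error)")
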
9 changes: 8 additions & 1 deletion CLIP-Finder2/CLIPTextModel.swift
@@ -26,7 +26,9 @@ final class CLIPTextModel
do {
try await loadModel()
} catch {
+ #if DEBUG
print("Failed to load CLIP text model: \(error)")
+ #endif
}
}
@@ -39,18 +41,21 @@
do {
try await loadModel()
} catch {
+ #if DEBUG
print("Failed to reload CLIP text model: \(error)")
+ #endif
}
}

private func loadModel() async throws {
guard let modelURL = Bundle.main.url(forResource: "clip_text", withExtension: "mlmodelc") else {
- print("Current bundle URL: \(Bundle.main.bundleURL)")
throw CLIPTextModelError.modelFileNotFound
}

model = try await MLModel.load(contentsOf: modelURL, configuration: configuration)
+ #if DEBUG
+ print("CLIP text model loaded successfully.")
+ #endif
}

func performInference(_ tokens: [Int32]) async throws -> MLMultiArray? {
@@ -74,7 +79,9 @@
throw CLIPTextModelError.predictionFailed
}
} catch {
+ #if DEBUG
print("Failed to perform CLIP text inference: \(error)")
+ #endif
throw error
}
}
8 changes: 8 additions & 0 deletions CLIP-Finder2/CameraManager.swift
@@ -59,7 +59,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
session.addInput(input!)
}
} catch {
+ #if DEBUG
print("Error setting device input: \(error.localizedDescription)")
+ #endif
return
}

@@ -121,7 +123,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
session.addInput(input!)
}
} catch {
+ #if DEBUG
print("Error switching camera: \(error.localizedDescription)")
+ #endif
}

session.commitConfiguration()
@@ -145,7 +149,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {

camera.unlockForConfiguration()
} catch {
+ #if DEBUG
print("Error configuring the camera: \(error)")
+ #endif
}
}

@@ -167,7 +173,9 @@ class CameraManager: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {

camera.unlockForConfiguration()
} catch {
+ #if DEBUG
print("Error focusing: \(error)")
+ #endif
}
}
}
12 changes: 12 additions & 0 deletions CLIP-Finder2/CoreDataManager.swift
@@ -52,7 +52,9 @@ class CoreDataManager {

try context.save()
} catch {
+ #if DEBUG
print("Failed to save vector: \(error)")
+ #endif
}
}
}
@@ -67,7 +69,9 @@
let results = try context.fetch(fetchRequest)
result = results.first?.vectorData as? MLMultiArray
} catch {
+ #if DEBUG
print("Failed to fetch vector: \(error)")
+ #endif
}
}
return result
@@ -89,9 +93,13 @@
NSManagedObjectContext.mergeChanges(fromRemoteContextSave: changes, into: [self.viewContext])

try context.save()
+ #if DEBUG
print("All data deleted successfully")
+ #endif
} catch {
+ #if DEBUG
print("Failed to delete data: \(error)")
+ #endif
}
}
}
@@ -103,7 +111,9 @@
try context.save()
} catch {
let nsError = error as NSError
+ #if DEBUG
print("Unresolved error \(nsError), \(nsError.userInfo)")
+ #endif
}
}
}
@@ -121,7 +131,9 @@
return (id: id, vector: vector)
}
} catch {
+ #if DEBUG
print("Failed to fetch photo vectors: \(error)")
+ #endif
}
}

14 changes: 14 additions & 0 deletions CLIP-Finder2/CoreMLProfiler.swift
@@ -35,7 +35,9 @@ class ModelProfiler: ObservableObject {
await clipImageModel.reloadModel()

guard let dummyInput = createDummyWhitePixelBuffer(width: 256, height: 256) else {
+ #if DEBUG
print("Failed to create dummy input")
+ #endif
return
}

@@ -47,11 +49,15 @@
if let _ = try await self.clipImageModel.performInference(dummyInput) {
done()
} else {
+ #if DEBUG
print("Inference returned nil")
+ #endif
done()
}
} catch {
+ #if DEBUG
print("Failed to perform inference: \(error)")
+ #endif
done()
}
}
@@ -79,11 +85,15 @@
if let _ = try await self.clipTextModel.performInference(dummyInput) {
done()
} else {
+ #if DEBUG
print("Text inference returned nil")
+ #endif
done()
}
} catch {
+ #if DEBUG
print("Failed to perform text inference: \(error)")
+ #endif
done()
}
}
@@ -113,7 +123,9 @@
&pixelBuffer)

guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
+ #if DEBUG
print("Failed to create CVPixelBuffer")
+ #endif
return nil
}

@@ -128,7 +140,9 @@
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: rgbColorSpace,
bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
+ #if DEBUG
print("Failed to create CGContext")
+ #endif
return nil
}

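
For context, the profiler above feeds a dummy white pixel buffer through each model and signals completion through a done callback; the actual timing lives in ProfilerPerformanceStats.swift, which this diff does not show. A hedged timing sketch under assumed names:

    import CoreFoundation

    // Hypothetical measurement sketch, not the project's
    // implementation: times one async operation and reports the
    // elapsed milliseconds in Debug builds only.
    func measure(_ label: String, _ op: () async throws -> Void) async rethrows {
        let start = CFAbsoluteTimeGetCurrent()
        try await op()
        let elapsedMs = (CFAbsoluteTimeGetCurrent() - start) * 1000
        #if DEBUG
        print("\(label): \(elapsedMs) ms")
        #endif
    }
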
83 changes: 0 additions & 83 deletions CLIP-Finder2/ImageViewController.swift

This file was deleted.

4 changes: 4 additions & 0 deletions CLIP-Finder2/MLMultiArrayTransformer.swift
@@ -26,7 +26,9 @@ class MLMultiArrayTransformer: ValueTransformer {
let data = try NSKeyedArchiver.archivedData(withRootObject: multiArray, requiringSecureCoding: true)
return data
} catch {
+ #if DEBUG
print("Failed to transform MLMultiArray to Data: \(error)")
+ #endif
return nil
}
}
@@ -40,7 +42,9 @@
let multiArray = try NSKeyedUnarchiver.unarchivedObject(ofClass: MLMultiArray.self, from: data)
return multiArray
} catch {
+ #if DEBUG
print("Failed to reverse transform Data to MLMultiArray: \(error)")
+ #endif
return nil
}
}
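
A note on the transformer above: a secure-coding ValueTransformer must be registered with Foundation before the Core Data store loads, so the transformable attribute can resolve it by name. A usage sketch; the name string here is an assumption, not taken from this diff:

    // Hypothetical registration, e.g. early in app startup:
    ValueTransformer.setValueTransformer(
        MLMultiArrayTransformer(),
        forName: NSValueTransformerName("MLMultiArrayTransformer")
    )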