From 40816b272695726dc969f2bbb024469fb6e77a08 Mon Sep 17 00:00:00 2001 From: Tuan Mai A <62716934+st-tuanmai@users.noreply.github.com> Date: Wed, 22 May 2024 01:30:00 +0700 Subject: [PATCH] iOS update mediapipe tasks vision version from 0.10.12 to 0.10.14 (#391) * update face detector * update unit tests face detector * update version face landmarker * update version of gesture recognizer and change unit test * update version and unit test hand landmarker * update version and unit test image classifier * update version image embedder * update image segmenter version and fix video segmenter * update version object detector and unit test * update version pose landmarker --- .../FaceDetectorTests/FaceDetectorTests.swift | 7 +-- examples/face_detector/ios/Podfile | 2 +- examples/face_detector/ios/Podfile.lock | 14 +++--- .../FaceLandmarkerTests.swift | 3 +- examples/face_landmarker/ios/Podfile | 2 +- examples/face_landmarker/ios/Podfile.lock | 14 +++--- .../GestureRecognizerTests.swift | 47 ++++++++++--------- examples/gesture_recognizer/ios/Podfile | 2 +- examples/gesture_recognizer/ios/Podfile.lock | 14 +++--- .../HandLandmarkerTests.swift | 45 +++++++++--------- examples/hand_landmarker/ios/Podfile | 2 +- examples/hand_landmarker/ios/Podfile.lock | 14 +++--- .../ImageClassifierTests.swift | 3 +- examples/image_classification/ios/Podfile | 2 +- .../image_classification/ios/Podfile.lock | 14 +++--- examples/image_embedder/ios/Podfile | 2 +- examples/image_embedder/ios/Podfile.lock | 14 +++--- .../Services/SegmentedImageRenderer.swift | 16 ++----- .../ViewContoller/CameraViewController.swift | 2 - .../MediaLibraryViewController.swift | 10 ++-- .../ImageSegmenterUITests.swift | 41 ---------------- .../ImageSegmenterUITestsLaunchTests.swift | 1 - examples/image_segmentation/ios/Podfile | 12 +---- examples/image_segmentation/ios/Podfile.lock | 14 +++--- .../ObjectDetector.xcodeproj/project.pbxproj | 16 +------ .../ObjectDetectorTests.swift | 27 ++++++----- 
.../ObjectDetectorUITests.swift | 41 ---------------- .../ObjectDetectorUITestsLaunchTests.swift | 32 ------------- examples/object_detection/ios/Podfile | 2 +- examples/object_detection/ios/Podfile.lock | 14 +++--- examples/pose_landmarker/ios/Podfile | 2 +- examples/pose_landmarker/ios/Podfile.lock | 14 +++--- 32 files changed, 152 insertions(+), 293 deletions(-) delete mode 100644 examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITests.swift delete mode 100644 examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITestsLaunchTests.swift delete mode 100644 examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITests.swift delete mode 100644 examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITestsLaunchTests.swift diff --git a/examples/face_detector/ios/FaceDetectorTests/FaceDetectorTests.swift b/examples/face_detector/ios/FaceDetectorTests/FaceDetectorTests.swift index 43efad4c..ca151e1d 100644 --- a/examples/face_detector/ios/FaceDetectorTests/FaceDetectorTests.swift +++ b/examples/face_detector/ios/FaceDetectorTests/FaceDetectorTests.swift @@ -33,8 +33,8 @@ final class FaceDetectorTests: XCTestCase { boundingBox: CGRect(x: 126.0, y: 100.0, width: 464.0, height: 464.0), keypoints: nil), Detection( - categories: [ResultCategory(index: 0, score: 0.9263101, categoryName: nil, displayName: nil)], - boundingBox: CGRect(x: 616.0, y: 193, width: 430.0, height: 430.0), + categories: [ResultCategory(index: 0, score: 0.92513907, categoryName: nil, displayName: nil)], + boundingBox: CGRect(x: 616.0, y: 192, width: 430.0, height: 430.0), keypoints: nil) ] @@ -45,7 +45,8 @@ final class FaceDetectorTests: XCTestCase { let FaceDetectorService = FaceDetectorService.stillImageDetectorService( modelPath: modelPath, minDetectionConfidence: minDetectionConfidence, - minSuppressionThreshold: minSuppressionThreshold) + minSuppressionThreshold: minSuppressionThreshold, + delegate: .CPU) return FaceDetectorService! 
} diff --git a/examples/face_detector/ios/Podfile b/examples/face_detector/ios/Podfile index d63eaed6..2f3f057b 100644 --- a/examples/face_detector/ios/Podfile +++ b/examples/face_detector/ios/Podfile @@ -3,7 +3,7 @@ platform :ios, '15.0' target 'FaceDetector' do # Comment the next line if you don't want to use dynamic frameworks use_frameworks! - pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' # Pods for FaceDetector end diff --git a/examples/face_detector/ios/Podfile.lock b/examples/face_detector/ios/Podfile.lock index 38cfbfc2..b8a062cb 100644 --- a/examples/face_detector/ios/Podfile.lock +++ b/examples/face_detector/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: afcfdd760c84f5c1e0b096a704ce46b44b4a02f7 +PODFILE CHECKSUM: 6be2fe8a802a3358accb65299f2598dd4d5f57e1 COCOAPODS: 1.14.3 diff --git a/examples/face_landmarker/ios/FaceLandmarkerTests/FaceLandmarkerTests.swift b/examples/face_landmarker/ios/FaceLandmarkerTests/FaceLandmarkerTests.swift index e741e202..b46c2490 100644 --- a/examples/face_landmarker/ios/FaceLandmarkerTests/FaceLandmarkerTests.swift +++ b/examples/face_landmarker/ios/FaceLandmarkerTests/FaceLandmarkerTests.swift @@ -520,7 +520,8 @@ final class FaceLandmarkerTests: XCTestCase { numFaces: 1, minFaceDetectionConfidence: 
FaceLandmarkerTests.minFaceDetectionConfidence, minFacePresenceConfidence: FaceLandmarkerTests.minFacePresenceConfidence, - minTrackingConfidence: FaceLandmarkerTests.minTrackingConfidence)! + minTrackingConfidence: FaceLandmarkerTests.minTrackingConfidence, + delegate: .CPU)! return faceLandmarkerService } diff --git a/examples/face_landmarker/ios/Podfile b/examples/face_landmarker/ios/Podfile index aeb6f601..2d6c77c2 100644 --- a/examples/face_landmarker/ios/Podfile +++ b/examples/face_landmarker/ios/Podfile @@ -5,7 +5,7 @@ target 'FaceLandmarker' do # Comment the next line if you don't want to use dynamic frameworks use_frameworks! - pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' end diff --git a/examples/face_landmarker/ios/Podfile.lock b/examples/face_landmarker/ios/Podfile.lock index 325a2e1b..5f1497f6 100644 --- a/examples/face_landmarker/ios/Podfile.lock +++ b/examples/face_landmarker/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: 67c95a3ac9f403fadf44f5735ace6ec9fcf479a6 +PODFILE CHECKSUM: bc9d5dc865832cdf89ed473b9d9ae3482e24f537 COCOAPODS: 1.14.3 diff --git a/examples/gesture_recognizer/ios/GestureRecognizerTests/GestureRecognizerTests.swift b/examples/gesture_recognizer/ios/GestureRecognizerTests/GestureRecognizerTests.swift index dba8e620..bb7ade5e 100644 --- 
a/examples/gesture_recognizer/ios/GestureRecognizerTests/GestureRecognizerTests.swift +++ b/examples/gesture_recognizer/ios/GestureRecognizerTests/GestureRecognizerTests.swift @@ -31,30 +31,30 @@ final class GestureRecognizerTests: XCTestCase { compatibleWith: nil)! static let result = GestureRecognizerResult( - gestures: [[ResultCategory(index: -1, score: 0.74601436, categoryName: "Thumb_Up", displayName: "")]], + gestures: [[ResultCategory(index: -1, score: 0.7283777, categoryName: "Thumb_Up", displayName: "")]], handedness: [], landmarks: [[ - NormalizedLandmark(x: 0.6146113, y: 0.71075666, z: -4.1557226e-07, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.6142792, y: 0.57649153, z: -0.040831544, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5836266, y: 0.4429407, z: -0.059525516, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5389037, y: 0.33637148, z: -0.07342299, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.536148, y: 0.25158498, z: -0.07771388, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4898765, y: 0.4913109, z: -0.030454714, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4276508, y: 0.50301707, z: -0.06859867, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.49330515, y: 0.52595127, z: -0.0773961, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.52693504, y: 0.5121813, z: -0.07744958, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4763346, y: 0.5743718, z: -0.023844246, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.42159313, y: 0.58094376, z: -0.06347593, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.50296295, y: 0.60153985, z: -0.057907313, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.52059495, y: 0.57536906, z: -0.046426427, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.47042432, y: 0.6498483, z: -0.025004275, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.42147171, y: 0.65280235, z: 
-0.069050804, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.49437872, y: 0.66357565, z: -0.046906527, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5176527, y: 0.6408466, z: -0.022207312, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4691668, y: 0.7234682, z: -0.029635455, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.43116334, y: 0.7330426, z: -0.056126874, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.48526073, y: 0.7278307, z: -0.041881826, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5117951, y: 0.70887417, z: -0.024859443, visibility: nil, presence: nil) + NormalizedLandmark(x: 0.6129676, y: 0.70157504, z: -4.5833377e-07, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.6159242, y: 0.5730554, z: -0.04404007, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.58462656, y: 0.45141116, z: -0.066422015, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.54258853, y: 0.3550938, z: -0.08355088, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5299578, y: 0.27741316, z: -0.09152996, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.4884828, y: 0.48931584, z: -0.03891499, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.42707062, y: 0.5070781, z: -0.082204446, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.48659548, y: 0.52944756, z: -0.09566363, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5170652, y: 0.5180234, z: -0.097826585, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.47752064, y: 0.5746913, z: -0.030233975, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.42322388, y: 0.58384126, z: -0.06978146, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5008309, y: 0.6011655, z: -0.062682286, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5144273, y: 0.57651, z: -0.048970204, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.47189528, y: 0.65008116, z: 
-0.029931678, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.4212282, y: 0.6498341, z: -0.071003094, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.49262476, y: 0.65974927, z: -0.04700193, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5128528, y: 0.63937056, z: -0.020825379, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.47315174, y: 0.721069, z: -0.033766963, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.4348337, y: 0.7294104, z: -0.058631197, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.48701334, y: 0.7236482, z: -0.04348786, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5102773, y: 0.7046261, z: -0.02522209, visibility: nil, presence: nil) ]], worldLandmarks: [], timestampInMilliseconds: 0) @@ -68,7 +68,8 @@ final class GestureRecognizerTests: XCTestCase { modelPath: modelPath, minHandDetectionConfidence: GestureRecognizerTests.minHandDetectionConfidence, minHandPresenceConfidence: GestureRecognizerTests.minHandPresenceConfidence, - minTrackingConfidence: GestureRecognizerTests.minTrackingConfidence) + minTrackingConfidence: GestureRecognizerTests.minTrackingConfidence, + delegate: .CPU) return gestureRecognizerService! } diff --git a/examples/gesture_recognizer/ios/Podfile b/examples/gesture_recognizer/ios/Podfile index 471c320c..b5473942 100644 --- a/examples/gesture_recognizer/ios/Podfile +++ b/examples/gesture_recognizer/ios/Podfile @@ -6,7 +6,7 @@ target 'GestureRecognizer' do use_frameworks! 
# Pods for GestureRecognizer - pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' end diff --git a/examples/gesture_recognizer/ios/Podfile.lock b/examples/gesture_recognizer/ios/Podfile.lock index 08991b64..fe6d71e3 100644 --- a/examples/gesture_recognizer/ios/Podfile.lock +++ b/examples/gesture_recognizer/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: f33191b008b693a9fe37c9716b85dce882f844f0 +PODFILE CHECKSUM: 21322833069b2d5064c8614269f9926360ff02bd COCOAPODS: 1.14.3 diff --git a/examples/hand_landmarker/ios/HandLandmarkerTests/HandLandmarkerTests.swift b/examples/hand_landmarker/ios/HandLandmarkerTests/HandLandmarkerTests.swift index e5098c4d..85675865 100644 --- a/examples/hand_landmarker/ios/HandLandmarkerTests/HandLandmarkerTests.swift +++ b/examples/hand_landmarker/ios/HandLandmarkerTests/HandLandmarkerTests.swift @@ -30,27 +30,27 @@ final class HandLandmarkerTests: XCTestCase { compatibleWith: nil)! 
static let results: [[NormalizedLandmark]] = [[ - NormalizedLandmark(x: 0.6146113, y: 0.71075666, z: -4.1557226e-07, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.6142792, y: 0.57649153, z: -0.040831544, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5836266, y: 0.4429407, z: -0.059525516, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5389037, y: 0.33637148, z: -0.07342299, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.536148, y: 0.25158498, z: -0.07771388, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4898765, y: 0.4913109, z: -0.030454714, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4276508, y: 0.50301707, z: -0.06859867, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.49330515, y: 0.52595127, z: -0.0773961, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.52693504, y: 0.5121813, z: -0.07744958, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4763346, y: 0.5743718, z: -0.023844246, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.42159313, y: 0.58094376, z: -0.06347593, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.50296295, y: 0.60153985, z: -0.057907313, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.52059495, y: 0.57536906, z: -0.046426427, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.47042432, y: 0.6498483, z: -0.025004275, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.42147171, y: 0.65280235, z: -0.069050804, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.49437872, y: 0.66357565, z: -0.046906527, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5176527, y: 0.6408466, z: -0.022207312, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.4691668, y: 0.7234682, z: -0.029635455, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.43116334, y: 0.7330426, z: -0.056126874, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.48526073, y: 0.7278307, z: 
-0.041881826, visibility: nil, presence: nil), - NormalizedLandmark(x: 0.5117951, y: 0.70887417, z: -0.024859443, visibility: nil, presence: nil) + NormalizedLandmark(x: 0.6129676, y: 0.70157504, z: -4.5833377e-07, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.6159242, y: 0.5730554, z: -0.04404007, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.58462656, y: 0.45141116, z: -0.066422015, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.54258853, y: 0.3550938, z: -0.08355088, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5299578, y: 0.27741316, z: -0.09152996, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.4884828, y: 0.48931584, z: -0.03891499, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.42707062, y: 0.5070781, z: -0.082204446, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.48659548, y: 0.52944756, z: -0.09566363, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5170652, y: 0.5180234, z: -0.097826585, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.47752064, y: 0.5746913, z: -0.030233975, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.42322388, y: 0.58384126, z: -0.06978146, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5008309, y: 0.6011655, z: -0.062682286, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5144273, y: 0.57651, z: -0.048970204, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.47189528, y: 0.65008116, z: -0.029931678, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.4212282, y: 0.6498341, z: -0.071003094, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.49262476, y: 0.65974927, z: -0.04700193, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5128528, y: 0.63937056, z: -0.020825379, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.47315174, y: 0.721069, z: -0.033766963, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.4348337, y: 0.7294104, z: 
-0.058631197, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.48701334, y: 0.7236482, z: -0.04348786, visibility: nil, presence: nil), + NormalizedLandmark(x: 0.5102773, y: 0.7046261, z: -0.02522209, visibility: nil, presence: nil) ]] func handLandmarkerWithModelPath( @@ -63,7 +63,8 @@ final class HandLandmarkerTests: XCTestCase { numHands: 1, minHandDetectionConfidence: HandLandmarkerTests.minHandDetectionConfidence, minHandPresenceConfidence: HandLandmarkerTests.minHandPresenceConfidence, - minTrackingConfidence: HandLandmarkerTests.minTrackingConfidence)! + minTrackingConfidence: HandLandmarkerTests.minTrackingConfidence, + delegate: .CPU)! return handLandmarkerService } diff --git a/examples/hand_landmarker/ios/Podfile b/examples/hand_landmarker/ios/Podfile index 86087fcf..5e751a26 100644 --- a/examples/hand_landmarker/ios/Podfile +++ b/examples/hand_landmarker/ios/Podfile @@ -5,7 +5,7 @@ target 'HandLandmarker' do # Comment the next line if you don't want to use dynamic frameworks use_frameworks! 
- pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' end diff --git a/examples/hand_landmarker/ios/Podfile.lock b/examples/hand_landmarker/ios/Podfile.lock index f859d05c..b9566432 100644 --- a/examples/hand_landmarker/ios/Podfile.lock +++ b/examples/hand_landmarker/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: 3c452c2e5aa9a7b985ff30e7f9ae7a2aa8e72e35 +PODFILE CHECKSUM: a81b42db9308e2e026806c52b5f3cdce29246c81 COCOAPODS: 1.14.3 diff --git a/examples/image_classification/ios/ImageClassifierTests/ImageClassifierTests.swift b/examples/image_classification/ios/ImageClassifierTests/ImageClassifierTests.swift index 4e3beef7..a390599a 100644 --- a/examples/image_classification/ios/ImageClassifierTests/ImageClassifierTests.swift +++ b/examples/image_classification/ios/ImageClassifierTests/ImageClassifierTests.swift @@ -73,7 +73,8 @@ final class ImageClassifierTests: XCTestCase { let imageClassifierService = ImageClassifierService.stillImageClassifierService( model: model, scoreThreshold: scoreThreshold, - maxResult: maxResult) + maxResult: maxResult, + delegate: .CPU) return imageClassifierService! 
} diff --git a/examples/image_classification/ios/Podfile b/examples/image_classification/ios/Podfile index 746b57ad..afc96d02 100644 --- a/examples/image_classification/ios/Podfile +++ b/examples/image_classification/ios/Podfile @@ -3,7 +3,7 @@ platform :ios, '15.0' target 'ImageClassifier' do # Comment the next line if you don't want to use dynamic frameworks use_frameworks! - pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' end target 'ImageClassifierTests' do diff --git a/examples/image_classification/ios/Podfile.lock b/examples/image_classification/ios/Podfile.lock index d1edbc87..b9df94ae 100644 --- a/examples/image_classification/ios/Podfile.lock +++ b/examples/image_classification/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: a0f3ecd4f45222b1e313361335ae51166a1808d3 +PODFILE CHECKSUM: 056325e6ad58feba6484ca5822378ec4ef78a74e COCOAPODS: 1.14.3 diff --git a/examples/image_embedder/ios/Podfile b/examples/image_embedder/ios/Podfile index 43cd7e27..d7188c5c 100644 --- a/examples/image_embedder/ios/Podfile +++ b/examples/image_embedder/ios/Podfile @@ -4,7 +4,7 @@ target 'ImageEmbedder' do # Comment the next line if you don't want to use dynamic frameworks use_frameworks! 
- pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' # Pods for ImageEmbedder end diff --git a/examples/image_embedder/ios/Podfile.lock b/examples/image_embedder/ios/Podfile.lock index cead3f5f..470f2518 100644 --- a/examples/image_embedder/ios/Podfile.lock +++ b/examples/image_embedder/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: 073d345d1c67efa6f7bdadad7c0c684aed30e061 +PODFILE CHECKSUM: a64a373fecc54efb0e385a4fed15585be18ce9fe COCOAPODS: 1.14.3 diff --git a/examples/image_segmentation/ios/ImageSegmenter/Services/SegmentedImageRenderer.swift b/examples/image_segmentation/ios/ImageSegmenter/Services/SegmentedImageRenderer.swift index 99419ec9..f555277e 100644 --- a/examples/image_segmentation/ios/ImageSegmenter/Services/SegmentedImageRenderer.swift +++ b/examples/image_segmentation/ios/ImageSegmenter/Services/SegmentedImageRenderer.swift @@ -20,8 +20,6 @@ class SegmentedImageRenderer { private var computePipelineState: MTLComputePipelineState? - var inputTexture2: MTLTexture? - private var textureCache: CVMetalTextureCache! 
let context: CIContext @@ -144,7 +142,6 @@ class SegmentedImageRenderer { let threadgroupsPerGrid = MTLSize(width: (inputTexture.width + width - 1) / width, height: (inputTexture.height + height - 1) / height, depth: 1) - print(threadgroupsPerGrid) commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup) commandEncoder.endEncoding() @@ -159,10 +156,10 @@ class SegmentedImageRenderer { /** This method merge frame of video with backgroud image using segment data and return an pixel buffer **/ - func render(ciImage: CIImage, segmentDatas: UnsafePointer?) -> CVPixelBuffer? { + func render(ciImage: CIImage, categoryMasks: UnsafePointer?) -> CVPixelBuffer? { - guard let segmentDatas = segmentDatas, isPrepared else { - print("segmentDatas not found") + guard let categoryMasks = categoryMasks else { + print("confidenceMasks not found") return nil } @@ -189,8 +186,6 @@ class SegmentedImageRenderer { textureDescriptor.usage = .unknown let inputScaleTexture = metalDevice.makeTexture(descriptor: textureDescriptor) - resizeTexture(sourceTexture: inputTexture2!, desTexture: inputScaleTexture!, targetSize: MTLSize(width: inputTexture.width, height: inputTexture.height, depth: 3), resizeMode: .scaleToFill) - // Set up command queue, buffer, and encoder. guard let commandQueue = commandQueue, let commandBuffer = commandQueue.makeCommandBuffer(), @@ -203,9 +198,8 @@ class SegmentedImageRenderer { commandEncoder.label = "Demo Metal" commandEncoder.setComputePipelineState(computePipelineState!) commandEncoder.setTexture(inputTexture, index: 0) - commandEncoder.setTexture(inputScaleTexture, index: 1) - commandEncoder.setTexture(outputTexture, index: 2) - let buffer = metalDevice.makeBuffer(bytes: segmentDatas, length: inputTexture.width * inputTexture.height * MemoryLayout.size)! 
+ commandEncoder.setTexture(outputTexture, index: 1) + let buffer = metalDevice.makeBuffer(bytes: categoryMasks, length: inputTexture.width * inputTexture.height * MemoryLayout.size)! commandEncoder.setBuffer(buffer, offset: 0, index: 0) var imageWidth: Int = Int(inputTexture.width) commandEncoder.setBytes(&imageWidth, length: MemoryLayout.size, index: 1) diff --git a/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/CameraViewController.swift b/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/CameraViewController.swift index dd52635a..e1ac735d 100644 --- a/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/CameraViewController.swift +++ b/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/CameraViewController.swift @@ -228,8 +228,6 @@ extension CameraViewController: ImageSegmenterServiceLiveStreamDelegate { guard let imageSegmenterResult = result?.imageSegmenterResults.first as? ImageSegmenterResult, let confidenceMasks = imageSegmenterResult.categoryMask else { return } let confidenceMask = confidenceMasks.uint8Data -// let bytesArray = UnsafeBufferPointer(start: confidenceMask, count: confidenceMasks.width * confidenceMasks.height).map{$0} -// print(bytesArray[0], bytesArray[1]) if !render.isPrepared { render.prepare(with: formatDescription, outputRetainedBufferCountHint: 3) diff --git a/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/MediaLibraryViewController.swift b/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/MediaLibraryViewController.swift index ae261052..a643e33d 100644 --- a/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/MediaLibraryViewController.swift +++ b/examples/image_segmentation/ios/ImageSegmenter/ViewContoller/MediaLibraryViewController.swift @@ -205,7 +205,7 @@ extension MediaLibraryViewController: UIImagePickerControllerDelegate, UINavigat addPlayerViewControllerAsChild() guard let player = playerViewController?.player, let playerItem = player.currentItem 
else { return } let timeRange = CMTimeRange(start: .zero, duration: asset.duration) - var datas: [UnsafePointer] = [] +// var datas: [UnsafePointer] = [] let videoComposition = AVMutableVideoComposition(asset: asset) { [weak self] request in guard let self = self else { return } backgroundQueue.async { @@ -221,11 +221,9 @@ extension MediaLibraryViewController: UIImagePickerControllerDelegate, UINavigat } self.inferenceResultDeliveryDelegate?.didPerformInference(result: resultBundle) guard let result = resultBundle.imageSegmenterResults.first, let result = result else { return } - let marks = result.confidenceMasks - let _mark = marks![0] - let float32Data = _mark.float32Data - datas.append(float32Data) - guard let outputPixelBuffer = self.render.render(ciImage: sourceImage, segmentDatas: datas.removeFirst()) else { + let mark = result.categoryMask + let uint8Data = mark?.uint8Data + guard let outputPixelBuffer = self.render.render(ciImage: sourceImage, categoryMasks: uint8Data) else { request.finish(with: sourceImage, context: nil) return } diff --git a/examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITests.swift b/examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITests.swift deleted file mode 100644 index ccf7c1cd..00000000 --- a/examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITests.swift +++ /dev/null @@ -1,41 +0,0 @@ -// -// ImageSegmenterUITests.swift -// ImageSegmenterUITests -// -// Created by MBA0077 on 12/5/23. -// - -import XCTest - -final class ImageSegmenterUITests: XCTestCase { - - override func setUpWithError() throws { - // Put setup code here. This method is called before the invocation of each test method in the class. - - // In UI tests it is usually best to stop immediately when a failure occurs. - continueAfterFailure = false - - // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. 
The setUp method is a good place to do this. - } - - override func tearDownWithError() throws { - // Put teardown code here. This method is called after the invocation of each test method in the class. - } - - func testExample() throws { - // UI tests must launch the application that they test. - let app = XCUIApplication() - app.launch() - - // Use XCTAssert and related functions to verify your tests produce the correct results. - } - - func testLaunchPerformance() throws { - if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) { - // This measures how long it takes to launch your application. - measure(metrics: [XCTApplicationLaunchMetric()]) { - XCUIApplication().launch() - } - } - } -} diff --git a/examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITestsLaunchTests.swift b/examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITestsLaunchTests.swift deleted file mode 100644 index 8b137891..00000000 --- a/examples/image_segmentation/ios/ImageSegmenterUITests/ImageSegmenterUITestsLaunchTests.swift +++ /dev/null @@ -1 +0,0 @@ - diff --git a/examples/image_segmentation/ios/Podfile b/examples/image_segmentation/ios/Podfile index fff4ee4b..86f37017 100644 --- a/examples/image_segmentation/ios/Podfile +++ b/examples/image_segmentation/ios/Podfile @@ -5,16 +5,6 @@ target 'ImageSegmenter' do # Comment the next line if you don't want to use dynamic frameworks use_frameworks! - pod 'MediaPipeTasksVision' + pod 'MediaPipeTasksVision', '0.10.14' - target 'ImageSegmenterTests' do - inherit! :search_paths - # Pods for testing - end end - -target 'ImageSegmenterUITests' do - inherit! 
:search_paths - # Pods for testing -end - diff --git a/examples/image_segmentation/ios/Podfile.lock b/examples/image_segmentation/ios/Podfile.lock index 3a56d67e..ff5c46fe 100644 --- a/examples/image_segmentation/ios/Podfile.lock +++ b/examples/image_segmentation/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: 5da73db60b6d26e627c8f1906095fe364c7d0a7f +PODFILE CHECKSUM: 47a33f379bd25db9573fe8b00f23769a70bbd652 COCOAPODS: 1.14.3 diff --git a/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj b/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj index 79a339ec..0beb01a3 100644 --- a/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj +++ b/examples/object_detection/ios/ObjectDetector.xcodeproj/project.pbxproj @@ -59,8 +59,6 @@ BF2B2D3C2A3C09E300589A11 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; BF2B2D412A3C09E300589A11 /* ObjectDetectorTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = ObjectDetectorTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; BF2B2D452A3C09E300589A11 /* ObjectDetectorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ObjectDetectorTests.swift; sourceTree = ""; }; - 
BF2B2D4F2A3C09E300589A11 /* ObjectDetectorUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ObjectDetectorUITests.swift; sourceTree = ""; }; - BF2B2D512A3C09E300589A11 /* ObjectDetectorUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ObjectDetectorUITestsLaunchTests.swift; sourceTree = ""; }; BF2B2D602A3C0C2900589A11 /* CameraFeedService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraFeedService.swift; sourceTree = ""; }; BF2B2D692A3C0C3F00589A11 /* BottomSheetViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BottomSheetViewController.swift; sourceTree = ""; }; BF2B2D6C2A3C0EF900589A11 /* ObjectDetectorService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ObjectDetectorService.swift; sourceTree = ""; }; @@ -156,7 +154,6 @@ children = ( BF2B2D2D2A3C09E200589A11 /* ObjectDetector */, BF2B2D442A3C09E300589A11 /* ObjectDetectorTests */, - BF2B2D4E2A3C09E300589A11 /* ObjectDetectorUITests */, BF2B2D2C2A3C09E200589A11 /* Products */, A5DDC8DF7C2365A6B2C04BE4 /* Pods */, 56B60B47FC83404E96CF4A2B /* Frameworks */, @@ -201,15 +198,6 @@ path = ObjectDetectorTests; sourceTree = ""; }; - BF2B2D4E2A3C09E300589A11 /* ObjectDetectorUITests */ = { - isa = PBXGroup; - children = ( - BF2B2D4F2A3C09E300589A11 /* ObjectDetectorUITests.swift */, - BF2B2D512A3C09E300589A11 /* ObjectDetectorUITestsLaunchTests.swift */, - ); - path = ObjectDetectorUITests; - sourceTree = ""; - }; BF2B2D672A3C0C3F00589A11 /* ViewControllers */ = { isa = PBXGroup; children = ( @@ -570,7 +558,7 @@ CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = H83UK2M7VU; + DEVELOPMENT_TEAM = ""; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = ObjectDetector/Info.plist; INFOPLIST_KEY_NSCameraUsageDescription = 
"This app uses camera to detection the objects that appear in the camera feed."; @@ -603,7 +591,7 @@ CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = H83UK2M7VU; + DEVELOPMENT_TEAM = ""; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = ObjectDetector/Info.plist; INFOPLIST_KEY_NSCameraUsageDescription = "This app uses camera to detection the objects that appear in the camera feed."; diff --git a/examples/object_detection/ios/ObjectDetectorTests/ObjectDetectorTests.swift b/examples/object_detection/ios/ObjectDetectorTests/ObjectDetectorTests.swift index 5e60f878..d272a97e 100644 --- a/examples/object_detection/ios/ObjectDetectorTests/ObjectDetectorTests.swift +++ b/examples/object_detection/ios/ObjectDetectorTests/ObjectDetectorTests.swift @@ -26,31 +26,31 @@ final class ObjectDetectorTests: XCTestCase { static let efficientnetLite0Results: [Detection] = [ Detection( - categories: [ResultCategory(index: -1, score: 0.9396609, categoryName: "person", displayName: nil)], - boundingBox: CGRect(x: 214.0, y: 11.0, width: 120.0, height: 262.0), + categories: [ResultCategory(index: -1, score: 0.941339, categoryName: "person", displayName: nil)], + boundingBox: CGRect(x: 213.0, y: 11.0, width: 120.0, height: 261.0), keypoints: nil), Detection( - categories: [ResultCategory(index: -1, score: 0.77432173, categoryName: "dog", displayName: nil)], - boundingBox: CGRect(x: 66.0, y: 254.0, width: 57.0, height: 77.0), + categories: [ResultCategory(index: -1, score: 0.77810854, categoryName: "dog", displayName: nil)], + boundingBox: CGRect(x: 67.0, y: 254.0, width: 56.0, height: 76.0), keypoints: nil), Detection( - categories: [ResultCategory(index: -1, score: 0.6645179, categoryName: "person", displayName: nil)], - boundingBox: CGRect(x: 144.0, y: 18.0, width: 82.0, height: 243.0), + categories: [ResultCategory(index: -1, score: 0.6601211, categoryName: "person", displayName: nil)], + boundingBox: CGRect(x: 144.0, 
y: 18.0, width: 82.0, height: 242.0), keypoints: nil) ] static let efficientnetLite2Results: [Detection] = [ Detection( - categories: [ResultCategory(index: -1, score: 0.917458951, categoryName: "dog", displayName: nil)], - boundingBox: CGRect(x: 71.0, y: 254.0, width: 53.0, height: 74.0), + categories: [ResultCategory(index: -1, score: 0.92731416, categoryName: "dog", displayName: nil)], + boundingBox: CGRect(x: 71.0, y: 254.0, width: 52.0, height: 74.0), keypoints: nil), Detection( - categories: [ResultCategory(index: -1, score: 0.90494984, categoryName: "person", displayName: nil)], - boundingBox: CGRect(x: 207.0, y: 8.0, width: 126.0, height: 263.0), + categories: [ResultCategory(index: -1, score: 0.9009373, categoryName: "person", displayName: nil)], + boundingBox: CGRect(x: 207.0, y: 7.0, width: 126.0, height: 264.0), keypoints: nil), Detection( - categories: [ResultCategory(index: -1, score: 0.810646474, categoryName: "person", displayName: nil)], - boundingBox: CGRect(x: 147.0, y: 13.0, width: 81.0, height: 254.0), + categories: [ResultCategory(index: -1, score: 0.8202129, categoryName: "person", displayName: nil)], + boundingBox: CGRect(x: 147.0, y: 13.0, width: 81.0, height: 255.0), keypoints: nil) ] @@ -60,7 +60,8 @@ final class ObjectDetectorTests: XCTestCase { let objectDetectorHelper = ObjectDetectorService.stillImageDetectorService( model: model, maxResults: 3, - scoreThreshold: 0 + scoreThreshold: 0, + delegate: .CPU ) return try XCTUnwrap(objectDetectorHelper) } diff --git a/examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITests.swift b/examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITests.swift deleted file mode 100644 index 01bbcb1b..00000000 --- a/examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITests.swift +++ /dev/null @@ -1,41 +0,0 @@ -// -// ObjectDetectorUITests.swift -// ObjectDetectorUITests -// -// Created by MBA0077 on 6/16/23. 
-// - -import XCTest - -final class ObjectDetectorUITests: XCTestCase { - - override func setUpWithError() throws { - // Put setup code here. This method is called before the invocation of each test method in the class. - - // In UI tests it is usually best to stop immediately when a failure occurs. - continueAfterFailure = false - - // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this. - } - - override func tearDownWithError() throws { - // Put teardown code here. This method is called after the invocation of each test method in the class. - } - - func testExample() throws { - // UI tests must launch the application that they test. - let app = XCUIApplication() - app.launch() - - // Use XCTAssert and related functions to verify your tests produce the correct results. - } - - func testLaunchPerformance() throws { - if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) { - // This measures how long it takes to launch your application. - measure(metrics: [XCTApplicationLaunchMetric()]) { - XCUIApplication().launch() - } - } - } -} diff --git a/examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITestsLaunchTests.swift b/examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITestsLaunchTests.swift deleted file mode 100644 index 3e063919..00000000 --- a/examples/object_detection/ios/ObjectDetectorUITests/ObjectDetectorUITestsLaunchTests.swift +++ /dev/null @@ -1,32 +0,0 @@ -// -// ObjectDetectorUITestsLaunchTests.swift -// ObjectDetectorUITests -// -// Created by MBA0077 on 6/16/23. 
-// - -import XCTest - -final class ObjectDetectorUITestsLaunchTests: XCTestCase { - - override class var runsForEachTargetApplicationUIConfiguration: Bool { - true - } - - override func setUpWithError() throws { - continueAfterFailure = false - } - - func testLaunch() throws { - let app = XCUIApplication() - app.launch() - - // Insert steps here to perform after app launch but before taking a screenshot, - // such as logging into a test account or navigating somewhere in the app - - let attachment = XCTAttachment(screenshot: app.screenshot()) - attachment.name = "Launch Screen" - attachment.lifetime = .keepAlways - add(attachment) - } -} diff --git a/examples/object_detection/ios/Podfile b/examples/object_detection/ios/Podfile index bed78f55..9d263b3e 100644 --- a/examples/object_detection/ios/Podfile +++ b/examples/object_detection/ios/Podfile @@ -3,7 +3,7 @@ platform :ios, '15.0' target 'ObjectDetector' do use_frameworks! - pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' # Pods for ObjectDetector end diff --git a/examples/object_detection/ios/Podfile.lock b/examples/object_detection/ios/Podfile.lock index 1d32f2c8..ece6afde 100644 --- a/examples/object_detection/ios/Podfile.lock +++ b/examples/object_detection/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: 
29f14d152cc8d5ead847a890518b7de9bb9955fe +PODFILE CHECKSUM: 101d18962053172fe8f8251d4b2b56fba67948a8 COCOAPODS: 1.14.3 diff --git a/examples/pose_landmarker/ios/Podfile b/examples/pose_landmarker/ios/Podfile index 0ae72082..9f6c668b 100644 --- a/examples/pose_landmarker/ios/Podfile +++ b/examples/pose_landmarker/ios/Podfile @@ -4,7 +4,7 @@ target 'PoseLandmarker' do # Comment the next line if you don't want to use dynamic frameworks use_frameworks! - pod 'MediaPipeTasksVision', '0.10.12' + pod 'MediaPipeTasksVision', '0.10.14' end target 'PoseLandmarkerTests' do diff --git a/examples/pose_landmarker/ios/Podfile.lock b/examples/pose_landmarker/ios/Podfile.lock index 85292aa3..2309c204 100644 --- a/examples/pose_landmarker/ios/Podfile.lock +++ b/examples/pose_landmarker/ios/Podfile.lock @@ -1,10 +1,10 @@ PODS: - - MediaPipeTasksCommon (0.10.12) - - MediaPipeTasksVision (0.10.12): - - MediaPipeTasksCommon (= 0.10.12) + - MediaPipeTasksCommon (0.10.14) + - MediaPipeTasksVision (0.10.14): + - MediaPipeTasksCommon (= 0.10.14) DEPENDENCIES: - - MediaPipeTasksVision (= 0.10.12) + - MediaPipeTasksVision (= 0.10.14) SPEC REPOS: trunk: @@ -12,9 +12,9 @@ SPEC REPOS: - MediaPipeTasksVision SPEC CHECKSUMS: - MediaPipeTasksCommon: 254e6bff77804b262f6ecf180477142ea551e802 - MediaPipeTasksVision: 78d5c47cd7996b4d815bacba0a52dbf01458dfaf + MediaPipeTasksCommon: 5660099c2dd81f7ac4a7a5f51055785ead8e0e64 + MediaPipeTasksVision: 0fac0db83c0b45e4d7811a9227be5e571403cf83 -PODFILE CHECKSUM: 1e7f59eff42749a531528f7423d8835b804da38f +PODFILE CHECKSUM: 109f9a21c0ab4fba8e5a9b73fb9b58655ab46a0b COCOAPODS: 1.14.3