diff --git a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
index f3b74900c9..2e3a5464ce 100644
--- a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
+++ b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
@@ -23,21 +23,21 @@
     "mediapipe/objc/testing/app/BUILD"
   ],
   "buildTargets" : [
-    "//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
-    "//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
-    "//mediapipe/examples/ios/faceeffect:FaceEffectApp",
-    "//mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp",
-    "//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
-    "//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
-    "//mediapipe/examples/ios/helloworld:HelloWorldApp",
-    "//mediapipe/examples/ios/holistictrackinggpu:HolisticTrackingGpuApp",
-    "//mediapipe/examples/ios/iristrackinggpu:IrisTrackingGpuApp",
-    "//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp",
-    "//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp",
-    "//mediapipe/examples/ios/objectdetectiontrackinggpu:ObjectDetectionTrackingGpuApp",
-    "//mediapipe/examples/ios/posetrackinggpu:PoseTrackingGpuApp",
-    "//mediapipe/examples/ios/selfiesegmentationgpu:SelfieSegmentationGpuApp",
-    "//mediapipe/objc:mediapipe_framework_ios"
+    "@//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
+    "@//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
+    "@//mediapipe/examples/ios/faceeffect:FaceEffectApp",
+    "@//mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp",
+    "@//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
+    "@//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
+    "@//mediapipe/examples/ios/helloworld:HelloWorldApp",
+    "@//mediapipe/examples/ios/holistictrackinggpu:HolisticTrackingGpuApp",
+    "@//mediapipe/examples/ios/iristrackinggpu:IrisTrackingGpuApp",
+    "@//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp",
+    "@//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp",
+    "@//mediapipe/examples/ios/objectdetectiontrackinggpu:ObjectDetectionTrackingGpuApp",
+    "@//mediapipe/examples/ios/posetrackinggpu:PoseTrackingGpuApp",
+    "@//mediapipe/examples/ios/selfiesegmentationgpu:SelfieSegmentationGpuApp",
+    "@//mediapipe/objc:mediapipe_framework_ios"
   ],
   "optionSet" : {
     "BazelBuildOptionsDebug" : {
diff --git a/mediapipe/examples/ios/common/CommonViewController.mm b/mediapipe/examples/ios/common/CommonViewController.mm
index f6c47eacfc..84d25d30e5 100644
--- a/mediapipe/examples/ios/common/CommonViewController.mm
+++ b/mediapipe/examples/ios/common/CommonViewController.mm
@@ -112,6 +112,7 @@ - (void)viewWillAppear:(BOOL)animated {
                                                    withExtension:@"mov"]];
     self.videoSource = [[MPPPlayerInputSource alloc] initWithAVAsset:video];
     [self.videoSource setDelegate:self queue:self.videoQueue];
+    [self startGraph];
     dispatch_async(self.videoQueue, ^{
       [self.videoSource start];
     });
@@ -149,7 +150,7 @@ - (void)viewWillAppear:(BOOL)animated {
   }
 }
 
-- (void)startGraphAndCamera {
+- (void)startGraph {
   // Start running self.mediapipeGraph.
   NSError* error;
   if (![self.mediapipeGraph startWithError:&error]) {
@@ -158,6 +159,10 @@ - (void)startGraphAndCamera {
   else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
     NSLog(@"Failed to complete graph initial run: %@", error);
   }
+}
+
+- (void)startGraphAndCamera {
+  [self startGraph];
   // Start fetching frames from the camera.
   dispatch_async(self.videoQueue, ^{