diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index fa4e97db269..7e16bdfffff 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,3 +1,9 @@ +## 0.9.19+2 + +* Adds the `Camera` Swift protocol. +* Adds `DefaultCamera`, a `FLTCam`-based implementation of the `Camera` protocol. +* Migrates sample buffer delegates and `FlutterTexture` protocol implementations to `DefaultCamera`. + ## 0.9.19+1 * Adds `audioCaptureDeviceFactory` to `FLTCamConfiguration`. diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift index 22d6b9485ae..e21497d1a1e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.swift @@ -4,11 +4,11 @@ import XCTest +@testable import camera_avfoundation + // Import Objectice-C part of the implementation when SwiftPM is used. 
#if canImport(camera_avfoundation_objc) import camera_avfoundation_objc -#else - import camera_avfoundation #endif /// Utils for creating default class instances used in tests @@ -81,16 +81,16 @@ enum CameraTestUtils { return configuration } - static func createTestCamera(_ configuration: FLTCamConfiguration) -> FLTCam { - return FLTCam(configuration: configuration, error: nil) + static func createTestCamera(_ configuration: FLTCamConfiguration) -> DefaultCamera { + return DefaultCamera(configuration: configuration, error: nil) } - static func createTestCamera() -> FLTCam { + static func createTestCamera() -> DefaultCamera { return createTestCamera(createTestCameraConfiguration()) } static func createCameraWithCaptureSessionQueue(_ captureSessionQueue: DispatchQueue) - -> FLTCam + -> DefaultCamera { let configuration = createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift index f60d4a43f2c..20952741f71 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamExposureTests.swift @@ -12,7 +12,7 @@ import XCTest #endif final class FLTCamExposureTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice, MockDeviceOrientationProvider) { + private func createCamera() -> (Camera, MockCaptureDevice, MockDeviceOrientationProvider) { let mockDevice = MockCaptureDevice() let mockDeviceOrientationProvider = MockDeviceOrientationProvider() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift index db82db82446..580ebdab109 100644 --- 
a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamFocusTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamSetFocusModeTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice, MockDeviceOrientationProvider) { + private func createCamera() -> (Camera, MockCaptureDevice, MockDeviceOrientationProvider) { let mockDevice = MockCaptureDevice() let mockDeviceOrientationProvider = MockDeviceOrientationProvider() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift index 762152db317..cd8d5d858a8 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetDeviceOrientationTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamSetDeviceOrientationTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureConnection, MockCaptureConnection) { + private func createCamera() -> (Camera, MockCaptureConnection, MockCaptureConnection) { let camera = CameraTestUtils.createTestCamera() let mockCapturePhotoOutput = MockCapturePhotoOutput() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift index a6e51dd9b7e..29ade54f7fe 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSetFlashModeTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamSetFlashModeTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice, MockCapturePhotoOutput) { + 
private func createCamera() -> (Camera, MockCaptureDevice, MockCapturePhotoOutput) { let mockDevice = MockCaptureDevice() let mockCapturePhotoOutput = MockCapturePhotoOutput() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift index 8cc507d9dbb..618d87edde3 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamZoomTests.swift @@ -13,7 +13,7 @@ import XCTest #endif final class FLTCamZoomTests: XCTestCase { - private func createCamera() -> (FLTCam, MockCaptureDevice) { + private func createCamera() -> (Camera, MockCaptureDevice) { let mockDevice = MockCaptureDevice() let configuration = CameraTestUtils.createTestCameraConfiguration() diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift index 14e30643e3a..3f719dacdb2 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCamera.swift @@ -2,14 +2,14 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -import camera_avfoundation +@testable import camera_avfoundation // Import Objectice-C part of the implementation when SwiftPM is used. #if canImport(camera_avfoundation_objc) import camera_avfoundation_objc #endif -final class MockCamera: FLTCam { +final class MockCamera: NSObject, Camera { var setDartApiStub: ((FCPCameraEventApi?) -> Void)? var setOnFrameAvailableStub: (((() -> Void)?) -> Void)? var getMinimumExposureOffsetStub: (() -> CGFloat)? @@ -42,7 +42,7 @@ final class MockCamera: FLTCam { var startImageStreamStub: ((FlutterBinaryMessenger) -> Void)? 
var stopImageStreamStub: (() -> Void)? - override var dartAPI: FCPCameraEventApi { + var dartAPI: FCPCameraEventApi? { get { preconditionFailure("Attempted to access unimplemented property: dartAPI") } @@ -51,7 +51,7 @@ final class MockCamera: FLTCam { } } - override var onFrameAvailable: (() -> Void) { + var onFrameAvailable: (() -> Void)? { get { preconditionFailure("Attempted to access unimplemented property: onFrameAvailable") } @@ -60,147 +60,149 @@ final class MockCamera: FLTCam { } } - override var minimumExposureOffset: CGFloat { + var videoFormat: FourCharCode = kCVPixelFormatType_32BGRA + + var isPreviewPaused: Bool = false + + var minimumExposureOffset: CGFloat { return getMinimumExposureOffsetStub?() ?? 0 } - override var maximumExposureOffset: CGFloat { + var maximumExposureOffset: CGFloat { return getMaximumExposureOffsetStub?() ?? 0 } - override var minimumAvailableZoomFactor: CGFloat { + var minimumAvailableZoomFactor: CGFloat { return getMinimumAvailableZoomFactorStub?() ?? 0 } - override var maximumAvailableZoomFactor: CGFloat { + var maximumAvailableZoomFactor: CGFloat { return getMaximumAvailableZoomFactorStub?() ?? 0 } - override func setUpCaptureSessionForAudioIfNeeded() { + func setUpCaptureSessionForAudioIfNeeded() { setUpCaptureSessionForAudioIfNeededStub?() } - override func reportInitializationState() {} + func reportInitializationState() {} - override func receivedImageStreamData() { + func receivedImageStreamData() { receivedImageStreamDataStub?() } - override func start() { + func start() { startStub?() } - override func stop() {} + func stop() {} - override func startVideoRecording( + func startVideoRecording( completion: @escaping (FlutterError?) -> Void, messengerForStreaming messenger: FlutterBinaryMessenger? 
) { startVideoRecordingStub?(completion, messenger) } - override func pauseVideoRecording() { + func pauseVideoRecording() { pauseVideoRecordingStub?() } - override func resumeVideoRecording() { + func resumeVideoRecording() { resumeVideoRecordingStub?() } - override func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { + func stopVideoRecording(completion: @escaping (String?, FlutterError?) -> Void) { stopVideoRecordingStub?(completion) } - override func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { + func captureToFile(completion: @escaping (String?, FlutterError?) -> Void) { captureToFileStub?(completion) } - override func setDeviceOrientation(_ orientation: UIDeviceOrientation) { + func setDeviceOrientation(_ orientation: UIDeviceOrientation) { setDeviceOrientationStub?(orientation) } - override func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) { + func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) { lockCaptureOrientationStub?(orientation) } - override func unlockCaptureOrientation() { + func unlockCaptureOrientation() { unlockCaptureOrientationStub?() } - override func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { + func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) { setImageFileFormatStub?(fileFormat) } - override func setExposureMode(_ mode: FCPPlatformExposureMode) { + func setExposureMode(_ mode: FCPPlatformExposureMode) { setExposureModeStub?(mode) } - override func setExposureOffset(_ offset: Double) { + func setExposureOffset(_ offset: Double) { setExposureOffsetStub?(offset) } - override func setExposurePoint( + func setExposurePoint( _ point: FCPPlatformPoint?, withCompletion: @escaping (FlutterError?) 
-> Void ) { setExposurePointStub?(point, withCompletion) } - override func setFocusMode(_ mode: FCPPlatformFocusMode) { + func setFocusMode(_ mode: FCPPlatformFocusMode) { setFocusModeStub?(mode) } - override func setFocusPoint( - _ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void - ) { + func setFocusPoint(_ point: FCPPlatformPoint?, completion: @escaping (FlutterError?) -> Void) { setFocusPointStub?(point, completion) } - override func setZoomLevel( + func setZoomLevel( _ zoom: CGFloat, withCompletion completion: @escaping (FlutterError?) -> Void ) { setZoomLevelStub?(zoom, completion) } - override func setFlashMode( + func setFlashMode( _ mode: FCPPlatformFlashMode, withCompletion completion: @escaping (FlutterError?) -> Void ) { setFlashModeStub?(mode, completion) } - override func pausePreview() { + func pausePreview() { pausePreviewStub?() } - override func resumePreview() { + func resumePreview() { resumePreviewStub?() } - override func setDescriptionWhileRecording( + func setDescriptionWhileRecording( _ cameraName: String, withCompletion completion: @escaping (FlutterError?) -> Void ) { setDescriptionWhileRecordingStub?(cameraName, completion) } - override func startImageStream(with messenger: FlutterBinaryMessenger) { + func startImageStream(with messenger: FlutterBinaryMessenger) { startImageStreamStub?(messenger) } - override func stopImageStream() { + func stopImageStream() { stopImageStreamStub?() } - override func captureOutput( + func captureOutput( _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection ) {} - override func close() {} + func close() {} - override func copyPixelBuffer() -> Unmanaged? { + func copyPixelBuffer() -> Unmanaged? 
{ return nil } } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift index 93915529f74..ad4b99c86b7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/PhotoCaptureTests.swift @@ -14,7 +14,7 @@ import XCTest /// Includes test cases related to photo capture operations for FLTCam class. final class PhotoCaptureTests: XCTestCase { - private func createCam(with captureSessionQueue: DispatchQueue) -> FLTCam { + private func createCam(with captureSessionQueue: DispatchQueue) -> DefaultCamera { let configuration = CameraTestUtils.createTestCameraConfiguration() configuration.captureSessionQueue = captureSessionQueue return CameraTestUtils.createTestCamera(configuration) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift index bea41564eb0..eeef97b292a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/SampleBufferTests.swift @@ -69,7 +69,7 @@ private class FakeMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper { /// Includes test cases related to sample buffer handling for FLTCam class. 
final class CameraSampleBufferTests: XCTestCase { private func createCamera() -> ( - FLTCam, + DefaultCamera, MockAssetWriter, MockAssetWriterInputPixelBufferAdaptor, MockAssetWriterInput diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift index 129638c5356..edb7a5fc479 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTests.swift @@ -33,7 +33,7 @@ private class MockImageStreamHandler: FLTImageStreamHandler { final class StreamingTests: XCTestCase { private func createCamera() -> ( - FLTCam, + DefaultCamera, AVCaptureOutput, CMSampleBuffer, AVCaptureConnection diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift new file mode 100644 index 00000000000..7fdde60680d --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Camera.swift @@ -0,0 +1,101 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import AVFoundation +import CoreMotion +import Flutter + +// Import Objective-C part of the implementation when SwiftPM is used. +#if canImport(camera_avfoundation_objc) + import camera_avfoundation_objc +#endif + +/// A class that manages camera's state and performs camera operations. +protocol Camera: FlutterTexture, AVCaptureVideoDataOutputSampleBufferDelegate, + AVCaptureAudioDataOutputSampleBufferDelegate +{ + /// The API instance used to communicate with the Dart side of the plugin. + /// Once initially set, this should only ever be accessed on the main thread. + var dartAPI: FCPCameraEventApi? 
{ get set } + + var onFrameAvailable: (() -> Void)? { get set } + + /// Format used for video and image streaming. + var videoFormat: FourCharCode { get set } + + var isPreviewPaused: Bool { get } + + var minimumAvailableZoomFactor: CGFloat { get } + var maximumAvailableZoomFactor: CGFloat { get } + var minimumExposureOffset: CGFloat { get } + var maximumExposureOffset: CGFloat { get } + + func setUpCaptureSessionForAudioIfNeeded() + + func reportInitializationState() + + /// Acknowledges the receipt of one image stream frame. + func receivedImageStreamData() + + func start() + func stop() + + /// Starts recording a video with an optional streaming messenger. + func startVideoRecording( + completion: @escaping (_ error: FlutterError?) -> Void, + messengerForStreaming: FlutterBinaryMessenger? + ) + func pauseVideoRecording() + func resumeVideoRecording() + func stopVideoRecording(completion: @escaping (_ path: String?, _ error: FlutterError?) -> Void) + + func captureToFile(completion: @escaping (_ path: String?, _ error: FlutterError?) -> Void) + + func setDeviceOrientation(_ orientation: UIDeviceOrientation) + func lockCaptureOrientation(_ orientation: FCPPlatformDeviceOrientation) + func unlockCaptureOrientation() + + func setImageFileFormat(_ fileFormat: FCPPlatformImageFileFormat) + + func setExposureMode(_ mode: FCPPlatformExposureMode) + func setExposureOffset(_ offset: Double) + func setExposurePoint( + _ point: FCPPlatformPoint?, + withCompletion: @escaping (_ error: FlutterError?) -> Void + ) + + func setFocusMode(_ mode: FCPPlatformFocusMode) + func setFocusPoint( + _ point: FCPPlatformPoint?, + completion: @escaping (_ error: FlutterError?) -> Void + ) + + func setZoomLevel(_ zoom: CGFloat, withCompletion: @escaping (_ error: FlutterError?) -> Void) + + func setFlashMode( + _ mode: FCPPlatformFlashMode, + withCompletion: @escaping (_ error: FlutterError?) 
-> Void + ) + + func pausePreview() + func resumePreview() + + func setDescriptionWhileRecording( + _ cameraName: String, + withCompletion: @escaping (_ error: FlutterError?) -> Void + ) + + func startImageStream(with: FlutterBinaryMessenger) + func stopImageStream() + + // Override to make `AVCaptureVideoDataOutputSampleBufferDelegate`/ + // `AVCaptureAudioDataOutputSampleBufferDelegate` method non optional + override func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) + + func close() +} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift index 570cd22095e..62d570a578f 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.swift @@ -24,7 +24,7 @@ public final class CameraPlugin: NSObject, FlutterPlugin { private let captureSessionQueue: DispatchQueue /// An internal camera object that manages camera's state and performs camera operations. - var camera: FLTCam? + var camera: Camera? public static func register(with registrar: FlutterPluginRegistrar) { let instance = CameraPlugin( @@ -248,7 +248,7 @@ extension CameraPlugin: FCPCameraApi { ) var error: NSError? 
- let newCamera = FLTCam(configuration: camConfiguration, error: &error) + let newCamera = DefaultCamera(configuration: camConfiguration, error: &error) if let error = error { completion(nil, CameraPlugin.flutterErrorFromNSError(error)) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift new file mode 100644 index 00000000000..5fbb65cfc6a --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -0,0 +1,265 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import CoreMotion + +// Import Objective-C part of the implementation when SwiftPM is used. +#if canImport(camera_avfoundation_objc) + import camera_avfoundation_objc +#endif + +final class DefaultCamera: FLTCam, Camera { + /// The queue on which `latestPixelBuffer` property is accessed. + /// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`. + private let pixelBufferSynchronizationQueue = DispatchQueue( + label: "io.flutter.camera.pixelBufferSynchronizationQueue") + + /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. + /// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API. + private var latestPixelBuffer: CVPixelBuffer? + private var lastVideoSampleTime = CMTime.zero + private var lastAudioSampleTime = CMTime.zero + + /// Maximum number of frames pending processing. + /// To limit memory consumption, limit the number of frames pending processing. + /// After some testing, 4 was determined to be the best maximum value. 
+ /// https://github.com/flutter/plugins/pull/4520#discussion_r766335637 + private var maxStreamingPendingFramesCount = 4 + + func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) { + if output == captureVideoOutput.avOutput { + if let newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { + + pixelBufferSynchronizationQueue.sync { + latestPixelBuffer = newBuffer + } + + onFrameAvailable?() + } + } + + guard CMSampleBufferDataIsReady(sampleBuffer) else { + reportErrorMessage("sample buffer is not ready. Skipping sample") + return + } + + if isStreamingImages { + if let eventSink = imageStreamHandler?.eventSink, + streamingPendingFramesCount < maxStreamingPendingFramesCount + { + streamingPendingFramesCount += 1 + + let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)! + // Must lock base address before accessing the pixel data + CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) + + let imageWidth = CVPixelBufferGetWidth(pixelBuffer) + let imageHeight = CVPixelBufferGetHeight(pixelBuffer) + + var planes: [[String: Any]] = [] + + let isPlanar = CVPixelBufferIsPlanar(pixelBuffer) + let planeCount = isPlanar ? CVPixelBufferGetPlaneCount(pixelBuffer) : 1 + + for i in 0.. 0 { + currentSampleTime = CMTimeAdd(currentSampleTime, dur) + } + + if audioIsDisconnected { + audioIsDisconnected = false + + audioTimeOffset = + audioTimeOffset.value == 0 + ? 
CMTimeSubtract(currentSampleTime, lastAudioSampleTime) + : CMTimeAdd(audioTimeOffset, CMTimeSubtract(currentSampleTime, lastAudioSampleTime)) + + return + } + + lastAudioSampleTime = currentSampleTime + + if audioTimeOffset.value != 0 { + if let adjustedSampleBuffer = copySampleBufferWithAdjustedTime( + sampleBuffer, + by: audioTimeOffset) + { + newAudioSample(adjustedSampleBuffer) + } + } else { + newAudioSample(sampleBuffer) + } + } + } + } + + private func copySampleBufferWithAdjustedTime(_ sample: CMSampleBuffer, by offset: CMTime) + -> CMSampleBuffer? + { + var count: CMItemCount = 0 + CMSampleBufferGetSampleTimingInfoArray( + sample, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count) + + let timingInfo = UnsafeMutablePointer.allocate(capacity: Int(count)) + defer { timingInfo.deallocate() } + + CMSampleBufferGetSampleTimingInfoArray( + sample, entryCount: count, arrayToFill: timingInfo, entriesNeededOut: &count) + + for i in 0.. Unmanaged? { + var pixelBuffer: CVPixelBuffer? + pixelBufferSynchronizationQueue.sync { + pixelBuffer = latestPixelBuffer + latestPixelBuffer = nil + } + + if let buffer = pixelBuffer { + return Unmanaged.passRetained(buffer) + } else { + return nil + } + } + + private func reportErrorMessage(_ errorMessage: String) { + FLTEnsureToRunOnMainQueue { [weak self] in + self?.dartAPI?.reportError(errorMessage) { _ in + // Ignore any errors, as this is just an event broadcast. 
+ } + } + } +} diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m index 1aa6a4598ea..e67c586eda1 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m @@ -32,48 +32,22 @@ @interface FLTCam () *videoCaptureSession; @property(readonly, nonatomic) NSObject *audioCaptureSession; @property(readonly, nonatomic) NSObject *captureVideoInput; -/// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. -/// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API. -@property(readwrite, nonatomic) CVPixelBufferRef latestPixelBuffer; @property(readonly, nonatomic) CGSize captureSize; -@property(strong, nonatomic) NSObject *videoWriter; -@property(strong, nonatomic) NSObject *videoWriterInput; -@property(strong, nonatomic) NSObject *audioWriterInput; @property(strong, nonatomic) NSObject *assetWriterPixelBufferAdaptor; @property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput; @property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput; @property(strong, nonatomic) NSString *videoRecordingPath; -@property(assign, nonatomic) BOOL isFirstVideoSample; -@property(assign, nonatomic) BOOL isRecording; -@property(assign, nonatomic) BOOL isRecordingPaused; -@property(assign, nonatomic) BOOL videoIsDisconnected; -@property(assign, nonatomic) BOOL audioIsDisconnected; @property(assign, nonatomic) BOOL isAudioSetup; -/// Number of frames currently pending processing. -@property(assign, nonatomic) int streamingPendingFramesCount; - -/// Maximum number of frames pending processing. 
-@property(assign, nonatomic) int maxStreamingPendingFramesCount; - @property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation; -@property(assign, nonatomic) CMTime lastVideoSampleTime; -@property(assign, nonatomic) CMTime lastAudioSampleTime; -@property(assign, nonatomic) CMTime videoTimeOffset; -@property(assign, nonatomic) CMTime audioTimeOffset; @property(nonatomic) CMMotionManager *motionManager; -@property NSObject *videoAdaptor; /// All FLTCam's state access and capture session related operations should be on run on this queue. @property(strong, nonatomic) dispatch_queue_t captureSessionQueue; -/// The queue on which `latestPixelBuffer` property is accessed. -/// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`. -@property(strong, nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue; /// The queue on which captured photos (not videos) are written to disk. /// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation. @property(strong, nonatomic) dispatch_queue_t photoIOQueue; @@ -109,8 +83,6 @@ - (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configurati _mediaSettingsAVWrapper = configuration.mediaSettingsWrapper; _captureSessionQueue = configuration.captureSessionQueue; - _pixelBufferSynchronizationQueue = - dispatch_queue_create("io.flutter.camera.pixelBufferSynchronizationQueue", NULL); _photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL); _videoCaptureSession = configuration.videoCaptureSession; _audioCaptureSession = configuration.audioCaptureSession; @@ -132,11 +104,6 @@ - (instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configurati _assetWriterFactory = configuration.assetWriterFactory; _inputPixelBufferAdaptorFactory = configuration.inputPixelBufferAdaptorFactory; - // To limit memory consumption, limit the number of frames pending processing. 
- // After some testing, 4 was determined to be the best maximum value. - // https://github.com/flutter/plugins/pull/4520#discussion_r766335637 - _maxStreamingPendingFramesCount = 4; - NSError *localError = nil; AVCaptureConnection *connection = [self createConnection:&localError]; if (localError) { @@ -511,227 +478,6 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset return bestFormat; } -- (void)captureOutput:(AVCaptureOutput *)output - didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection { - if (output == _captureVideoOutput.avOutput) { - CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - CFRetain(newBuffer); - - __block CVPixelBufferRef previousPixelBuffer = nil; - // Use `dispatch_sync` to avoid unnecessary context switch under common non-contest scenarios; - // Under rare contest scenarios, it will not block for too long since the critical section is - // quite lightweight. - dispatch_sync(self.pixelBufferSynchronizationQueue, ^{ - // No need weak self because it's dispatch_sync. - previousPixelBuffer = self.latestPixelBuffer; - self.latestPixelBuffer = newBuffer; - }); - if (previousPixelBuffer) { - CFRelease(previousPixelBuffer); - } - if (_onFrameAvailable) { - _onFrameAvailable(); - } - } - if (!CMSampleBufferDataIsReady(sampleBuffer)) { - [self reportErrorMessage:@"sample buffer is not ready. 
Skipping sample"]; - return; - } - if (_isStreamingImages) { - FlutterEventSink eventSink = _imageStreamHandler.eventSink; - if (eventSink && (self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount)) { - self.streamingPendingFramesCount++; - CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - // Must lock base address before accessing the pixel data - CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - - size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer); - size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer); - - NSMutableArray *planes = [NSMutableArray array]; - - const Boolean isPlanar = CVPixelBufferIsPlanar(pixelBuffer); - size_t planeCount; - if (isPlanar) { - planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); - } else { - planeCount = 1; - } - - for (int i = 0; i < planeCount; i++) { - void *planeAddress; - size_t bytesPerRow; - size_t height; - size_t width; - - if (isPlanar) { - planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i); - bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i); - height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i); - width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i); - } else { - planeAddress = CVPixelBufferGetBaseAddress(pixelBuffer); - bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); - height = CVPixelBufferGetHeight(pixelBuffer); - width = CVPixelBufferGetWidth(pixelBuffer); - } - - NSNumber *length = @(bytesPerRow * height); - NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue]; - - NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary]; - planeBuffer[@"bytesPerRow"] = @(bytesPerRow); - planeBuffer[@"width"] = @(width); - planeBuffer[@"height"] = @(height); - planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes]; - - [planes addObject:planeBuffer]; - } - // Lock the base address before accessing pixel data, and unlock it afterwards. 
- // Done accessing the `pixelBuffer` at this point. - CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); - - NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary]; - imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth]; - imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight]; - imageBuffer[@"format"] = @(_videoFormat); - imageBuffer[@"planes"] = planes; - imageBuffer[@"lensAperture"] = [NSNumber numberWithFloat:[_captureDevice lensAperture]]; - Float64 exposureDuration = CMTimeGetSeconds([_captureDevice exposureDuration]); - Float64 nsExposureDuration = 1000000000 * exposureDuration; - imageBuffer[@"sensorExposureTime"] = [NSNumber numberWithInt:nsExposureDuration]; - imageBuffer[@"sensorSensitivity"] = [NSNumber numberWithFloat:[_captureDevice ISO]]; - - dispatch_async(dispatch_get_main_queue(), ^{ - eventSink(imageBuffer); - }); - } - } - if (_isRecording && !_isRecordingPaused) { - if (_videoWriter.status == AVAssetWriterStatusFailed) { - [self reportErrorMessage:[NSString stringWithFormat:@"%@", _videoWriter.error]]; - return; - } - - // ignore audio samples until the first video sample arrives to avoid black frames - // https://github.com/flutter/flutter/issues/57831 - if (_isFirstVideoSample && output != _captureVideoOutput.avOutput) { - return; - } - - CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); - - if (_isFirstVideoSample) { - [_videoWriter startSessionAtSourceTime:currentSampleTime]; - // fix sample times not being numeric when pause/resume happens before first sample buffer - // arrives - // https://github.com/flutter/flutter/issues/132014 - _lastVideoSampleTime = currentSampleTime; - _lastAudioSampleTime = currentSampleTime; - _isFirstVideoSample = NO; - } - - if (output == _captureVideoOutput.avOutput) { - if (_videoIsDisconnected) { - _videoIsDisconnected = NO; - - if (_videoTimeOffset.value == 0) { - _videoTimeOffset = 
CMTimeSubtract(currentSampleTime, _lastVideoSampleTime); - } else { - CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime); - _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset); - } - - return; - } - - _lastVideoSampleTime = currentSampleTime; - - CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset); - // do not append sample buffer when readyForMoreMediaData is NO to avoid crash - // https://github.com/flutter/flutter/issues/132073 - if (_videoWriterInput.readyForMoreMediaData) { - [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime]; - } - } else { - CMTime dur = CMSampleBufferGetDuration(sampleBuffer); - - if (dur.value > 0) { - currentSampleTime = CMTimeAdd(currentSampleTime, dur); - } - - if (_audioIsDisconnected) { - _audioIsDisconnected = NO; - - if (_audioTimeOffset.value == 0) { - _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime); - } else { - CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime); - _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset); - } - - return; - } - - _lastAudioSampleTime = currentSampleTime; - - if (_audioTimeOffset.value != 0) { - CMSampleBufferRef adjustedSampleBuffer = - [self copySampleBufferWithAdjustedTime:sampleBuffer by:_audioTimeOffset]; - [self newAudioSample:adjustedSampleBuffer]; - CFRelease(adjustedSampleBuffer); - } else { - [self newAudioSample:sampleBuffer]; - } - } - } -} - -- (CMSampleBufferRef)copySampleBufferWithAdjustedTime:(CMSampleBufferRef)sample by:(CMTime)offset { - CMItemCount count; - CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); - CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count); - CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); - for (CMItemCount i = 0; i < count; i++) { - pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, 
offset); - pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); - } - CMSampleBufferRef sout; - CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); - free(pInfo); - return sout; -} - -- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer { - if (_videoWriter.status != AVAssetWriterStatusWriting) { - if (_videoWriter.status == AVAssetWriterStatusFailed) { - [self reportErrorMessage:[NSString stringWithFormat:@"%@", _videoWriter.error]]; - } - return; - } - if (_videoWriterInput.readyForMoreMediaData) { - if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) { - [self reportErrorMessage:@"Unable to write to video input"]; - } - } -} - -- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer { - if (_videoWriter.status != AVAssetWriterStatusWriting) { - if (_videoWriter.status == AVAssetWriterStatusFailed) { - [self reportErrorMessage:[NSString stringWithFormat:@"%@", _videoWriter.error]]; - } - return; - } - if (_audioWriterInput.readyForMoreMediaData) { - if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) { - [self reportErrorMessage:@"Unable to write to audio input"]; - } - } -} - - (void)close { [self stop]; for (AVCaptureInput *input in [_videoCaptureSession inputs]) { @@ -749,23 +495,9 @@ - (void)close { } - (void)dealloc { - if (_latestPixelBuffer) { - CFRelease(_latestPixelBuffer); - } [_motionManager stopAccelerometerUpdates]; } -- (CVPixelBufferRef)copyPixelBuffer { - __block CVPixelBufferRef pixelBuffer = nil; - // Use `dispatch_sync` because `copyPixelBuffer` API requires synchronous return. - dispatch_sync(self.pixelBufferSynchronizationQueue, ^{ - // No need weak self because it's dispatch_sync. 
- pixelBuffer = self.latestPixelBuffer; - self.latestPixelBuffer = nil; - }); - return pixelBuffer; -} - - (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion messengerForStreaming:(nullable NSObject *)messenger { if (!_isRecording) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h index 0283d79d8c2..4724009fe5c 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h @@ -11,20 +11,21 @@ #import "FLTCamMediaSettingsAVWrapper.h" #import "FLTCaptureDevice.h" #import "FLTDeviceOrientationProviding.h" +#import "FLTImageStreamHandler.h" #import "messages.g.h" NS_ASSUME_NONNULL_BEGIN /// A class that manages camera's state and performs camera operations. -@interface FLTCam : NSObject +@interface FLTCam : NSObject @property(readonly, nonatomic) NSObject *captureDevice; @property(readonly, nonatomic) CGSize previewSize; @property(assign, nonatomic) BOOL isPreviewPaused; -@property(nonatomic, copy) void (^onFrameAvailable)(void); +@property(nonatomic, copy, nullable) void (^onFrameAvailable)(void); /// The API instance used to communicate with the Dart side of the plugin. Once initially set, this /// should only ever be accessed on the main thread. -@property(nonatomic) FCPCameraEventApi *dartAPI; +@property(nonatomic, nullable) FCPCameraEventApi *dartAPI; // Format used for video and image streaming. 
@property(assign, nonatomic) FourCharCode videoFormat; @property(assign, nonatomic) FCPPlatformImageFileFormat fileFormat; @@ -33,6 +34,22 @@ NS_ASSUME_NONNULL_BEGIN @property(readonly, nonatomic) CGFloat minimumExposureOffset; @property(readonly, nonatomic) CGFloat maximumExposureOffset; +// Properties exposed for the Swift DefaultCamera subclass +@property(nonatomic, nullable) FLTImageStreamHandler *imageStreamHandler; +/// Number of frames currently pending processing. +@property(assign, nonatomic) int streamingPendingFramesCount; +@property(assign, nonatomic) BOOL isFirstVideoSample; +@property(assign, nonatomic) BOOL isRecording; +@property(assign, nonatomic) BOOL isRecordingPaused; +@property(strong, nonatomic, nullable) NSObject *videoWriter; +@property(assign, nonatomic) BOOL videoIsDisconnected; +@property(assign, nonatomic) BOOL audioIsDisconnected; +@property(assign, nonatomic) CMTime videoTimeOffset; +@property(assign, nonatomic) CMTime audioTimeOffset; +@property(strong, nonatomic, nullable) NSObject *videoWriterInput; +@property(strong, nonatomic, nullable) NSObject *audioWriterInput; +@property(nullable) NSObject *videoAdaptor; + /// Initializes an `FLTCam` instance with the given configuration. /// @param error report to the caller if any error happened creating the camera. 
- (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h index 0eed426d3c8..e7bfdb55223 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h @@ -31,13 +31,6 @@ @property(readonly, nonatomic) NSMutableDictionary *inProgressSavePhotoDelegates; -/// Delegate callback when receiving a new video or audio sample. -/// Exposed for unit tests. -- (void)captureOutput:(AVCaptureOutput *)output - didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection - NS_SWIFT_NAME(captureOutput(_:didOutput:from:)); - /// Start streaming images. - (void)startImageStreamWithMessenger:(NSObject *)messenger imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler; diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index b7bbb4e5510..e0441452e23 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.19+1 +version: 0.9.19+2 environment: sdk: ^3.6.0