diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift
index 0d6105aa6..91c08b34f 100644
--- a/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift
+++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Protobuf/Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift
@@ -22,8 +22,9 @@ extension Stream_Video_Sfu_Signal_TrackSubscriptionDetails {
         type: Stream_Video_Sfu_Models_TrackType
     ) {
         userID = userId
-        dimension = size.map { Stream_Video_Sfu_Models_VideoDimension($0) }
-            ?? Stream_Video_Sfu_Models_VideoDimension()
+        if type == .video || type == .screenShare {
+            dimension = size.map { Stream_Video_Sfu_Models_VideoDimension($0) } ?? Stream_Video_Sfu_Models_VideoDimension()
+        }
         sessionID = sessionId
         trackType = type
     }
diff --git a/Sources/StreamVideo/WebRTC/v2/SFU/SFUAdapter.swift b/Sources/StreamVideo/WebRTC/v2/SFU/SFUAdapter.swift
index 13abeb6c6..ad450cde4 100644
--- a/Sources/StreamVideo/WebRTC/v2/SFU/SFUAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/SFU/SFUAdapter.swift
@@ -430,6 +430,7 @@ final class SFUAdapter: ConnectionStateDelegate, CustomStringConvertible, @unche
 
         try Task.checkCancellation()
 
+        log.debug(request, subsystems: .sfu)
         let task = Task { [request, signalService] in
             try Task.checkCancellation()
             return try await executeTask(retryPolicy: .neverGonnaGiveYouUp { true }) {
diff --git a/Sources/StreamVideoSwiftUI/CallViewModel.swift b/Sources/StreamVideoSwiftUI/CallViewModel.swift
index ef8ac7c47..83dbe70bf 100644
--- a/Sources/StreamVideoSwiftUI/CallViewModel.swift
+++ b/Sources/StreamVideoSwiftUI/CallViewModel.swift
@@ -162,6 +162,7 @@ open class CallViewModel: ObservableObject {
     private var recordingUpdates: AnyCancellable?
     private var screenSharingUpdates: AnyCancellable?
     private var callSettingsUpdates: AnyCancellable?
+    private var applicationLifecycleUpdates: AnyCancellable?
 
     private var ringingTimer: Foundation.Timer?
     private var lastScreenSharingParticipant: CallParticipant?
@@ -216,6 +217,7 @@ open class CallViewModel: ObservableObject {
         localCallSettingsChange = callSettings != nil
 
         subscribeToCallEvents()
+        subscribeToApplicationLifecycleEvents()
         pictureInPictureAdapter.onSizeUpdate = { [weak self] in
             self?.updateTrackSize($0, for: $1)
         }
@@ -835,6 +837,40 @@ open class CallViewModel: ObservableObject {
     private func participantAutoLeavePolicyTriggered() {
        leaveCall()
     }
+
+    private func subscribeToApplicationLifecycleEvents() {
+        #if canImport(UIKit)
+        // When running in a UIKit application, we observe the application state so that
+        // video tracks disabled while the app was in the background can be re-enabled.
+        applicationLifecycleUpdates = NotificationCenter.default
+            .publisher(for: UIApplication.didBecomeActiveNotification)
+            .sink { [weak self] _ in self?.applicationDidBecomeActive() }
+        log.debug("\(type(of: self)) now observes application lifecycle.")
+        #endif
+    }
+
+    private func applicationDidBecomeActive() {
+        guard let call else { return }
+
+        let tracksToBeActivated = call
+            .state
+            .participants
+            .filter { $0.hasVideo && $0.track?.isEnabled == false }
+
+        guard !tracksToBeActivated.isEmpty else {
+            log.debug("\(type(of: self)) application lifecycle observer found no tracks to activate.")
+            return
+        }
+
+        log.debug(
+            """
+            \(tracksToBeActivated.count) tracks were deactivated while the app was in the
+            background and will be activated again now that the app is active.
+            """
+        )
+
+        tracksToBeActivated.forEach { $0.track?.isEnabled = true }
+    }
 }
 
 /// The state of the call.
diff --git a/Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamPictureInPictureVideoRenderer.swift b/Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamPictureInPictureVideoRenderer.swift
index 95156a3ec..9eb48f3f0 100644
--- a/Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamPictureInPictureVideoRenderer.swift
+++ b/Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamPictureInPictureVideoRenderer.swift
@@ -88,6 +88,8 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
     /// A size ratio threshold used to determine if skipping frames is required.
     private let sizeRatioThreshold: CGFloat = 15
 
+    private let isLoggingEnabled = false
+
     // MARK: - Lifecycle
 
     @available(*, unavailable)
@@ -131,14 +133,17 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
             // has changed.
             trackSize = .init(width: Int(frame.width), height: Int(frame.height))
 
-        log.debug("→ Received frame with trackSize:\(trackSize)", subsystems: .pictureInPicture)
+        logMessage(
+            .debug,
+            message: "→ Received frame with trackSize:\(trackSize)"
+        )
 
         defer {
             handleFrameSkippingIfRequired()
         }
 
         guard shouldRenderFrame else {
-            log.debug("→ Skipping frame.")
+            logMessage(.debug, message: "→ Skipping frame.")
             return
         }
 
@@ -146,10 +151,16 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
         let yuvBuffer = bufferTransformer.transformAndResizeIfRequired(frame, targetSize: contentSize)?
             .buffer as? StreamRTCYUVBuffer,
             let sampleBuffer = yuvBuffer.sampleBuffer {
-            log.debug("➕ Buffer for trackId:\(track?.trackId ?? "n/a") added.", subsystems: .pictureInPicture)
+            logMessage(
+                .debug,
+                message: "➕ Buffer for trackId:\(track?.trackId ?? "n/a") added."
+            )
             bufferPublisher.send(sampleBuffer)
         } else {
-            log.warning("Failed to convert \(type(of: frame.buffer)) CMSampleBuffer.", subsystems: .pictureInPicture)
+            logMessage(
+                .warning,
+                message: "Failed to convert \(type(of: frame.buffer)) to CMSampleBuffer."
+            )
         }
     }
 
@@ -174,21 +185,24 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
             buffer.isValid
         else {
             contentView.renderingComponent.flush()
-            log.debug("🔥 Display layer flushed.", subsystems: .pictureInPicture)
+            logMessage(.debug, message: "🔥 Display layer flushed.")
             return
         }
 
-        log.debug("⚙️ Processing buffer for trackId:\(trackId).", subsystems: .pictureInPicture)
+        logMessage(
+            .debug,
+            message: "⚙️ Processing buffer for trackId:\(trackId)."
+        )
 
         if #available(iOS 14.0, *) {
             if contentView.renderingComponent.requiresFlushToResumeDecoding == true {
                 contentView.renderingComponent.flush()
-                log.debug("🔥 Display layer for track:\(trackId) flushed.", subsystems: .pictureInPicture)
+                logMessage(.debug, message: "🔥 Display layer for track:\(trackId) flushed.")
             }
         }
 
         if contentView.renderingComponent.isReadyForMoreMediaData {
             contentView.renderingComponent.enqueue(buffer)
-            log.debug("✅ Buffer for trackId:\(trackId) enqueued.", subsystems: .pictureInPicture)
+            logMessage(.debug, message: "✅ Buffer for trackId:\(trackId) enqueued.")
         }
     }
 
@@ -206,7 +220,10 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
             .sink { [weak self] in self?.process($0) }
 
         track.add(self)
-        log.debug("⏳ Frame streaming for Picture-in-Picture started.", subsystems: .pictureInPicture)
+        logMessage(
+            .debug,
+            message: "⏳ Frame streaming for Picture-in-Picture started."
+        )
     }
 
     /// A method that stops the frame consumption from the track. Used automatically when the rendering
@@ -217,7 +234,7 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
         bufferUpdatesCancellable = nil
         track?.remove(self)
         contentView.renderingComponent.flush()
-        log.debug("Frame streaming for Picture-in-Picture stopped.", subsystems: .pictureInPicture)
+        logMessage(.debug, message: "Frame streaming for Picture-in-Picture stopped.")
     }
 
     /// A method used to calculate rendering required properties, every time the trackSize changes.
@@ -239,7 +256,9 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
         /// to the value that fits.
         pictureInPictureWindowSizePolicy.trackSize = trackSize
 
-        log.debug(
+        logMessage(
+            .debug,
+            message:
             """
             contentSize:\(contentSize)
             trackId:\(track?.trackId ?? "n/a")
@@ -249,7 +268,7 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
             skippedFrames:\(skippedFrames)
             widthDiffRatio:\(widthDiffRatio)
             heightDiffRatio:\(heightDiffRatio)
-            """, subsystems: .pictureInPicture
+            """
         )
     }
 
@@ -261,9 +280,9 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
             } else {
                 skippedFrames += 1
             }
-            log.debug(
-                "noOfFramesToSkipAfterRendering:\(noOfFramesToSkipAfterRendering) skippedFrames:\(skippedFrames)",
-                subsystems: .pictureInPicture
+            logMessage(
+                .debug,
+                message: "noOfFramesToSkipAfterRendering:\(noOfFramesToSkipAfterRendering) skippedFrames:\(skippedFrames)"
             )
         } else if skippedFrames > 0 {
             skippedFrames = 0
@@ -278,4 +297,26 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
         requiresResize = false
         startFrameStreaming(for: track, on: window)
     }
+
+    private func logMessage(
+        _ level: LogLevel,
+        message: String,
+        error: Error? = nil,
+        file: StaticString = #file,
+        functionName: StaticString = #function,
+        line: UInt = #line
+    ) {
+        guard isLoggingEnabled else {
+            return
+        }
+        log.log(
+            level,
+            functionName: functionName,
+            fileName: file,
+            lineNumber: line,
+            message: message,
+            subsystems: .pictureInPicture,
+            error: error
+        )
+    }
 }
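
For context, the CallViewModel change above follows a standard UIKit/Combine pattern: subscribe to UIApplication.didBecomeActiveNotification and re-enable any video tracks that were disabled while the app was backgrounded. Below is a minimal, standalone sketch of that pattern; VideoTrackRepresentable and LifecycleTrackReactivator are hypothetical names used for illustration only and are not part of the StreamVideo API.

import Combine
import UIKit

// Hypothetical stand-in for WebRTC's RTCVideoTrack, which exposes `isEnabled`.
protocol VideoTrackRepresentable: AnyObject {
    var isEnabled: Bool { get set }
}

final class LifecycleTrackReactivator {
    private var cancellable: AnyCancellable?
    private let tracksProvider: () -> [VideoTrackRepresentable]

    init(tracksProvider: @escaping () -> [VideoTrackRepresentable]) {
        self.tracksProvider = tracksProvider
        // Mirrors subscribeToApplicationLifecycleEvents() in the diff: observe
        // the app becoming active via Combine and react once per activation.
        cancellable = NotificationCenter.default
            .publisher(for: UIApplication.didBecomeActiveNotification)
            .sink { [weak self] _ in self?.reactivateDisabledTracks() }
    }

    private func reactivateDisabledTracks() {
        // Mirrors applicationDidBecomeActive(): only touch tracks that are
        // currently disabled, e.g. after returning from the background.
        tracksProvider()
            .filter { !$0.isEnabled }
            .forEach { $0.isEnabled = true }
    }
}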
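
The renderer's logMessage helper gates the verbose per-frame logging behind the isLoggingEnabled flag, so the hot render path pays only a Bool check when logging is off. Note, however, that the message strings at the call sites are still interpolated eagerly. If that cost ever shows up in profiles, an @autoclosure variant, sketched below as a possible refinement rather than what the diff implements, would defer message construction until the gate passes; GatedFrameLogger is a hypothetical name.

// A sketch of an @autoclosure-based variant: the message expression
// (including any string interpolation) is evaluated only when the
// logger is enabled.
struct GatedFrameLogger {
    var isLoggingEnabled = false

    func debug(
        _ message: @autoclosure () -> String,
        file: StaticString = #file,
        line: UInt = #line
    ) {
        guard isLoggingEnabled else { return }
        print("[PiP] \(file):\(line) \(message())")
    }
}

// Usage: the interpolation in the argument is never evaluated while the
// logger is disabled.
// logger.debug("→ Received frame with trackSize:\(trackSize)")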