diff --git a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt
index 1e6c0dbc83272..2b85aa04a2bdb 100644
--- a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt
+++ b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt
@@ -17,7 +17,7 @@ PASS rollback of a local offer to negotiated stable state should enable applying
PASS rollback a local offer with audio direction change to negotiated stable state and then add video receiver
PASS two transceivers with same mids
PASS onremovetrack fires during remote rollback
-FAIL rollback of a remote offer with stream changes assert_equals: expected 2 but got 1
+PASS rollback of a remote offer with stream changes
PASS removeTrack() with a sender being rolled back does not crash or throw
PASS Implicit rollback with only a datachannel works
diff --git a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt
index 2a4323b7eb9db..6947a02db718f 100644
--- a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt
+++ b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt
@@ -1,9 +1,7 @@
-Harness Error (TIMEOUT), message = null
-
PASS setStreams causes streams to be reported via ontrack on callee
PASS setStreams can be used to reconstruct a stream with a track on the remote side
PASS Adding streams and changing direction causes new streams to be reported via ontrack on callee
-TIMEOUT Adding streams to an active transceiver causes new streams to be reported via ontrack on callee Test timed out
+PASS Adding streams to an active transceiver causes new streams to be reported via ontrack on callee
PASS setStreams() fires InvalidStateError on a closed peer connection.
diff --git a/LayoutTests/platform/glib/TestExpectations b/LayoutTests/platform/glib/TestExpectations
index a3023fa33a1a7..56744c4b904a9 100644
--- a/LayoutTests/platform/glib/TestExpectations
+++ b/LayoutTests/platform/glib/TestExpectations
@@ -1474,6 +1474,8 @@ webrtc/video-av1.html [ Skip ]
# GStreamer's DTLS agent currently generates RSA certificates only. DTLS 1.2 is not supported yet (AFAIK).
webrtc/datachannel/dtls10.html [ Failure ]
+# FIXME: Remove Timeout expectation once bug #275685 is fixed.
+webkit.org/b/269285 webrtc/h265.html [ Failure Timeout ]
# Too slow with filtering implemented in WebKit. Should be done directly by GstWebRTC.
webrtc/datachannel/filter-ice-candidate.html [ Skip ]
diff --git a/LayoutTests/webrtc/msid-setCodecPreferences-expected.txt b/LayoutTests/webrtc/msid-setCodecPreferences-expected.txt
new file mode 100644
index 0000000000000..c965098c64331
--- /dev/null
+++ b/LayoutTests/webrtc/msid-setCodecPreferences-expected.txt
@@ -0,0 +1,3 @@
+
+PASS msid present in offer SDP after setting codec preferences
+
diff --git a/LayoutTests/webrtc/msid-setCodecPreferences.html b/LayoutTests/webrtc/msid-setCodecPreferences.html
new file mode 100644
index 0000000000000..2f4f69a205066
--- /dev/null
+++ b/LayoutTests/webrtc/msid-setCodecPreferences.html
@@ -0,0 +1,27 @@
+
+
+
+
+ Testing basic video exchange from offerer to receiver
+
+
+
+
+
+
+
+
diff --git a/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt
new file mode 100644
index 0000000000000..f5340be2d8065
--- /dev/null
+++ b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt
@@ -0,0 +1,5 @@
+
+PASS setConfiguration after data channel is created
+PASS setConfiguration after video transceiver is added
+PASS setConfiguration after audio transceiver is added
+
diff --git a/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html
new file mode 100644
index 0000000000000..2b099271bbd7e
--- /dev/null
+++ b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html
@@ -0,0 +1,39 @@
+
+
+
+
+ Testing media fields in SDP when setConfiguration comes after createDataChannel/addTransceiver
+
+
+
+
+
+
+
+
diff --git a/Source/WebCore/Modules/mediastream/MediaStream.h b/Source/WebCore/Modules/mediastream/MediaStream.h
index a9793c54bec1f..abd3b56fa6326 100644
--- a/Source/WebCore/Modules/mediastream/MediaStream.h
+++ b/Source/WebCore/Modules/mediastream/MediaStream.h
@@ -78,6 +78,9 @@ class MediaStream final
RefPtr clone();
+ using WeakValueType = MediaStreamPrivate::Observer::WeakValueType;
+ using MediaStreamPrivate::Observer::weakPtrFactory;
+
bool active() const { return m_isActive; }
bool muted() const { return m_private->muted(); }
diff --git a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
index 91229c6574d51..b8bb6a0679540 100644
--- a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
+++ b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
@@ -610,12 +610,21 @@ void MediaStreamTrack::trackMutedChanged(MediaStreamTrackPrivate&)
if (document->activeDOMObjectsAreStopped() || m_ended)
return;
- queueTaskKeepingObjectAlive(*this, TaskSource::Networking, [this, muted = m_private->muted()] {
+ Function updateMuted = [this, muted = m_private->muted()] {
if (!scriptExecutionContext() || scriptExecutionContext()->activeDOMObjectsAreStopped())
return;
+
+ if (m_muted == muted)
+ return;
+
m_muted = muted;
dispatchEvent(Event::create(muted ? eventNames().muteEvent : eventNames().unmuteEvent, Event::CanBubble::No, Event::IsCancelable::No));
- });
+ };
+ if (m_shouldFireMuteEventImmediately)
+ updateMuted();
+ else
+ queueTaskKeepingObjectAlive(*this, TaskSource::Networking, WTFMove(updateMuted));
+
configureTrackRendering();
bool wasInterrupted = m_isInterrupted;
diff --git a/Source/WebCore/Modules/mediastream/MediaStreamTrack.h b/Source/WebCore/Modules/mediastream/MediaStreamTrack.h
index 87347e6a0a291..c9d46149f7f15 100644
--- a/Source/WebCore/Modules/mediastream/MediaStreamTrack.h
+++ b/Source/WebCore/Modules/mediastream/MediaStreamTrack.h
@@ -163,6 +163,8 @@ class MediaStreamTrack
const void* logIdentifier() const final { return m_private->logIdentifier(); }
#endif
+ void setShouldFireMuteEventImmediately(bool value) { m_shouldFireMuteEventImmediately = value; }
+
protected:
MediaStreamTrack(ScriptExecutionContext&, Ref&&);
@@ -212,6 +214,7 @@ class MediaStreamTrack
bool m_ended { false };
const bool m_isCaptureTrack { false };
bool m_isInterrupted { false };
+ bool m_shouldFireMuteEventImmediately { false };
};
typedef Vector<Ref<MediaStreamTrack>> MediaStreamTrackVector;
diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp
index 17c0c0c05e592..a73f0eeb30bb2 100644
--- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp
+++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp
@@ -186,25 +186,114 @@ void PeerConnectionBackend::setLocalDescription(const RTCSessionDescription* ses
{
ASSERT(!m_peerConnection.isClosed());
+ m_isProcessingLocalDescriptionAnswer = sessionDescription && (sessionDescription->type() == RTCSdpType::Answer || sessionDescription->type() == RTCSdpType::Pranswer);
m_setDescriptionCallback = WTFMove(callback);
doSetLocalDescription(sessionDescription);
}
-void PeerConnectionBackend::setLocalDescriptionSucceeded(std::optional&& descriptionStates, std::unique_ptr&& sctpBackend)
+struct MediaStreamAndTrackItem {
+ Ref stream;
+ Ref track;
+};
+
+// https://w3c.github.io/webrtc-pc/#set-associated-remote-streams
+static void setAssociatedRemoteStreams(RTCRtpReceiver& receiver, const PeerConnectionBackend::TransceiverState& state, Vector<MediaStreamAndTrackItem>& addList, Vector<MediaStreamAndTrackItem>& removeList)
+{
+    for (auto& currentStream : receiver.associatedStreams()) {
+        if (currentStream && !anyOf(state.receiverStreams, [&currentStream](auto& stream) { return stream->id() == currentStream->id(); }))
+            removeList.append({ Ref { *currentStream }, Ref { receiver.track() } });
+    }
+ }
+
+ for (auto& stream : state.receiverStreams) {
+ if (!anyOf(receiver.associatedStreams(), [&stream](auto& currentStream) { return stream->id() == currentStream->id(); }))
+ addList.append({ *stream, Ref { receiver.track() } });
+ }
+
+ receiver.setAssociatedStreams(WTF::map(state.receiverStreams, [](auto& stream) { return WeakPtr { stream.get() }; }));
+}
+
+static bool isDirectionReceiving(RTCRtpTransceiverDirection direction)
+{
+ return direction == RTCRtpTransceiverDirection::Sendrecv || direction == RTCRtpTransceiverDirection::Recvonly;
+}
+
+// https://w3c.github.io/webrtc-pc/#process-remote-tracks
+static void processRemoteTracks(RTCRtpTransceiver& transceiver, PeerConnectionBackend::TransceiverState&& state, Vector<MediaStreamAndTrackItem>& addList, Vector<MediaStreamAndTrackItem>& removeList, Vector<Ref<RTCTrackEvent>>& trackEventList, Vector<Ref<MediaStreamTrack>>& muteTrackList)
+{
+ auto addListSize = addList.size();
+ auto& receiver = transceiver.receiver();
+ setAssociatedRemoteStreams(receiver, state, addList, removeList);
+ if ((state.firedDirection && isDirectionReceiving(*state.firedDirection) && (!transceiver.firedDirection() || !isDirectionReceiving(*transceiver.firedDirection()))) || addListSize != addList.size()) {
+ // https://w3c.github.io/webrtc-pc/#process-remote-track-addition
+ trackEventList.append(RTCTrackEvent::create(eventNames().trackEvent, Event::CanBubble::No, Event::IsCancelable::No, &receiver, &receiver.track(), WTFMove(state.receiverStreams), &transceiver));
+ }
+ if (!(state.firedDirection && isDirectionReceiving(*state.firedDirection)) && transceiver.firedDirection() && isDirectionReceiving(*transceiver.firedDirection())) {
+ // https://w3c.github.io/webrtc-pc/#process-remote-track-removal
+ muteTrackList.append(receiver.track());
+ }
+ transceiver.setFiredDirection(state.firedDirection);
+}
+
+void PeerConnectionBackend::setLocalDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend, std::optional maxMessageSize)
{
ASSERT(isMainThread());
ALWAYS_LOG(LOGIDENTIFIER);
-
+ if (transceiverStates)
+ DEBUG_LOG(LOGIDENTIFIER, "Transceiver states: ", *transceiverStates);
ASSERT(m_setDescriptionCallback);
- m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), sctpBackend = WTFMove(sctpBackend)]() mutable {
+ m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend), maxMessageSize]() mutable {
if (m_peerConnection.isClosed())
return;
- if (descriptionStates)
- m_peerConnection.updateDescriptions(WTFMove(*descriptionStates));
m_peerConnection.updateTransceiversAfterSuccessfulLocalDescription();
- m_peerConnection.updateSctpBackend(WTFMove(sctpBackend));
+ m_peerConnection.updateSctpBackend(WTFMove(sctpBackend), maxMessageSize);
+
+ if (descriptionStates) {
+ m_peerConnection.updateDescriptions(WTFMove(*descriptionStates));
+ if (m_peerConnection.isClosed())
+ return;
+ }
+
m_peerConnection.processIceTransportChanges();
+ if (m_peerConnection.isClosed())
+ return;
+
+ if (m_isProcessingLocalDescriptionAnswer && transceiverStates) {
+ // Compute track related events.
+ Vector removeList;
+ Vector][> muteTrackList;
+ Vector addListNoOp;
+ for (auto& transceiverState : *transceiverStates) {
+ RefPtr transceiver;
+ for (auto& item : m_peerConnection.currentTransceivers()) {
+ if (item->mid() == transceiverState.mid) {
+ transceiver = item;
+ break;
+ }
+ }
+            if (transceiver) {
+                if (!(transceiverState.firedDirection && isDirectionReceiving(*transceiverState.firedDirection)) && transceiver->firedDirection() && isDirectionReceiving(*transceiver->firedDirection())) {
+                    setAssociatedRemoteStreams(transceiver->receiver(), transceiverState, addListNoOp, removeList);
+                    muteTrackList.append(transceiver->receiver().track());
+                }
+                transceiver->setFiredDirection(transceiverState.firedDirection);
+            }
+ }
+ for (auto& track : muteTrackList) {
+ track->setShouldFireMuteEventImmediately(true);
+ track->source().setMuted(true);
+ track->setShouldFireMuteEventImmediately(false);
+ if (m_peerConnection.isClosed())
+ return;
+ }
+
+ for (auto& pair : removeList) {
+ pair.stream->privateStream().removeTrack(pair.track->privateTrack());
+ if (m_peerConnection.isClosed())
+ return;
+ }
+ }
+
callback({ });
});
}
@@ -231,44 +320,92 @@ void PeerConnectionBackend::setRemoteDescription(const RTCSessionDescription& se
doSetRemoteDescription(sessionDescription);
}
-void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optional&& descriptionStates, std::unique_ptr&& sctpBackend)
+void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend, std::optional maxMessageSize)
{
ASSERT(isMainThread());
ALWAYS_LOG(LOGIDENTIFIER, "Set remote description succeeded");
+ if (transceiverStates)
+ DEBUG_LOG(LOGIDENTIFIER, "Transceiver states: ", *transceiverStates);
ASSERT(m_setDescriptionCallback);
- m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), sctpBackend = WTFMove(sctpBackend), events = WTFMove(m_pendingTrackEvents)]() mutable {
+ m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend), maxMessageSize]() mutable {
if (m_peerConnection.isClosed())
return;
- if (descriptionStates)
- m_peerConnection.updateDescriptions(WTFMove(*descriptionStates));
+ Vector removeList;
+ if (transceiverStates) {
+ for (auto& transceiver : m_peerConnection.currentTransceivers()) {
+ if (!anyOf(*transceiverStates, [&transceiver](auto& state) { return state.mid == transceiver->mid(); })) {
+ for (auto& stream : transceiver->receiver().associatedStreams()) {
+ if (stream)
+ removeList.append({ Ref { *stream }, Ref { transceiver->receiver().track() } });
+ }
+ }
+ }
+ }
- for (auto& event : events)
- dispatchTrackEvent(event);
+ m_peerConnection.updateTransceiversAfterSuccessfulRemoteDescription();
+ m_peerConnection.updateSctpBackend(WTFMove(sctpBackend), maxMessageSize);
+ if (descriptionStates) {
+ m_peerConnection.updateDescriptions(WTFMove(*descriptionStates));
+ if (m_peerConnection.isClosed())
+ return;
+ }
+
+ m_peerConnection.processIceTransportChanges();
if (m_peerConnection.isClosed())
return;
- m_peerConnection.updateTransceiversAfterSuccessfulRemoteDescription();
- m_peerConnection.updateSctpBackend(WTFMove(sctpBackend));
- m_peerConnection.processIceTransportChanges();
- callback({ });
- });
-}
+ if (transceiverStates) {
+ // Compute track related events.
+ Vector][> muteTrackList;
+ Vector addList;
+ Vector][> trackEventList;
+ for (auto& transceiverState : *transceiverStates) {
+ RefPtr transceiver;
+ for (auto& item : m_peerConnection.currentTransceivers()) {
+ if (item->mid() == transceiverState.mid) {
+ transceiver = item;
+ break;
+ }
+ }
+ if (transceiver)
+ processRemoteTracks(*transceiver, WTFMove(transceiverState), addList, removeList, trackEventList, muteTrackList);
+ }
-void PeerConnectionBackend::dispatchTrackEvent(PendingTrackEvent& event)
-{
- auto& track = event.track.get();
+ for (auto& track : muteTrackList) {
+ track->setShouldFireMuteEventImmediately(true);
+ track->source().setMuted(true);
+ track->setShouldFireMuteEventImmediately(false);
+ if (m_peerConnection.isClosed())
+ return;
+ }
- m_peerConnection.dispatchEvent(RTCTrackEvent::create(eventNames().trackEvent, Event::CanBubble::No, Event::IsCancelable::No, WTFMove(event.receiver), WTFMove(event.track), WTFMove(event.streams), WTFMove(event.transceiver)));
- ALWAYS_LOG(LOGIDENTIFIER, "Dispatched if feasible track of type ", track.source().type());
+ for (auto& pair : removeList) {
+ pair.stream->privateStream().removeTrack(pair.track->privateTrack());
+ if (m_peerConnection.isClosed())
+ return;
+ }
- if (m_peerConnection.isClosed())
- return;
+ for (auto& pair : addList) {
+ pair.stream->addTrackFromPlatform(pair.track.copyRef());
+ if (m_peerConnection.isClosed())
+ return;
+ }
+
+ for (auto& event : trackEventList) {
+ RefPtr track = event->track();
+ m_peerConnection.dispatchEvent(event);
+ if (m_peerConnection.isClosed())
+ return;
+
+ track->source().setMuted(false);
+ }
+ }
- // FIXME: As per spec, we should set muted to 'false' when starting to receive the content from network.
- track.source().setMuted(false);
+ callback({ });
+ });
}
void PeerConnectionBackend::setRemoteDescriptionFailed(Exception&& exception)
@@ -276,9 +413,6 @@ void PeerConnectionBackend::setRemoteDescriptionFailed(Exception&& exception)
ASSERT(isMainThread());
ALWAYS_LOG(LOGIDENTIFIER, "Set remote description failed:", exception.message());
- ASSERT(m_pendingTrackEvents.isEmpty());
- m_pendingTrackEvents.clear();
-
ASSERT(m_setDescriptionCallback);
m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), exception = WTFMove(exception)]() mutable {
if (m_peerConnection.isClosed())
@@ -299,12 +433,6 @@ void PeerConnectionBackend::iceGatheringStateChanged(RTCIceGatheringState state)
});
}
-void PeerConnectionBackend::addPendingTrackEvent(PendingTrackEvent&& event)
-{
- ASSERT(!m_peerConnection.isStopped());
- m_pendingTrackEvents.append(WTFMove(event));
-}
-
static String extractIPAddress(StringView sdp)
{
unsigned counter = 0;
@@ -435,8 +563,6 @@ void PeerConnectionBackend::stop()
m_offerAnswerCallback = nullptr;
m_setDescriptionCallback = nullptr;
- m_pendingTrackEvents.clear();
-
doStop();
}
@@ -498,6 +624,55 @@ WTFLogChannel& PeerConnectionBackend::logChannel() const
}
#endif
+static Ref toJSONObject(const PeerConnectionBackend::TransceiverState& transceiverState)
+{
+ auto object = JSON::Object::create();
+ object->setString("mid"_s, transceiverState.mid);
+
+ auto receiverStreams = JSON::Array::create();
+ for (auto receiverStream : transceiverState.receiverStreams)
+ receiverStreams->pushString(receiverStream->id());
+ object->setArray("receiverStreams"_s, WTFMove(receiverStreams));
+
+ if (auto firedDirection = transceiverState.firedDirection)
+ object->setString("firedDirection"_s, convertEnumerationToString(*firedDirection));
+
+ return object;
+}
+
+static Ref toJSONArray(const PeerConnectionBackend::TransceiverStates& transceiverStates)
+{
+ auto array = JSON::Array::create();
+ for (auto transceiverState : transceiverStates)
+ array->pushObject(toJSONObject(transceiverState));
+
+ return array;
+}
+
+static String toJSONString(const PeerConnectionBackend::TransceiverState& transceiverState)
+{
+ return toJSONObject(transceiverState)->toJSONString();
+}
+
+static String toJSONString(const PeerConnectionBackend::TransceiverStates& transceiverStates)
+{
+ return toJSONArray(transceiverStates)->toJSONString();
+}
+
} // namespace WebCore
+namespace WTF {
+
+String LogArgument::toString(const WebCore::PeerConnectionBackend::TransceiverState& transceiverState)
+{
+ return toJSONString(transceiverState);
+}
+
+String LogArgument::toString(const WebCore::PeerConnectionBackend::TransceiverStates& transceiverStates)
+{
+ return toJSONString(transceiverStates);
+}
+
+}
+
#endif // ENABLE(WEB_RTC)
diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h
index 62468efdbb63c..46a7e7163f778 100644
--- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h
+++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h
@@ -37,6 +37,7 @@
#include "RTCIceGatheringState.h"
#include "RTCRtpCapabilities.h"
#include "RTCRtpSendParameters.h"
+#include "RTCRtpTransceiverDirection.h"
#include "RTCSessionDescription.h"
#include "RTCSignalingState.h"
#include
@@ -136,7 +137,15 @@ class PeerConnectionBackend
String currentRemoteDescriptionSdp;
std::optional pendingRemoteDescriptionSdpType;
String pendingRemoteDescriptionSdp;
+
+ DescriptionStates isolatedCopy() &&;
+ };
+ struct TransceiverState {
+ String mid;
+        Vector<RefPtr<MediaStream>> receiverStreams;
+ std::optional firedDirection;
};
+    using TransceiverStates = Vector<TransceiverState>;
void newICECandidate(String&& sdp, String&& mid, unsigned short sdpMLineIndex, String&& serverURL, std::optional&&);
void newDataChannel(UniqueRef&&, String&&, RTCDataChannelInit&&);
@@ -211,24 +220,14 @@ class PeerConnectionBackend
void createAnswerSucceeded(String&&);
void createAnswerFailed(Exception&&);
- void setLocalDescriptionSucceeded(std::optional&&, std::unique_ptr&&);
+ void setLocalDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&, std::optional);
void setLocalDescriptionFailed(Exception&&);
- void setRemoteDescriptionSucceeded(std::optional&&, std::unique_ptr&&);
+ void setRemoteDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&, std::optional);
void setRemoteDescriptionFailed(Exception&&);
void validateSDP(const String&) const;
- struct PendingTrackEvent {
- Ref receiver;
- Ref track;
- Vector> streams;
- RefPtr transceiver;
- };
- void addPendingTrackEvent(PendingTrackEvent&&);
-
- void dispatchTrackEvent(PendingTrackEvent&);
-
private:
virtual void doCreateOffer(RTCOfferOptions&&) = 0;
virtual void doCreateAnswer(RTCAnswerOptions&&) = 0;
@@ -246,15 +245,45 @@ class PeerConnectionBackend
bool m_shouldFilterICECandidates { true };
- Vector m_pendingTrackEvents;
-
#if !RELEASE_LOG_DISABLED
Ref m_logger;
const void* m_logIdentifier;
#endif
bool m_finishedGatheringCandidates { false };
+ bool m_isProcessingLocalDescriptionAnswer { false };
};
+inline PeerConnectionBackend::DescriptionStates PeerConnectionBackend::DescriptionStates::isolatedCopy() &&
+{
+ return DescriptionStates {
+ signalingState,
+ currentLocalDescriptionSdpType,
+ WTFMove(currentLocalDescriptionSdp).isolatedCopy(),
+ pendingLocalDescriptionSdpType,
+ WTFMove(pendingLocalDescriptionSdp).isolatedCopy(),
+ currentRemoteDescriptionSdpType,
+ WTFMove(currentRemoteDescriptionSdp).isolatedCopy(),
+ pendingRemoteDescriptionSdpType,
+ WTFMove(pendingRemoteDescriptionSdp).isolatedCopy()
+ };
+}
} // namespace WebCore
+namespace WTF {
+
+template
+struct LogArgument;
+
+template <>
+struct LogArgument {
+ static String toString(const WebCore::PeerConnectionBackend::TransceiverState&);
+};
+
+template <>
+struct LogArgument {
+ static String toString(const WebCore::PeerConnectionBackend::TransceiverStates&);
+};
+
+}
+
#endif // ENABLE(WEB_RTC)
diff --git a/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp b/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp
index 0e4ed66346916..6e2669b6d854c 100644
--- a/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp
+++ b/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp
@@ -1096,24 +1096,25 @@ void RTCPeerConnection::updateTransceiversAfterSuccessfulRemoteDescription()
updateTransceiverTransports();
}
-void RTCPeerConnection::updateSctpBackend(std::unique_ptr&& sctpBackend)
+void RTCPeerConnection::updateSctpBackend(std::unique_ptr&& sctpBackend, std::optional maxMessageSize)
{
if (!sctpBackend) {
m_sctpTransport = nullptr;
return;
}
- if (m_sctpTransport && m_sctpTransport->backend() == *sctpBackend) {
- m_sctpTransport->update();
- return;
+
+ if (!m_sctpTransport || m_sctpTransport->backend() != *sctpBackend) {
+ RefPtr context = scriptExecutionContext();
+ if (!context)
+ return;
+
+ auto dtlsTransport = getOrCreateDtlsTransport(sctpBackend->dtlsTransportBackend().moveToUniquePtr());
+ if (!dtlsTransport)
+ return;
+ m_sctpTransport = RTCSctpTransport::create(*context, makeUniqueRefFromNonNullUniquePtr(WTFMove(sctpBackend)), dtlsTransport.releaseNonNull());
}
- auto* context = scriptExecutionContext();
- if (!context)
- return;
- auto dtlsTransport = getOrCreateDtlsTransport(sctpBackend->dtlsTransportBackend().moveToUniquePtr());
- if (!dtlsTransport)
- return;
- m_sctpTransport = RTCSctpTransport::create(*context, makeUniqueRefFromNonNullUniquePtr(WTFMove(sctpBackend)), dtlsTransport.releaseNonNull());
+ m_sctpTransport->updateMaxMessageSize(maxMessageSize);
}
#if !RELEASE_LOG_DISABLED
diff --git a/Source/WebCore/Modules/mediastream/RTCPeerConnection.h b/Source/WebCore/Modules/mediastream/RTCPeerConnection.h
index a9b4371b841d5..08ee575633d3b 100644
--- a/Source/WebCore/Modules/mediastream/RTCPeerConnection.h
+++ b/Source/WebCore/Modules/mediastream/RTCPeerConnection.h
@@ -191,7 +191,7 @@ class RTCPeerConnection final
void updateDescriptions(PeerConnectionBackend::DescriptionStates&&);
void updateTransceiversAfterSuccessfulLocalDescription();
void updateTransceiversAfterSuccessfulRemoteDescription();
- void updateSctpBackend(std::unique_ptr&&);
+ void updateSctpBackend(std::unique_ptr&&, std::optional);
void processIceTransportStateChange(RTCIceTransport&);
void processIceTransportChanges();
diff --git a/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h b/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h
index a7999029f1583..d7f6613563e75 100644
--- a/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h
+++ b/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h
@@ -32,7 +32,7 @@
#if ENABLE(WEB_RTC)
-#include "MediaStreamTrack.h"
+#include "MediaStream.h"
#include "RTCDtlsTransport.h"
#include "RTCRtpReceiverBackend.h"
#include "RTCRtpSynchronizationSource.h"
@@ -79,6 +79,8 @@ class RTCRtpReceiver final : public RefCounted
std::optional transform();
ExceptionOr setTransform(std::unique_ptr&&);
+ const Vector>& associatedStreams() const { return m_associatedStreams; }
+ void setAssociatedStreams(Vector>&& streams) { m_associatedStreams = WTFMove(streams); }
private:
RTCRtpReceiver(PeerConnectionBackend&, Ref&&, std::unique_ptr&&);
@@ -94,6 +96,7 @@ class RTCRtpReceiver final : public RefCounted
std::unique_ptr m_backend;
WeakPtr m_connection;
std::unique_ptr m_transform;
+ Vector> m_associatedStreams;
#if !RELEASE_LOG_DISABLED
Ref m_logger;
const void* m_logIdentifier { nullptr };
diff --git a/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h b/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h
index 7dfb01948a502..f00f4cd5b0318 100644
--- a/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h
+++ b/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h
@@ -72,10 +72,14 @@ class RTCRtpTransceiver final : public RefCounted, public Scr
RTCRtpTransceiverBackend* backend() { return m_backend.get(); }
void setConnection(RTCPeerConnection&);
+ std::optional firedDirection() const { return m_firedDirection; }
+ void setFiredDirection(std::optional firedDirection) { m_firedDirection = firedDirection; }
+
private:
RTCRtpTransceiver(Ref&&, Ref&&, std::unique_ptr&&);
RTCRtpTransceiverDirection m_direction;
+ std::optional m_firedDirection;
Ref m_sender;
Ref m_receiver;
diff --git a/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp b/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp
index 278903b22bb35..e6260f95291c3 100644
--- a/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp
+++ b/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp
@@ -74,8 +74,7 @@ void RTCSctpTransport::onStateChanged(RTCSctpTransportState state, std::optional
if (m_state == RTCSctpTransportState::Closed)
return;
- if (maxMessageSize)
- m_maxMessageSize = *maxMessageSize;
+ m_maxMessageSize = maxMessageSize;
if (maxChannels)
m_maxChannels = *maxChannels;
@@ -86,6 +85,11 @@ void RTCSctpTransport::onStateChanged(RTCSctpTransportState state, std::optional
});
}
+void RTCSctpTransport::updateMaxMessageSize(std::optional maxMessageSize)
+{
+ m_maxMessageSize = maxMessageSize;
+}
+
} // namespace WebCore
#endif // ENABLE(WEB_RTC)
diff --git a/Source/WebCore/Modules/mediastream/RTCSctpTransport.h b/Source/WebCore/Modules/mediastream/RTCSctpTransport.h
index ec3745b97a1a3..7b529c04e1642 100644
--- a/Source/WebCore/Modules/mediastream/RTCSctpTransport.h
+++ b/Source/WebCore/Modules/mediastream/RTCSctpTransport.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2021 Apple Inc. All rights reserved.
+ * Copyright (C) 2021-2024 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -46,10 +46,10 @@ class RTCSctpTransport final : public RefCounted, public Activ
RTCDtlsTransport& transport() { return m_transport.get(); }
RTCSctpTransportState state() const { return m_state; }
- double maxMessageSize() const { return m_maxMessageSize; }
+ double maxMessageSize() const { return m_maxMessageSize.value_or(std::numeric_limits::infinity()); }
std::optional maxChannels() const { return m_maxChannels; }
- void update() { }
+ void updateMaxMessageSize(std::optional);
const RTCSctpTransportBackend& backend() const { return m_backend.get(); }
@@ -73,7 +73,7 @@ class RTCSctpTransport final : public RefCounted, public Activ
UniqueRef m_backend;
Ref m_transport;
RTCSctpTransportState m_state { RTCSctpTransportState::Connecting };
- double m_maxMessageSize { std::numeric_limits::max() };
+ std::optional m_maxMessageSize;
std::optional m_maxChannels;
};
diff --git a/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h b/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h
index c991e7dd884f7..8b80cfb75d4e9 100644
--- a/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h
+++ b/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h
@@ -54,6 +54,11 @@ inline bool operator==(const RTCSctpTransportBackend& a, const RTCSctpTransportB
return a.backend() == b.backend();
}
+inline bool operator!=(const RTCSctpTransportBackend& a, const RTCSctpTransportBackend& b)
+{
+ return !(a == b);
+}
+
} // namespace WebCore
#endif // ENABLE(WEB_RTC)
diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp
index 41d3237505891..cfc55dff49cfe 100644
--- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp
+++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp
@@ -80,6 +80,8 @@ GStreamerMediaEndpoint::GStreamerMediaEndpoint(GStreamerPeerConnectionBackend& p
std::call_once(debugRegisteredFlag, [] {
GST_DEBUG_CATEGORY_INIT(webkit_webrtc_endpoint_debug, "webkitwebrtcendpoint", 0, "WebKit WebRTC end-point");
});
+
+ initializePipeline();
}
GStreamerMediaEndpoint::~GStreamerMediaEndpoint()
@@ -168,6 +170,10 @@ bool GStreamerMediaEndpoint::initializePipeline()
}), this);
#ifndef GST_DISABLE_GST_DEBUG
+ g_signal_connect_swapped(m_webrtcBin.get(), "on-new-transceiver", G_CALLBACK(+[](GStreamerMediaEndpoint* endPoint, GstWebRTCRTPTransceiver* transceiver) {
+ GST_DEBUG_OBJECT(endPoint->m_webrtcBin.get(), "New transceiver: %" GST_PTR_FORMAT, transceiver);
+ }), this);
+
g_signal_connect(m_webrtcBin.get(), "notify::connection-state", G_CALLBACK(+[](GstElement* webrtcBin, GParamSpec*, GStreamerMediaEndpoint* endPoint) {
GstWebRTCPeerConnectionState state;
g_object_get(webrtcBin, "connection-state", &state, nullptr);
@@ -244,12 +250,6 @@ void GStreamerMediaEndpoint::disposeElementChain(GstElement* element)
bool GStreamerMediaEndpoint::setConfiguration(MediaEndpointConfiguration& configuration)
{
- if (m_pipeline)
- teardownPipeline();
-
- if (!initializePipeline())
- return false;
-
auto bundlePolicy = bundlePolicyFromConfiguration(configuration);
auto iceTransportPolicy = iceTransportPolicyFromConfiguration(configuration);
g_object_set(m_webrtcBin.get(), "bundle-policy", bundlePolicy, "ice-transport-policy", iceTransportPolicy, nullptr);
@@ -286,9 +286,8 @@ bool GStreamerMediaEndpoint::setConfiguration(MediaEndpointConfiguration& config
// WIP: https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/302
GST_FIXME("%zu custom certificates not propagated to webrtcbin", configuration.certificates.size());
- gst_element_set_state(m_pipeline.get(), GST_STATE_READY);
- GST_DEBUG_OBJECT(m_pipeline.get(), "End-point ready");
gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
+ GST_DEBUG_OBJECT(m_pipeline.get(), "End-point ready");
return true;
}
@@ -317,7 +316,7 @@ static std::optional> fetchDescription(GstElement*
const auto attribute = gst_sdp_message_get_attribute(description->sdp, i);
if (!g_strcmp0(attribute->key, "end-of-candidates")) {
gst_sdp_message_remove_attribute(description->sdp, i);
- i--;
+ break;
}
}
@@ -375,6 +374,119 @@ static std::optional descriptionsFromW
};
}
+struct GStreamerMediaEndpointTransceiverState {
+    String mid;
+    Vector<String> receiverStreamIds;
+    std::optional<RTCRtpTransceiverDirection> firedDirection;
+
+    GStreamerMediaEndpointTransceiverState isolatedCopy() &&;
+};
+
+inline GStreamerMediaEndpointTransceiverState GStreamerMediaEndpointTransceiverState::isolatedCopy() &&
+{
+ return {
+ WTFMove(mid).isolatedCopy(),
+ crossThreadCopy(WTFMove(receiverStreamIds)),
+ firedDirection
+ };
+}
+
+Vector<String> getMediaStreamIdsFromSDPMedia(const GstSDPMedia& media)
+{
+    HashSet<String> mediaStreamIdsSet;
+ for (guint i = 0; i < gst_sdp_media_attributes_len(&media); ++i) {
+ const auto attribute = gst_sdp_media_get_attribute(&media, i);
+ if (!g_strcmp0(attribute->key, "msid")) {
+ auto components = String::fromUTF8(attribute->value).split(' ');
+ if (components.size() < 2)
+ continue;
+ mediaStreamIdsSet.add(components[0]);
+ }
+ // MSID may also come in ssrc attributes, specially if they're in an SDP answer. They look like:
+ // a=ssrc:3612593434 msid:e1019f4a-0983-4863-b923-b75903cced2c webrtctransceiver1
+ if (!g_strcmp0(attribute->key, "ssrc")) {
+ auto outerComponents = String::fromUTF8(attribute->value).split(' ');
+ for (auto& outer : outerComponents) {
+ auto innerComponents = outer.split(':');
+ if (innerComponents.size() < 2)
+ continue;
+ if (innerComponents[0] == "msid"_s)
+ mediaStreamIdsSet.add(innerComponents[1]);
+ }
+ }
+ }
+    Vector<String> mediaStreamIds;
+ mediaStreamIds.reserveCapacity(mediaStreamIdsSet.size());
+ for (const auto& msid : mediaStreamIdsSet)
+ mediaStreamIds.append(msid);
+ return mediaStreamIds;
+}
+
+inline bool isRecvDirection(GstWebRTCRTPTransceiverDirection direction)
+{
+ return direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV || direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY;
+}
+
+static std::optional<GStreamerMediaEndpointTransceiverState> toGStreamerMediaEndpointTransceiverState(GstElement* webrtcBin, GstWebRTCRTPTransceiver* transceiver)
+{
+    GRefPtr<GstWebRTCRTPReceiver> receiver;
+    GUniqueOutPtr<char> mid;
+    GstWebRTCRTPTransceiverDirection currentDirection;
+    guint mLineIndex;
+    g_object_get(transceiver, "receiver", &receiver.outPtr(), "current-direction", &currentDirection, "mlineindex", &mLineIndex, "mid", &mid.outPtr(), nullptr);
+#ifndef GST_DISABLE_GST_DEBUG
+    GUniquePtr<char> desc(g_enum_to_string(GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, currentDirection));
+    GST_TRACE_OBJECT(webrtcBin, "Receiver = %" GST_PTR_FORMAT ", current-direction = %s, mlineindex = %u, mid = %s", receiver.get(), desc.get(), mLineIndex, GST_STR_NULL(mid.get()));
+#endif
+
+    GUniqueOutPtr<GstWebRTCSessionDescription> localDescription, remoteDescription;
+ g_object_get(webrtcBin, "local-description", &localDescription.outPtr(), "remote-description", &remoteDescription.outPtr(), nullptr);
+
+#ifndef GST_DISABLE_GST_DEBUG
+ if (localDescription) {
+        GUniquePtr<char> sdp(gst_sdp_message_as_text(localDescription->sdp));
+ GST_TRACE_OBJECT(webrtcBin, "Local-description:\n%s", sdp.get());
+ }
+ if (remoteDescription) {
+        GUniquePtr<char> sdp(gst_sdp_message_as_text(remoteDescription->sdp));
+ GST_TRACE_OBJECT(webrtcBin, "Remote-description:\n%s", sdp.get());
+ }
+#endif
+
+    Vector<String> streamIds;
+ if (remoteDescription && remoteDescription->sdp && mLineIndex < gst_sdp_message_medias_len(remoteDescription->sdp)) {
+ const GstSDPMedia* media = gst_sdp_message_get_media(remoteDescription->sdp, mLineIndex);
+ if (isRecvDirection(currentDirection))
+ streamIds = getMediaStreamIdsFromSDPMedia(*media);
+ }
+
+ if (UNLIKELY(!mid))
+ return { };
+
+ return { { String::fromUTF8(mid.get()), WTFMove(streamIds), { toRTCRtpTransceiverDirection(currentDirection) } } };
+}
+
+static Vector<GStreamerMediaEndpointTransceiverState> transceiverStatesFromWebRTCBin(GstElement* webrtcBin)
+{
+    Vector<GStreamerMediaEndpointTransceiverState> states;
+    GRefPtr<GArray> transceivers;
+ g_signal_emit_by_name(webrtcBin, "get-transceivers", &transceivers.outPtr());
+ GST_TRACE_OBJECT(webrtcBin, "Filling transceiver states for %u transceivers", transceivers ? transceivers->len : 0);
+ if (!transceivers || !transceivers->len)
+ return states;
+
+ states.reserveInitialCapacity(transceivers->len);
+ for (unsigned i = 0; i < transceivers->len; i++) {
+ GstWebRTCRTPTransceiver* transceiver = g_array_index(transceivers.get(), GstWebRTCRTPTransceiver*, i);
+ auto state = toGStreamerMediaEndpointTransceiverState(webrtcBin, transceiver);
+ if (!state)
+ continue;
+ states.append(WTFMove(*state));
+ }
+
+ return states;
+}
+
void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription* description)
{
RefPtr initialDescription = description;
@@ -463,9 +575,30 @@ void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription*
}
}
+#ifndef GST_DISABLE_GST_DEBUG
+ auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".setLocalDescription"_s);
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
+#endif
+
+ auto rtcTransceiverStates = transceiverStatesFromWebRTCBin(m_webrtcBin.get());
+ auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState {
+        auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr<MediaStream> {
+ return &mediaStreamFromRTCStream(id);
+ });
+ return { WTFMove(state.mid), WTFMove(streams), state.firedDirection };
+ });
+
    GRefPtr<GstWebRTCSCTPTransport> transport;
    g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr);
-    m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), transport ? makeUnique<GStreamerSctpTransportBackend>(WTFMove(transport)) : nullptr);
+
+    std::optional<double> maxMessageSize;
+    if (transport) {
+        uint64_t maxMessageSizeValue;
+        g_object_get(transport.get(), "max-message-size", &maxMessageSizeValue, nullptr);
+        maxMessageSize = static_cast<double>(maxMessageSizeValue);
+    }
+
+    m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), transport ? makeUnique<GStreamerSctpTransportBackend>(WTFMove(transport)) : nullptr, maxMessageSize);
}, [protectedThis = Ref(*this), this](const GError* error) {
if (protectedThis->isStopped())
return;
@@ -480,6 +613,22 @@ void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription*
});
}
+void GStreamerMediaEndpoint::setTransceiverCodecPreferences(const GstSDPMedia& media, guint transceiverIdx)
+{
+ auto direction = getDirectionFromSDPMedia(&media);
+ if (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE)
+ return;
+
+    GRefPtr<GstWebRTCRTPTransceiver> rtcTransceiver;
+ g_signal_emit_by_name(m_webrtcBin.get(), "get-transceiver", transceiverIdx, &rtcTransceiver.outPtr());
+ if (!rtcTransceiver)
+ return;
+
+ auto caps = capsFromSDPMedia(&media);
+ GST_TRACE_OBJECT(m_webrtcBin.get(), "Setting codec-preferences to %" GST_PTR_FORMAT, caps.get());
+ g_object_set(rtcTransceiver.get(), "codec-preferences", caps.get(), nullptr);
+}
+
void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription& description)
{
auto initialSDP = description.sdp().isolatedCopy();
@@ -491,17 +640,9 @@ void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription&
unsigned numberOfMedias = gst_sdp_message_medias_len(&message);
for (unsigned i = 0; i < numberOfMedias; i++) {
const auto* media = gst_sdp_message_get_media(&message, i);
- auto direction = getDirectionFromSDPMedia(media);
- if (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE)
- continue;
-
-        GRefPtr<GstWebRTCRTPTransceiver> rtcTransceiver;
- g_signal_emit_by_name(m_webrtcBin.get(), "get-transceiver", i, &rtcTransceiver.outPtr());
- if (!rtcTransceiver)
+ if (UNLIKELY(!media))
continue;
-
- auto caps = capsFromSDPMedia(media);
- g_object_set(rtcTransceiver.get(), "codec-preferences", caps.get(), nullptr);
+ setTransceiverCodecPreferences(*media, i);
}
}, [protectedThis = Ref(*this), this, initialSDP = WTFMove(initialSDP), localDescriptionSdp = WTFMove(localDescriptionSdp), localDescriptionSdpType = WTFMove(localDescriptionSdpType)](const GstSDPMessage& message) {
if (protectedThis->isStopped())
@@ -537,9 +678,30 @@ void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription&
}
}
+#ifndef GST_DISABLE_GST_DEBUG
+ auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".setRemoteDescription"_s);
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
+#endif
+
+ auto rtcTransceiverStates = transceiverStatesFromWebRTCBin(m_webrtcBin.get());
+ auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState {
+        auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr<MediaStream> {
+ return &mediaStreamFromRTCStream(id);
+ });
+ return { WTFMove(state.mid), WTFMove(streams), state.firedDirection };
+ });
+
    GRefPtr<GstWebRTCSCTPTransport> transport;
    g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr);
-    m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), transport ? makeUnique<GStreamerSctpTransportBackend>(WTFMove(transport)) : nullptr);
+
+    std::optional<double> maxMessageSize;
+    if (transport) {
+        uint64_t maxMessageSizeValue;
+        g_object_get(transport.get(), "max-message-size", &maxMessageSizeValue, nullptr);
+        maxMessageSize = static_cast<double>(maxMessageSizeValue);
+    }
+
+    m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), transport ? makeUnique<GStreamerSctpTransportBackend>(WTFMove(transport)) : nullptr, maxMessageSize);
}, [protectedThis = Ref(*this), this](const GError* error) {
if (protectedThis->isStopped())
return;
@@ -565,6 +727,8 @@ WEBKIT_DEFINE_ASYNC_DATA_STRUCT(SetDescriptionCallData)
void GStreamerMediaEndpoint::setDescription(const RTCSessionDescription* description, DescriptionType descriptionType, Function&& preProcessCallback, Function&& successCallback, Function&& failureCallback)
{
+ GST_DEBUG_OBJECT(m_webrtcBin.get(), "Setting %s description", descriptionType == DescriptionType::Local ? "local" : "remote");
+
    GUniqueOutPtr<GstSDPMessage> message;
auto sdpType = RTCSdpType::Offer;
@@ -979,48 +1143,49 @@ void GStreamerMediaEndpoint::connectIncomingTrack(WebRTCTrackData& data)
GST_WARNING_OBJECT(m_pipeline.get(), "SDP media for transceiver %u not found, skipping incoming track setup", mLineIndex);
return;
}
-
        transceiver = &m_peerConnectionBackend.newRemoteTransceiver(makeUnique<GStreamerRtpTransceiverBackend>(WTFMove(rtcTransceiver)), data.type, trackIdFromSDPMedia(*media));
}
- m_peerConnectionBackend.addPendingTrackEvent({ Ref(transceiver->receiver()), Ref(transceiver->receiver().track()), { }, Ref(*transceiver) });
-
auto mediaStreamBin = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_pipeline.get()), data.mediaStreamBinName.ascii().data()));
- auto tee = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(mediaStreamBin.get()), "tee"));
- GstElement* bin = nullptr;
auto& track = transceiver->receiver().track();
auto& source = track.privateTrack().source();
if (source.isIncomingAudioSource()) {
        auto& audioSource = static_cast<RealtimeIncomingAudioSourceGStreamer&>(source);
- audioSource.setUpstreamBin(mediaStreamBin);
- audioSource.setIsUpstreamDecoding(data.isUpstreamDecoding);
- bin = audioSource.bin();
+ if (!audioSource.setBin(mediaStreamBin))
+ return;
} else if (source.isIncomingVideoSource()) {
        auto& videoSource = static_cast<RealtimeIncomingVideoSourceGStreamer&>(source);
- videoSource.setUpstreamBin(mediaStreamBin);
- videoSource.setIsUpstreamDecoding(data.isUpstreamDecoding);
- bin = videoSource.bin();
+ if (!videoSource.setBin(mediaStreamBin))
+ return;
}
- ASSERT(bin);
- gst_bin_add(GST_BIN_CAST(m_pipeline.get()), bin);
+ m_pendingIncomingMediaStreamIDs.append(data.mediaStreamId);
- auto& mediaStream = mediaStreamFromRTCStream(data.mediaStreamId);
- mediaStream.addTrackFromPlatform(track);
+ unsigned totalExpectedMediaTracks = 0;
+ for (unsigned i = 0; i < gst_sdp_message_medias_len(description->sdp); i++) {
+ const auto media = gst_sdp_message_get_media(description->sdp, i);
+ const char* mediaType = gst_sdp_media_get_media(media);
+ if (g_str_equal(mediaType, "audio") || g_str_equal(mediaType, "video"))
+ totalExpectedMediaTracks++;
+ }
- for (auto& processor : m_trackProcessors.values()) {
- if (!processor->isReady())
- return;
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Expecting %u media tracks", totalExpectedMediaTracks);
+ if (m_pendingIncomingMediaStreamIDs.size() < totalExpectedMediaTracks) {
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Only %zu track(s) received so far", m_pendingIncomingMediaStreamIDs.size());
+ return;
}
- GST_DEBUG_OBJECT(m_pipeline.get(), "Incoming streams gathered, now dispatching track events");
- m_peerConnectionBackend.dispatchPendingTrackEvents(mediaStream);
- gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
+ for (auto& mediaStreamID : m_pendingIncomingMediaStreamIDs) {
+ auto& mediaStream = mediaStreamFromRTCStream(mediaStreamID);
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Incoming stream %s ready, notifying observers", mediaStreamID.ascii().data());
+ mediaStream.privateStream().forEachTrack([](auto& track) {
+ GST_DEBUG("Incoming stream has track %s", track.id().ascii().data());
+ track.dataFlowStarted();
+ });
+ }
-#ifndef GST_DISABLE_GST_DEBUG
- auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".connected-"_s, data.mediaStreamId);
- GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
-#endif
+ m_pendingIncomingMediaStreamIDs.clear();
+ gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
}
void GStreamerMediaEndpoint::connectPad(GstPad* pad)
@@ -1036,7 +1201,7 @@ void GStreamerMediaEndpoint::connectPad(GstPad* pad)
auto sinkPad = adoptGRef(gst_element_get_static_pad(bin, "sink"));
gst_pad_link(pad, sinkPad.get());
- gst_element_sync_state_with_parent(bin);
+ gst_element_set_state(bin, GST_STATE_PAUSED);
#ifndef GST_DISABLE_GST_DEBUG
auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".pending-"_s, GST_OBJECT_NAME(pad));
diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h
index f33550eef92b4..7461e74bae71f 100644
--- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h
+++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h
@@ -162,6 +162,8 @@ class GStreamerMediaEndpoint : public ThreadSafeRefCounted isIceGatheringComplete(const String& currentLocalDescription);
+ void setTransceiverCodecPreferences(const GstSDPMedia&, guint transceiverIdx);
+
#if !RELEASE_LOG_DISABLED
void gatherStatsForLogging();
void startLoggingStats();
@@ -204,6 +206,8 @@ class GStreamerMediaEndpoint : public ThreadSafeRefCounted m_ssrcGenerator;
HashMap, RefPtr> m_trackProcessors;
+
+    Vector<String> m_pendingIncomingMediaStreamIDs;
};
} // namespace WebCore
diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp
index 453040150ae5b..b73079f67fa28 100644
--- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp
+++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp
@@ -323,24 +323,6 @@ void GStreamerPeerConnectionBackend::collectTransceivers()
m_endpoint->collectTransceivers();
}
-void GStreamerPeerConnectionBackend::addPendingTrackEvent(PendingTrackEvent&& event)
-{
- m_pendingTrackEvents.append(WTFMove(event));
-}
-
-void GStreamerPeerConnectionBackend::dispatchPendingTrackEvents(MediaStream& mediaStream)
-{
- auto events = WTFMove(m_pendingTrackEvents);
- for (auto& event : events) {
-        Vector<RefPtr<MediaStream>> pendingStreams;
-        pendingStreams.reserveInitialCapacity(1);
-        pendingStreams.uncheckedAppend(&mediaStream);
- event.streams = WTFMove(pendingStreams);
-
- dispatchTrackEvent(event);
- }
-}
-
void GStreamerPeerConnectionBackend::removeTrack(RTCRtpSender& sender)
{
m_endpoint->removeTrack(backendFromRTPSender(sender));
diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h
index 3ef339e3e41b8..cde33987becd7 100644
--- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h
+++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h
@@ -98,9 +98,6 @@ class GStreamerPeerConnectionBackend final : public PeerConnectionBackend {
void collectTransceivers() final;
- void addPendingTrackEvent(PendingTrackEvent&&);
- void dispatchPendingTrackEvents(MediaStream&);
-
bool isLocalDescriptionSet() const final { return m_isLocalDescriptionSet; }
template
@@ -120,8 +117,6 @@ class GStreamerPeerConnectionBackend final : public PeerConnectionBackend {
bool m_isLocalDescriptionSet { false };
bool m_isRemoteDescriptionSet { false };
- Vector m_pendingTrackEvents;
-
bool m_isReconfiguring { false };
};
diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp
index 93a45b645c9c0..2a5fa2f638c9d 100644
--- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp
+++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp
@@ -109,7 +109,7 @@ bool GStreamerRtpTransceiverBackend::stopped() const
return m_isStopped;
}
-static inline WARN_UNUSED_RETURN ExceptionOr<GstCaps*> toRtpCodecCapability(const RTCRtpCodecCapability& codec, int& dynamicPayloadType)
+static inline WARN_UNUSED_RETURN ExceptionOr<GstCaps*> toRtpCodecCapability(const RTCRtpCodecCapability& codec, int& dynamicPayloadType, const char* msid)
{
if (!codec.mimeType.startsWith("video/"_s) && !codec.mimeType.startsWith("audio/"_s))
return Exception { ExceptionCode::InvalidModificationError, "RTCRtpCodecCapability bad mimeType"_s };
@@ -135,16 +135,33 @@ static inline WARN_UNUSED_RETURN ExceptionOr toRtpCodecCapability(cons
}
}
+ if (msid)
+ gst_caps_set_simple(caps, "a-msid", G_TYPE_STRING, msid, nullptr);
+
GST_DEBUG("Codec capability: %" GST_PTR_FORMAT, caps);
return caps;
}
+static GUniquePtr<char> getMsidFromCurrentCodecPreferences(GstWebRTCRTPTransceiver* transceiver)
+{
+    GRefPtr<GstCaps> currentCaps;
+    GUniquePtr<char> msid;
+    g_object_get(transceiver, "codec-preferences", &currentCaps.outPtr(), nullptr);
+    GST_TRACE_OBJECT(transceiver, "Current codec preferences: %" GST_PTR_FORMAT, currentCaps.get());
+    if (gst_caps_get_size(currentCaps.get()) > 0) {
+        auto* s = gst_caps_get_structure(currentCaps.get(), 0);
+        msid = GUniquePtr<char>(g_strdup(gst_structure_get_string(s, "a-msid")));
+    }
+    return msid;
+}
+
ExceptionOr<void> GStreamerRtpTransceiverBackend::setCodecPreferences(const Vector<RTCRtpCodecCapability>& codecs)
{
    auto gstCodecs = adoptGRef(gst_caps_new_empty());
+    GUniquePtr<char> msid = getMsidFromCurrentCodecPreferences(m_rtcTransceiver.get());
int dynamicPayloadType = 96;
for (auto& codec : codecs) {
- auto result = toRtpCodecCapability(codec, dynamicPayloadType);
+ auto result = toRtpCodecCapability(codec, dynamicPayloadType, msid.get());
if (result.hasException())
return result.releaseException();
gst_caps_append(gstCodecs.get(), result.releaseReturnValue());
diff --git a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp
index 1de61cc018b90..0344d8646d66a 100644
--- a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp
+++ b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp
@@ -309,32 +309,16 @@ void LibWebRTCMediaEndpoint::OnSignalingChange(webrtc::PeerConnectionInterface::
{
}
-MediaStream& LibWebRTCMediaEndpoint::mediaStreamFromRTCStream(webrtc::MediaStreamInterface& rtcStream)
+MediaStream& LibWebRTCMediaEndpoint::mediaStreamFromRTCStreamId(const String& id)
{
- auto label = fromStdString(rtcStream.id());
- auto mediaStream = m_remoteStreamsById.ensure(label, [label, this]() mutable {
+ auto mediaStream = m_remoteStreamsById.ensure(id, [id, this]() mutable {
auto& document = downcast(*m_peerConnectionBackend.connection().scriptExecutionContext());
- return MediaStream::create(document, MediaStreamPrivate::create(document.logger(), { }, WTFMove(label)));
+ auto stream = MediaStream::create(document, MediaStreamPrivate::create(document.logger(), { }, String(id)));
+ return stream;
});
return *mediaStream.iterator->value;
}
-void LibWebRTCMediaEndpoint::addPendingTrackEvent(Ref<RTCRtpReceiver>&& receiver, MediaStreamTrack& track, const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>& rtcStreams, RefPtr<RTCRtpTransceiver>&& transceiver)
-{
-    Vector<RefPtr<MediaStream>> streams;
-    for (auto& rtcStream : rtcStreams) {
-        auto& mediaStream = mediaStreamFromRTCStream(*rtcStream.get());
-        streams.append(&mediaStream);
-        mediaStream.addTrackFromPlatform(track);
-    }
-    auto streamIds = WTF::map(streams, [](auto& stream) -> String {
-        return stream->id();
-    });
-    m_remoteStreamsFromRemoteTrack.add(&track, WTFMove(streamIds));
-
-    m_peerConnectionBackend.addPendingTrackEvent({ WTFMove(receiver), track, WTFMove(streams), WTFMove(transceiver) });
-}
-
void LibWebRTCMediaEndpoint::collectTransceivers()
{
if (!m_backend)
@@ -348,7 +332,7 @@ void LibWebRTCMediaEndpoint::collectTransceivers()
continue;
auto rtcReceiver = rtcTransceiver->receiver();
-        m_peerConnectionBackend.newRemoteTransceiver(makeUnique<LibWebRTCRtpTransceiverBackend>(WTFMove(rtcTransceiver)), rtcReceiver->media_type() == cricket::MEDIA_TYPE_AUDIO ? RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video);
+        existingTransceiver = &m_peerConnectionBackend.newRemoteTransceiver(makeUnique<LibWebRTCRtpTransceiverBackend>(WTFMove(rtcTransceiver)), rtcReceiver->media_type() == cricket::MEDIA_TYPE_AUDIO ? RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video);
}
}
@@ -359,37 +343,6 @@ std::optional LibWebRTCMediaEndpoint::canTrickleIceCandidates() const
return *m_backend->can_trickle_ice_candidates();
}
-void LibWebRTCMediaEndpoint::newTransceiver(rtc::scoped_refptr<webrtc::RtpTransceiverInterface>&& rtcTransceiver)
-{
-    auto rtcReceiver = rtcTransceiver->receiver();
-    auto* transceiver = m_peerConnectionBackend.existingTransceiver([&](auto& transceiverBackend) {
-        return rtcTransceiver.get() == transceiverBackend.rtcTransceiver();
-    });
-    if (!transceiver)
-        transceiver = &m_peerConnectionBackend.newRemoteTransceiver(makeUnique<LibWebRTCRtpTransceiverBackend>(WTFMove(rtcTransceiver)), rtcReceiver->media_type() == cricket::MEDIA_TYPE_AUDIO ? RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video);
-
-    addPendingTrackEvent(transceiver->receiver(), transceiver->receiver().track(), rtcReceiver->streams(), transceiver);
-}
-
-void LibWebRTCMediaEndpoint::removeRemoteTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface>&& receiver)
-{
- auto* transceiver = m_peerConnectionBackend.existingTransceiver([&receiver](auto& transceiverBackend) {
- auto* rtcTransceiver = transceiverBackend.rtcTransceiver();
- return rtcTransceiver && receiver.get() == rtcTransceiver->receiver().get();
- });
- if (!transceiver)
- return;
-
- auto& track = transceiver->receiver().track();
-
- for (auto& id : m_remoteStreamsFromRemoteTrack.get(&track)) {
- if (auto stream = m_remoteStreamsById.get(id))
- stream->privateStream().removeTrack(track.privateTrack());
- }
-
- track.source().setMuted(true);
-}
-
template
ExceptionOr LibWebRTCMediaEndpoint::createTransceiverBackends(T&& trackOrKind, webrtc::RtpTransceiverInit&& init, LibWebRTCRtpSenderBackend::Source&& source)
{
@@ -451,24 +404,6 @@ std::unique_ptr LibWebRTCMediaEndpoint::transcei
return nullptr;
}
-void LibWebRTCMediaEndpoint::OnTrack(rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver)
-{
- callOnMainThread([protectedThis = Ref { *this }, transceiver = WTFMove(transceiver)]() mutable {
- if (protectedThis->isStopped())
- return;
- protectedThis->newTransceiver(WTFMove(transceiver));
- });
-}
-
-void LibWebRTCMediaEndpoint::OnRemoveTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver)
-{
- callOnMainThread([protectedThis = Ref { *this }, receiver = WTFMove(receiver)]() mutable {
- if (protectedThis->isStopped())
- return;
- protectedThis->removeRemoteTrack(WTFMove(receiver));
- });
-}
-
std::unique_ptr<RTCDataChannelHandler> LibWebRTCMediaEndpoint::createDataChannel(const String& label, const RTCDataChannelInit& options)
{
auto init = LibWebRTCDataChannelHandler::fromRTCDataChannelInit(options);
@@ -647,7 +582,7 @@ static std::optional descriptionsFromP
void LibWebRTCMediaEndpoint::addIceCandidate(std::unique_ptr&& candidate, PeerConnectionBackend::AddIceCandidateCallback&& callback)
{
m_backend->AddIceCandidate(WTFMove(candidate), [task = createSharedTask(WTFMove(callback)), backend = m_backend](auto&& error) mutable {
- callOnMainThread([task = WTFMove(task), descriptions = descriptionsFromPeerConnection(backend.get()), error = WTFMove(error)]() mutable {
+ callOnMainThread([task = WTFMove(task), descriptions = crossThreadCopy(descriptionsFromPeerConnection(backend.get())), error = WTFMove(error)]() mutable {
if (!error.ok()) {
task->run(toException(error));
return;
@@ -666,7 +601,7 @@ void LibWebRTCMediaEndpoint::OnIceCandidate(const webrtc::IceCandidateInterface
auto sdpMLineIndex = safeCast(rtcCandidate->sdp_mline_index());
- callOnMainThread([protectedThis = Ref { *this }, descriptions = descriptionsFromPeerConnection(m_backend.get()), mid = fromStdString(rtcCandidate->sdp_mid()), sdp = fromStdString(sdp), sdpMLineIndex, url = fromStdString(rtcCandidate->server_url())]() mutable {
+ callOnMainThread([protectedThis = Ref { *this }, descriptions = crossThreadCopy(descriptionsFromPeerConnection(m_backend.get())), mid = fromStdString(rtcCandidate->sdp_mid()), sdp = fromStdString(sdp), sdpMLineIndex, url = fromStdString(rtcCandidate->server_url())]() mutable {
if (protectedThis->isStopped())
return;
protectedThis->m_peerConnectionBackend.newICECandidate(WTFMove(sdp), WTFMove(mid), sdpMLineIndex, WTFMove(url), WTFMove(descriptions));
@@ -709,6 +644,7 @@ class SctpTransportState {
public:
    explicit SctpTransportState(rtc::scoped_refptr<webrtc::SctpTransportInterface>&&);
    std::unique_ptr<LibWebRTCSctpTransportBackend> createBackend();
+    std::optional<double> maxMessageSize() const;
private:
    rtc::scoped_refptr<webrtc::SctpTransportInterface> m_transport;
@@ -729,12 +665,73 @@ std::unique_ptr SctpTransportState::createBackend
    return makeUnique<LibWebRTCSctpTransportBackend>(WTFMove(m_transport), m_information.dtls_transport());
}
+std::optional<double> SctpTransportState::maxMessageSize() const
+{
+    return m_information.MaxMessageSize() ? std::make_optional(*m_information.MaxMessageSize()) : std::nullopt;
+}
+
+struct LibWebRTCMediaEndpointTransceiverState {
+    String mid;
+    Vector<String> receiverStreamIds;
+    std::optional<RTCRtpTransceiverDirection> firedDirection;
+
+    LibWebRTCMediaEndpointTransceiverState isolatedCopy() &&;
+};
+
+inline LibWebRTCMediaEndpointTransceiverState LibWebRTCMediaEndpointTransceiverState::isolatedCopy() &&
+{
+ return {
+ WTFMove(mid).isolatedCopy(),
+ crossThreadCopy(WTFMove(receiverStreamIds)),
+ firedDirection
+ };
+}
+
+static LibWebRTCMediaEndpointTransceiverState toLibWebRTCMediaEndpointTransceiverState(const webrtc::RtpTransceiverInterface& transceiver)
+{
+ String mid;
+ if (auto rtcMid = transceiver.mid())
+ mid = fromStdString(*rtcMid);
+    std::optional<RTCRtpTransceiverDirection> firedDirection;
+ if (auto rtcFiredDirection = transceiver.fired_direction())
+ firedDirection = toRTCRtpTransceiverDirection(*rtcFiredDirection);
+
+ auto rtcStreamIds = transceiver.receiver()->stream_ids();
+    Vector<String> streamIds;
+ streamIds.reserveInitialCapacity(rtcStreamIds.size());
+ for (auto& streamId : rtcStreamIds)
+ streamIds.uncheckedAppend(fromStdString(streamId));
+
+ return { WTFMove(mid), WTFMove(streamIds), firedDirection };
+}
+
+static Vector<LibWebRTCMediaEndpointTransceiverState> transceiverStatesFromPeerConnection(webrtc::PeerConnectionInterface& connection)
+{
+    auto transceivers = connection.GetTransceivers();
+    Vector<LibWebRTCMediaEndpointTransceiverState> states;
+ states.reserveInitialCapacity(transceivers.size());
+ for (auto& transceiver : transceivers)
+ states.uncheckedAppend(toLibWebRTCMediaEndpointTransceiverState(*transceiver));
+
+ return states;
+}
+
void LibWebRTCMediaEndpoint::setLocalSessionDescriptionSucceeded()
{
- callOnMainThread([protectedThis = Ref { *this }, descriptions = descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable {
+ if (!m_backend)
+ return;
+
+ callOnMainThread([protectedThis = Ref { *this }, this, descriptions = crossThreadCopy(descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes)), rtcTransceiverStates = crossThreadCopy(transceiverStatesFromPeerConnection(*m_backend)), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable {
if (protectedThis->isStopped())
return;
- protectedThis->m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), sctpState.createBackend());
+
+ auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState {
+        auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr<MediaStream> {
+ return &mediaStreamFromRTCStreamId(id);
+ });
+ return { WTFMove(state.mid), WTFMove(streams), state.firedDirection };
+ });
+ protectedThis->m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend(), sctpState.maxMessageSize());
});
}
@@ -746,13 +743,22 @@ void LibWebRTCMediaEndpoint::setLocalSessionDescriptionFailed(ExceptionCode erro
protectedThis->m_peerConnectionBackend.setLocalDescriptionFailed(Exception { errorCode, WTFMove(errorMessage) });
});
}
-
void LibWebRTCMediaEndpoint::setRemoteSessionDescriptionSucceeded()
{
- callOnMainThread([protectedThis = Ref { *this }, descriptions = descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable {
+ if (!m_backend)
+ return;
+
+ callOnMainThread([protectedThis = Ref { *this }, this, descriptions = crossThreadCopy(descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes)), rtcTransceiverStates = crossThreadCopy(transceiverStatesFromPeerConnection(*m_backend)), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable {
if (protectedThis->isStopped())
return;
- protectedThis->m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), sctpState.createBackend());
+
+ auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState {
+        auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr<MediaStream> {
+ return &mediaStreamFromRTCStreamId(id);
+ });
+ return { WTFMove(state.mid), WTFMove(streams), state.firedDirection };
+ });
+ protectedThis->m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend(), sctpState.maxMessageSize());
});
}
diff --git a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h
index 62dc91d39c165..a4980ba801172 100644
--- a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h
+++ b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h
@@ -128,8 +128,6 @@ class LibWebRTCMediaEndpoint
// webrtc::PeerConnectionObserver API
void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState) final;
    void OnDataChannel(rtc::scoped_refptr<webrtc::DataChannelInterface>) final;
-    void OnTrack(rtc::scoped_refptr<webrtc::RtpTransceiverInterface>) final;
-    void OnRemoveTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface>) final;
void OnNegotiationNeededEvent(uint32_t) final;
void OnStandardizedIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState) final;
@@ -143,10 +141,6 @@ class LibWebRTCMediaEndpoint
void setLocalSessionDescriptionFailed(ExceptionCode, const char*);
void setRemoteSessionDescriptionSucceeded();
void setRemoteSessionDescriptionFailed(ExceptionCode, const char*);
- void newTransceiver(rtc::scoped_refptr<webrtc::RtpTransceiverInterface>&&);
- void removeRemoteTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface>&&);
-
- void addPendingTrackEvent(Ref<RTCRtpReceiver>&&, MediaStreamTrack&, const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&, RefPtr<RTCRtpTransceiver>&&);
template<typename T>
ExceptionOr<Backends> createTransceiverBackends(T&&, webrtc::RtpTransceiverInit&&, LibWebRTCRtpSenderBackend::Source&&);
@@ -158,7 +152,7 @@ class LibWebRTCMediaEndpoint
rtc::scoped_refptr<LibWebRTCStatsCollector> createStatsCollector(Ref<DeferredPromise>&&);
- MediaStream& mediaStreamFromRTCStream(webrtc::MediaStreamInterface&);
+ MediaStream& mediaStreamFromRTCStreamId(const String&);
void AddRef() const { ref(); }
rtc::RefCountReleaseStatus Release() const
diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
index 2fc5d4f45a8ea..7c99426aa1c95 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp
@@ -1005,6 +1005,18 @@ bool gstObjectHasProperty(GstPad* pad, const char* name)
return gstObjectHasProperty(GST_OBJECT_CAST(pad), name);
}
+std::optional<int> gstGetAutoplugSelectResult(ASCIILiteral nick)
+{
+ static auto enumClass = static_cast<GEnumClass*>(g_type_class_ref(g_type_from_name("GstAutoplugSelectResult")));
+ RELEASE_ASSERT(enumClass);
+ auto enumValue = g_enum_get_value_by_nick(enumClass, nick.characters());
+ if (!enumValue)
+ return std::nullopt;
+ return enumValue->value;
+}
+
+#undef GST_CAT_DEFAULT
+
} // namespace WebCore
#if !GST_CHECK_VERSION(1, 20, 0)
diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
index e1b67aff32471..6e505a7c43bf4 100644
--- a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
+++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
@@ -409,6 +409,8 @@ void configureVideoRTPDepayloader(GstElement*);
bool gstObjectHasProperty(GstElement*, const char* name);
bool gstObjectHasProperty(GstPad*, const char* name);
+std::optional<int> gstGetAutoplugSelectResult(ASCIILiteral);
+
void registerActivePipeline(const GRefPtr&);
void unregisterPipeline(const GRefPtr&);
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
index bc5dc486bfd74..4a82a4e19aef8 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
@@ -3042,6 +3042,10 @@ void MediaPlayerPrivateGStreamer::configureVideoDecoder(GstElement* decoder)
g_object_set(decoder, "max-errors", -1, nullptr);
auto pad = adoptGRef(gst_element_get_static_pad(decoder, "src"));
+ if (!pad) {
+ GST_INFO_OBJECT(pipeline(), "the decoder %s does not have a src pad, probably because it's a hardware decoder sink, can't get decoder stats", name.get());
+ return;
+ }
gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER), [](GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
auto* player = static_cast(userData);
if (GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER) {
@@ -3595,8 +3599,21 @@ void MediaPlayerPrivateGStreamer::updateVideoSizeAndOrientationFromCaps(const Gs
if (m_videoSourceOrientation.usesWidthAsHeight())
originalSize = originalSize.transposedSize();
+ auto scopeExit = makeScopeExit([&] {
+ if (auto* player = m_player) {
+ GST_DEBUG_OBJECT(pipeline(), "Notifying sizeChanged event to upper layer");
+ player->sizeChanged();
+ }
+ });
+
GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
- GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
+ if (isMediaStreamPlayer()) {
+ GST_DEBUG_OBJECT(pipeline(), "Using original MediaStream track video intrinsic size");
+ m_videoSize = originalSize;
+ return;
+ }
+
+ GST_DEBUG_OBJECT(pipeline(), "Applying pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
// Calculate DAR based on PAR and video size.
int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
@@ -3625,7 +3642,6 @@ void MediaPlayerPrivateGStreamer::updateVideoSizeAndOrientationFromCaps(const Gs
GST_DEBUG_OBJECT(pipeline(), "Saving natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
m_videoSize = FloatSize(static_cast(width), static_cast(height));
- m_player->sizeChanged();
}
void MediaPlayerPrivateGStreamer::setCachedPosition(const MediaTime& cachedPosition) const
diff --git a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp
index 0ef739ee4c562..50528cdc2cf8a 100644
--- a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp
+++ b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp
@@ -51,17 +51,6 @@ GStreamerQuirkWesteros::GStreamerQuirkWesteros()
}
}
-void GStreamerQuirkWesteros::configureElement(GstElement* element, const OptionSet& characteristics)
-{
- if (!characteristics.contains(ElementRuntimeCharacteristics::IsMediaStream))
- return;
-
- if (!g_strcmp0(G_OBJECT_TYPE_NAME(G_OBJECT(element)), "GstWesterosSink") && gstObjectHasProperty(element, "immediate-output")) {
- GST_INFO("Enable 'immediate-output' in WesterosSink");
- g_object_set(element, "immediate-output", TRUE, nullptr);
- }
-}
-
std::optional GStreamerQuirkWesteros::isHardwareAccelerated(GstElementFactory* factory)
{
if (g_str_has_prefix(GST_OBJECT_NAME(factory), "westeros"))
diff --git a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h
index 518e512ecafec..59b62a0505fe6 100644
--- a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h
+++ b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h
@@ -31,7 +31,6 @@ class GStreamerQuirkWesteros final : public GStreamerQuirk {
GStreamerQuirkWesteros();
const char* identifier() final { return "Westeros"; }
- void configureElement(GstElement*, const OptionSet&) final;
std::optional isHardwareAccelerated(GstElementFactory*) final;
private:
diff --git a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp
index 757ffe5d78173..189e93ef0d3dd 100644
--- a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp
+++ b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp
@@ -110,7 +110,7 @@ void MediaStreamTrackPrivate::setContentHint(HintValue hintValue)
{
m_contentHint = hintValue;
}
-
+
bool MediaStreamTrackPrivate::muted() const
{
return m_source->muted();
@@ -121,6 +121,13 @@ bool MediaStreamTrackPrivate::isCaptureTrack() const
return m_source->isCaptureSource();
}
+void MediaStreamTrackPrivate::dataFlowStarted()
+{
+ forEachObserver([this](auto& observer) {
+ observer.dataFlowStarted(*this);
+ });
+}
+
void MediaStreamTrackPrivate::setEnabled(bool enabled)
{
if (m_isEnabled == enabled)
diff --git a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h
index 9ccf0fc68f453..739516955f7ea 100644
--- a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h
+++ b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h
@@ -59,6 +59,7 @@ class MediaStreamTrackPrivate final
virtual void trackSettingsChanged(MediaStreamTrackPrivate&) = 0;
virtual void trackEnabledChanged(MediaStreamTrackPrivate&) = 0;
virtual void readyStateChanged(MediaStreamTrackPrivate&) { };
+ virtual void dataFlowStarted(MediaStreamTrackPrivate&) { };
};
static Ref<MediaStreamTrackPrivate> create(Ref<const Logger>&&, Ref<RealtimeMediaSource>&&);
@@ -76,13 +77,14 @@ class MediaStreamTrackPrivate final
enum class HintValue { Empty, Speech, Music, Motion, Detail, Text };
HintValue contentHint() const { return m_contentHint; }
void setContentHint(HintValue);
-
+
void startProducingData() { m_source->start(); }
void stopProducingData() { m_source->stop(); }
bool isProducingData() { return m_source->isProducingData(); }
bool muted() const;
void setMuted(bool muted) { m_source->setMuted(muted); }
+ void dataFlowStarted();
bool isCaptureTrack() const;
diff --git a/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h b/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h
index 7c0d67db592a3..15d7136bdc131 100644
--- a/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h
+++ b/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h
@@ -27,6 +27,8 @@
#if ENABLE(WEB_RTC)
+#include <wtf/text/WTFString.h>
+
namespace WebCore {
enum class RTCRtpTransceiverDirection {
@@ -36,6 +38,8 @@ enum class RTCRtpTransceiverDirection {
Inactive
};
+String convertEnumerationToString(RTCRtpTransceiverDirection); // in JSRTCRtpTransceiverDirection.h
+
} // namespace WebCore
#endif
diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp
index c429779937d54..575061a5a502b 100644
--- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp
+++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp
@@ -22,6 +22,7 @@
#if USE(GSTREAMER_WEBRTC)
#include "GStreamerCommon.h"
+#include "GStreamerQuirks.h"
#include "GStreamerRegistryScanner.h"
GST_DEBUG_CATEGORY(webkit_webrtc_incoming_track_processor_debug);
@@ -41,33 +42,69 @@ void GStreamerIncomingTrackProcessor::configure(GStreamerMediaEndpoint* endPoint
{
m_endPoint = endPoint;
m_pad = WTFMove(pad);
- m_data.mediaStreamBinName = makeString(GST_OBJECT_NAME(m_pad.get()));
- m_bin = gst_bin_new(m_data.mediaStreamBinName.ascii().data());
auto caps = adoptGRef(gst_pad_get_current_caps(m_pad.get()));
if (!caps)
caps = adoptGRef(gst_pad_query_caps(m_pad.get(), nullptr));
- GST_DEBUG_OBJECT(m_bin.get(), "Processing track with caps %" GST_PTR_FORMAT, caps.get());
- m_data.type = doCapsHaveType(caps.get(), "audio") ? RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video;
+ ASCIILiteral typeName;
+ if (doCapsHaveType(caps.get(), "audio")) {
+ typeName = "audio"_s;
+ m_data.type = RealtimeMediaSource::Type::Audio;
+ } else {
+ typeName = "video"_s;
+ m_data.type = RealtimeMediaSource::Type::Video;
+ }
m_data.caps = WTFMove(caps);
+ m_data.mediaStreamBinName = makeString("incoming-"_s, typeName, "-track-"_s, GST_OBJECT_NAME(m_pad.get()));
+ m_bin = gst_bin_new(m_data.mediaStreamBinName.ascii().data());
+ GST_DEBUG_OBJECT(m_bin.get(), "Processing track with caps %" GST_PTR_FORMAT, m_data.caps.get());
+
g_object_get(m_pad.get(), "transceiver", &m_data.transceiver.outPtr(), nullptr);
- retrieveMediaStreamAndTrackIdFromSDP();
+
+ auto structure = gst_caps_get_structure(m_data.caps.get(), 0);
+ unsigned ssrc;
+ if (gst_structure_get_uint(structure, "ssrc", &ssrc)) {
+ auto msIdAttributeName = makeString("ssrc-"_s, ssrc, "-msid"_s);
+ auto msIdAttribute = gst_structure_get_string(structure, msIdAttributeName.ascii().data());
+ auto components = String::fromUTF8(msIdAttribute).split(' ');
+ if (components.size() == 2)
+ m_sdpMsIdAndTrackId = { components[0], components[1] };
+ }
+
+ if (m_sdpMsIdAndTrackId.second.isEmpty())
+ retrieveMediaStreamAndTrackIdFromSDP();
+
m_data.mediaStreamId = mediaStreamIdFromPad();
if (!m_sdpMsIdAndTrackId.second.isEmpty())
m_data.trackId = m_sdpMsIdAndTrackId.second;
- m_tee = gst_element_factory_make("tee", "tee");
- g_object_set(m_tee.get(), "allow-not-linked", TRUE, nullptr);
+ m_sink = gst_element_factory_make("fakesink", "sink");
+ g_object_set(m_sink.get(), "sync", TRUE, "enable-last-sample", FALSE, nullptr);
+ auto queue = gst_element_factory_make("queue", "queue");
auto trackProcessor = incomingTrackProcessor();
- m_data.isUpstreamDecoding = m_isDecoding;
- gst_bin_add_many(GST_BIN_CAST(m_bin.get()), m_tee.get(), trackProcessor.get(), nullptr);
+ gst_bin_add_many(GST_BIN_CAST(m_bin.get()), trackProcessor.get(), queue, m_sink.get(), nullptr);
+ gst_element_link(queue, m_sink.get());
+
auto sinkPad = adoptGRef(gst_element_get_static_pad(trackProcessor.get(), "sink"));
gst_element_add_pad(m_bin.get(), gst_ghost_pad_new("sink", sinkPad.get()));
+
+ if (m_data.type != RealtimeMediaSource::Type::Video || !m_isDecoding)
+ return;
+
+ auto sinkSinkPad = adoptGRef(gst_element_get_static_pad(m_sink.get(), "sink"));
+ gst_pad_add_probe(sinkSinkPad.get(), GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, reinterpret_cast<GstPadProbeCallback>(+[](GstPad*, GstPadProbeInfo* info, gpointer) -> GstPadProbeReturn {
+ auto query = GST_PAD_PROBE_INFO_QUERY(info);
+ if (GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION)
+ return GST_PAD_PROBE_OK;
+
+ gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr);
+ return GST_PAD_PROBE_REMOVE;
+ }), nullptr, nullptr);
}
String GStreamerIncomingTrackProcessor::mediaStreamIdFromPad()
@@ -128,8 +165,10 @@ GRefPtr GStreamerIncomingTrackProcessor::incomingTrackProcessor()
if (m_data.type == RealtimeMediaSource::Type::Audio)
return createParser();
- GST_DEBUG_OBJECT(m_bin.get(), "Requesting a key-frame");
- gst_pad_send_event(m_pad.get(), gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, TRUE, 1));
+ if (m_data.type == RealtimeMediaSource::Type::Video) {
+ GST_DEBUG_OBJECT(m_bin.get(), "Requesting a key-frame");
+ gst_pad_send_event(m_pad.get(), gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, TRUE, 1));
+ }
bool forceEarlyVideoDecoding = !g_strcmp0(g_getenv("WEBKIT_GST_WEBRTC_FORCE_EARLY_VIDEO_DECODING"), "1");
GST_DEBUG_OBJECT(m_bin.get(), "Configuring for input caps: %" GST_PTR_FORMAT "%s", m_data.caps.get(), forceEarlyVideoDecoding ? " and early decoding" : "");
@@ -151,12 +190,6 @@ GRefPtr GStreamerIncomingTrackProcessor::incomingTrackProcessor()
GRefPtr<GstElement> decodebin = makeGStreamerElement("decodebin3", nullptr);
m_isDecoding = true;
- m_queue = gst_element_factory_make("queue", nullptr);
- m_fakeVideoSink = makeGStreamerElement("fakevideosink", nullptr);
- g_object_set(m_fakeVideoSink.get(), "enable-last-sample", FALSE, nullptr);
- gst_bin_add_many(GST_BIN_CAST(m_bin.get()), m_queue.get(), m_fakeVideoSink.get(), nullptr);
- gst_element_link(m_queue.get(), m_fakeVideoSink.get());
-
g_signal_connect(decodebin.get(), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin*, GstElement* element, gpointer) {
auto elementClass = makeString(gst_element_get_metadata(element, GST_ELEMENT_METADATA_KLASS));
auto classifiers = elementClass.split('/');
@@ -192,13 +225,9 @@ GRefPtr GStreamerIncomingTrackProcessor::incomingTrackProcessor()
}), this);
g_signal_connect_swapped(decodebin.get(), "pad-added", G_CALLBACK(+[](GStreamerIncomingTrackProcessor* self, GstPad* pad) {
- auto sinkPad = adoptGRef(gst_element_get_static_pad(self->m_tee.get(), "sink"));
+ auto queue = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(self->m_bin.get()), "queue"));
+ auto sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
gst_pad_link(pad, sinkPad.get());
-
- gst_element_link(self->m_tee.get(), self->m_queue.get());
- gst_element_sync_state_with_parent(self->m_tee.get());
- gst_element_sync_state_with_parent(self->m_queue.get());
- gst_element_sync_state_with_parent(self->m_fakeVideoSink.get());
self->trackReady();
}), this);
return decodebin;
@@ -216,10 +245,26 @@ GRefPtr GStreamerIncomingTrackProcessor::createParser()
configureVideoRTPDepayloader(element);
}), nullptr);
+ auto& quirksManager = GStreamerQuirksManager::singleton();
+ if (quirksManager.isEnabled()) {
+ // Prevent auto-plugging of hardware-accelerated elements. Those will be used in the playback pipeline.
+ g_signal_connect(parsebin.get(), "autoplug-select", G_CALLBACK(+[](GstElement*, GstPad*, GstCaps*, GstElementFactory* factory, gpointer) -> unsigned {
+ static auto skipAutoPlug = gstGetAutoplugSelectResult("skip"_s);
+ static auto tryAutoPlug = gstGetAutoplugSelectResult("try"_s);
+ RELEASE_ASSERT(skipAutoPlug);
+ RELEASE_ASSERT(tryAutoPlug);
+ auto& quirksManager = GStreamerQuirksManager::singleton();
+ auto isHardwareAccelerated = quirksManager.isHardwareAccelerated(factory).value_or(false);
+ if (isHardwareAccelerated)
+ return *skipAutoPlug;
+ return *tryAutoPlug;
+ }), nullptr);
+ }
+
g_signal_connect_swapped(parsebin.get(), "pad-added", G_CALLBACK(+[](GStreamerIncomingTrackProcessor* self, GstPad* pad) {
- auto sinkPad = adoptGRef(gst_element_get_static_pad(self->m_tee.get(), "sink"));
+ auto queue = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(self->m_bin.get()), "queue"));
+ auto sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
gst_pad_link(pad, sinkPad.get());
- gst_element_sync_state_with_parent(self->m_tee.get());
self->trackReady();
}), this);
return parsebin;
@@ -250,7 +295,9 @@ const GstStructure* GStreamerIncomingTrackProcessor::stats()
m_stats.reset(gst_structure_new_empty("incoming-video-stats"));
uint64_t droppedVideoFrames = 0;
GUniqueOutPtr<GstStructure> stats;
- g_object_get(m_fakeVideoSink.get(), "stats", &stats.outPtr(), nullptr);
+
+ g_object_get(m_sink.get(), "stats", &stats.outPtr(), nullptr);
+
if (!gst_structure_get_uint64(stats.get(), "dropped", &droppedVideoFrames))
return m_stats.get();
diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h
index 58835d483bd56..170597322c743 100644
--- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h
+++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h
@@ -61,7 +61,6 @@ class GStreamerIncomingTrackProcessor : public RefCounted m_pad;
GRefPtr<GstElement> m_bin;
- GRefPtr<GstElement> m_tee;
WebRTCTrackData m_data;
std::pair m_sdpMsIdAndTrackId;
@@ -69,8 +68,7 @@ class GStreamerIncomingTrackProcessor : public RefCounted m_queue;
- GRefPtr<GstElement> m_fakeVideoSink;
+ GRefPtr<GstElement> m_sink;
GUniquePtr<GstStructure> m_stats;
bool m_isReady { false };
};
diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp
index f1c5640ad2ca3..abf15eec90729 100644
--- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp
+++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp
@@ -124,14 +124,6 @@ class WebKitMediaStreamObserver : public MediaStreamPrivate::Observer {
static void webkitMediaStreamSrcEnsureStreamCollectionPosted(WebKitMediaStreamSrc*);
-#if USE(GSTREAMER_WEBRTC)
-struct InternalSourcePadProbeData {
- RealtimeIncomingSourceGStreamer* incomingSource;
- int clientId;
-};
-WEBKIT_DEFINE_ASYNC_DATA_STRUCT(InternalSourcePadProbeData)
-#endif
-
class InternalSource final : public MediaStreamTrackPrivate::Observer,
public RealtimeMediaSource::Observer,
public RealtimeMediaSource::AudioSampleObserver,
@@ -188,19 +180,13 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer,
return GST_PAD_PROBE_OK;
}), nullptr, nullptr);
#endif
-
- auto& trackSource = m_track.source();
- if (!trackSource.isIncomingAudioSource() && !trackSource.isIncomingVideoSource())
- return;
-
- connectIncomingTrack();
}
void connectIncomingTrack()
{
#if USE(GSTREAMER_WEBRTC)
auto& trackSource = m_track.source();
- std::optional<int> clientId;
+ int clientId;
auto client = GRefPtr<GstElement>(m_src);
if (trackSource.isIncomingAudioSource()) {
auto& source = static_cast(trackSource);
@@ -219,23 +205,15 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer,
clientId = source.registerClient(WTFMove(client));
}
- if (!clientId) {
- GST_WARNING_OBJECT(m_src.get(), "Incoming track registration failed, track likely not ready yet.");
- return;
- }
-
- m_webrtcSourceClientId = *clientId;
-
- auto data = createInternalSourcePadProbeData();
- data->incomingSource = static_cast(&trackSource);
- data->clientId = *m_webrtcSourceClientId;
+ m_webrtcSourceClientId = clientId;
+ auto incomingSource = static_cast(&trackSource);
auto srcPad = adoptGRef(gst_element_get_static_pad(m_src.get(), "src"));
gst_pad_add_probe(srcPad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | GST_PAD_PROBE_TYPE_QUERY_UPSTREAM), reinterpret_cast<GstPadProbeCallback>(+[](GstPad* pad, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
- auto data = static_cast(userData);
- if (!data->incomingSource)
+ auto weakSource = static_cast<ThreadSafeWeakPtr<RealtimeIncomingSourceGStreamer>*>(userData);
+ if (!weakSource)
return GST_PAD_PROBE_REMOVE;
-
+ auto incomingSource = weakSource->get();
auto src = adoptGRef(gst_pad_get_parent_element(pad));
if (GST_IS_QUERY(info->data)) {
switch (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info))) {
@@ -248,21 +226,23 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer,
} else
GST_DEBUG_OBJECT(src.get(), "Proxying event %" GST_PTR_FORMAT " to appsink peer", GST_PAD_PROBE_INFO_EVENT(info));
- if (data->incomingSource->isIncomingAudioSource()) {
- auto& source = static_cast(*data->incomingSource);
+ if (incomingSource->isIncomingAudioSource()) {
+ auto& source = static_cast(*incomingSource);
if (GST_IS_EVENT(info->data))
- source.handleUpstreamEvent(GRefPtr]