From 2584afaa455f756b748828cab517f29f21050c81 Mon Sep 17 00:00:00 2001
From: Youenn Fablet
Date: Sat, 3 Sep 2022 02:11:18 -0700
Subject: [PATCH 01/15] Move out of OnTrack/OnRemoveTrack libwebrtc callbacks

https://bugs.webkit.org/show_bug.cgi?id=244709
rdar://problem/99481570

Reviewed by Eric Carlson.

OnTrack/OnRemoveTrack are not standards compliant. We miss some cases where
they should be called (rollback, for instance), and the timing of these
callbacks is not well aligned with the specification. Instead, every time a
description is applied successfully, we store the current transceiver states
from the backend. We then compute the corresponding events from those
transceiver states (see the simplified sketch after the rebased test
expectations below).

Covered by existing and rebased tests.

* LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt:
* LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt:
* Source/WebCore/Modules/mediastream/MediaStream.h:
* Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp:
(WebCore::MediaStreamTrack::trackMutedChanged):
* Source/WebCore/Modules/mediastream/MediaStreamTrack.h:
(WebCore::MediaStreamTrack::setShouldFireMuteEventImmediately):
* Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp:
(WebCore::PeerConnectionBackend::setLocalDescription):
(WebCore::setAssociatedRemoteStreams):
(WebCore::isDirectionReceiving):
(WebCore::processRemoteTracks):
(WebCore::PeerConnectionBackend::setLocalDescriptionSucceeded):
(WebCore::PeerConnectionBackend::setRemoteDescriptionSucceeded):
* Source/WebCore/Modules/mediastream/PeerConnectionBackend.h:
(WebCore::PeerConnectionBackend::DescriptionStates::isolatedCopy):
* Source/WebCore/Modules/mediastream/RTCRtpReceiver.h:
* Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h:
* Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp:
(WebCore::GStreamerMediaEndpoint::doSetRemoteDescription):
* Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp:
(WebCore::LibWebRTCMediaEndpoint::mediaStreamFromRTCStreamId):
(WebCore::LibWebRTCMediaEndpoint::addPendingTrackEvent):
(WebCore::LibWebRTCMediaEndpoint::collectTransceivers):
(WebCore::LibWebRTCMediaEndpoint::addIceCandidate):
(WebCore::LibWebRTCMediaEndpoint::OnIceCandidate):
(WebCore::LibWebRTCMediaEndpointTransceiverState::isolatedCopy):
(WebCore::toLibWebRTCMediaEndpointTransceiverState):
(WebCore::transceiverStatesFromPeerConnection):
(WebCore::LibWebRTCMediaEndpoint::setLocalSessionDescriptionSucceeded):
(WebCore::LibWebRTCMediaEndpoint::setLocalSessionDescriptionFailed):
(WebCore::LibWebRTCMediaEndpoint::setRemoteSessionDescriptionSucceeded):
(WebCore::LibWebRTCMediaEndpoint::mediaStreamFromRTCStream): Deleted.
(WebCore::LibWebRTCMediaEndpoint::newTransceiver): Deleted.
(WebCore::LibWebRTCMediaEndpoint::removeRemoteTrack): Deleted.
(WebCore::LibWebRTCMediaEndpoint::OnTrack): Deleted.
(WebCore::LibWebRTCMediaEndpoint::OnRemoveTrack): Deleted.
* Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h: Canonical link: https://commits.webkit.org/254128@main --- ...setRemoteDescription-rollback-expected.txt | 2 +- ...RTCRtpSender-setStreams.https-expected.txt | 4 +- .../WebCore/Modules/mediastream/MediaStream.h | 3 + .../Modules/mediastream/MediaStreamTrack.cpp | 13 +- .../Modules/mediastream/MediaStreamTrack.h | 3 + .../mediastream/PeerConnectionBackend.cpp | 178 ++++++++++++++++-- .../mediastream/PeerConnectionBackend.h | 29 ++- .../Modules/mediastream/RTCRtpReceiver.h | 5 +- .../Modules/mediastream/RTCRtpTransceiver.h | 4 + .../gstreamer/GStreamerMediaEndpoint.cpp | 4 +- .../libwebrtc/LibWebRTCMediaEndpoint.cpp | 154 +++++++-------- .../libwebrtc/LibWebRTCMediaEndpoint.h | 8 +- 12 files changed, 299 insertions(+), 108 deletions(-) diff --git a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt index 1e6c0dbc83272..2b85aa04a2bdb 100644 --- a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt +++ b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setRemoteDescription-rollback-expected.txt @@ -17,7 +17,7 @@ PASS rollback of a local offer to negotiated stable state should enable applying PASS rollback a local offer with audio direction change to negotiated stable state and then add video receiver PASS two transceivers with same mids PASS onremovetrack fires during remote rollback -FAIL rollback of a remote offer with stream changes assert_equals: expected 2 but got 1 +PASS rollback of a remote offer with stream changes PASS removeTrack() with a sender being rolled back does not crash or throw PASS Implicit rollback with only a datachannel works diff --git a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt index 2a4323b7eb9db..6947a02db718f 100644 --- a/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt +++ b/LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCRtpSender-setStreams.https-expected.txt @@ -1,9 +1,7 @@ -Harness Error (TIMEOUT), message = null - PASS setStreams causes streams to be reported via ontrack on callee PASS setStreams can be used to reconstruct a stream with a track on the remote side PASS Adding streams and changing direction causes new streams to be reported via ontrack on callee -TIMEOUT Adding streams to an active transceiver causes new streams to be reported via ontrack on callee Test timed out +PASS Adding streams to an active transceiver causes new streams to be reported via ontrack on callee PASS setStreams() fires InvalidStateError on a closed peer connection. 
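As referenced in the commit message above, here is a minimal standalone sketch of the event computation [PATCH 01/15] introduces. The names used here (Direction, TransceiverSnapshot, diffSnapshots) are illustrative only and are not WebKit API; the real logic lives in PeerConnectionBackend::processRemoteTracks and the set*DescriptionSucceeded methods further down in this patch.

```cpp
// Standalone model of "compute track events from transceiver states".
// All type and function names here are hypothetical simplifications.
#include <algorithm>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

enum class Direction { SendRecv, SendOnly, RecvOnly, Inactive };

static bool isReceiving(std::optional<Direction> d)
{
    return d && (*d == Direction::SendRecv || *d == Direction::RecvOnly);
}

struct TransceiverSnapshot {
    std::string mid;
    std::vector<std::string> streamIds; // streams associated with the receiver
    std::optional<Direction> firedDirection;
};

struct Events {
    std::vector<std::string> trackEvents; // mids that should fire a "track" event
    std::vector<std::string> muteEvents;  // mids whose remote track should be muted
};

// Diff the snapshot taken before applying a description against the one taken after.
static Events diffSnapshots(const std::vector<TransceiverSnapshot>& before, const std::vector<TransceiverSnapshot>& after)
{
    Events events;
    for (const auto& current : after) {
        auto previous = std::find_if(before.begin(), before.end(), [&](auto& t) { return t.mid == current.mid; });
        bool wasReceiving = previous != before.end() && isReceiving(previous->firedDirection);
        bool streamsChanged = previous == before.end() || previous->streamIds != current.streamIds;
        if (isReceiving(current.firedDirection) && (!wasReceiving || streamsChanged))
            events.trackEvents.push_back(current.mid); // remote track addition
        else if (!isReceiving(current.firedDirection) && wasReceiving)
            events.muteEvents.push_back(current.mid);  // remote track removal
    }
    return events;
}

int main()
{
    std::vector<TransceiverSnapshot> before { { "0", { "streamA" }, Direction::SendRecv } };
    std::vector<TransceiverSnapshot> after {
        { "0", { "streamA" }, Direction::SendOnly }, // stops receiving -> mute
        { "1", { "streamB" }, Direction::RecvOnly }, // starts receiving -> track event
    };
    auto events = diffSnapshots(before, after);
    for (auto& mid : events.trackEvents)
        std::cout << "track event for mid " << mid << "\n";
    for (auto& mid : events.muteEvents)
        std::cout << "mute remote track for mid " << mid << "\n";
}
```

Running this prints a track event for mid 1 and a mute for mid 0, mirroring the process-remote-track-addition and process-remote-track-removal steps cited in the diff below.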
diff --git a/Source/WebCore/Modules/mediastream/MediaStream.h b/Source/WebCore/Modules/mediastream/MediaStream.h index a9793c54bec1f..abd3b56fa6326 100644 --- a/Source/WebCore/Modules/mediastream/MediaStream.h +++ b/Source/WebCore/Modules/mediastream/MediaStream.h @@ -78,6 +78,9 @@ class MediaStream final RefPtr clone(); + using WeakValueType = MediaStreamPrivate::Observer::WeakValueType; + using MediaStreamPrivate::Observer::weakPtrFactory; + bool active() const { return m_isActive; } bool muted() const { return m_private->muted(); } diff --git a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp index 91229c6574d51..b8bb6a0679540 100644 --- a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp +++ b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp @@ -610,12 +610,21 @@ void MediaStreamTrack::trackMutedChanged(MediaStreamTrackPrivate&) if (document->activeDOMObjectsAreStopped() || m_ended) return; - queueTaskKeepingObjectAlive(*this, TaskSource::Networking, [this, muted = m_private->muted()] { + Function updateMuted = [this, muted = m_private->muted()] { if (!scriptExecutionContext() || scriptExecutionContext()->activeDOMObjectsAreStopped()) return; + + if (m_muted == muted) + return; + m_muted = muted; dispatchEvent(Event::create(muted ? eventNames().muteEvent : eventNames().unmuteEvent, Event::CanBubble::No, Event::IsCancelable::No)); - }); + }; + if (m_shouldFireMuteEventImmediately) + updateMuted(); + else + queueTaskKeepingObjectAlive(*this, TaskSource::Networking, WTFMove(updateMuted)); + configureTrackRendering(); bool wasInterrupted = m_isInterrupted; diff --git a/Source/WebCore/Modules/mediastream/MediaStreamTrack.h b/Source/WebCore/Modules/mediastream/MediaStreamTrack.h index 87347e6a0a291..c9d46149f7f15 100644 --- a/Source/WebCore/Modules/mediastream/MediaStreamTrack.h +++ b/Source/WebCore/Modules/mediastream/MediaStreamTrack.h @@ -163,6 +163,8 @@ class MediaStreamTrack const void* logIdentifier() const final { return m_private->logIdentifier(); } #endif + void setShouldFireMuteEventImmediately(bool value) { m_shouldFireMuteEventImmediately = value; } + protected: MediaStreamTrack(ScriptExecutionContext&, Ref&&); @@ -212,6 +214,7 @@ class MediaStreamTrack bool m_ended { false }; const bool m_isCaptureTrack { false }; bool m_isInterrupted { false }; + bool m_shouldFireMuteEventImmediately { false }; }; typedef Vector> MediaStreamTrackVector; diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp index 17c0c0c05e592..e04954eea3b2a 100644 --- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp +++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp @@ -186,25 +186,113 @@ void PeerConnectionBackend::setLocalDescription(const RTCSessionDescription* ses { ASSERT(!m_peerConnection.isClosed()); + m_isProcessingLocalDescriptionAnswer = sessionDescription && (sessionDescription->type() == RTCSdpType::Answer || sessionDescription->type() == RTCSdpType::Pranswer); m_setDescriptionCallback = WTFMove(callback); doSetLocalDescription(sessionDescription); } -void PeerConnectionBackend::setLocalDescriptionSucceeded(std::optional&& descriptionStates, std::unique_ptr&& sctpBackend) +struct MediaStreamAndTrackItem { + Ref stream; + Ref track; +}; + +// https://w3c.github.io/webrtc-pc/#set-associated-remote-streams +static void setAssociatedRemoteStreams(RTCRtpReceiver& receiver, const 
PeerConnectionBackend::TransceiverState& state, Vector& addList, Vector& removeList) +{ + for (auto& currentStream : receiver.associatedStreams()) { + if (currentStream && !anyOf(state.receiverStreams, [¤tStream](auto& stream) { return stream->id() == currentStream->id(); })) + removeList.append({ Ref { *currentStream }, Ref { receiver.track() } }); + } + + for (auto& stream : state.receiverStreams) { + if (!anyOf(receiver.associatedStreams(), [&stream](auto& currentStream) { return stream->id() == currentStream->id(); })) + addList.append({ *stream, Ref { receiver.track() } }); + } + + receiver.setAssociatedStreams(WTF::map(state.receiverStreams, [](auto& stream) { return WeakPtr { stream.get() }; })); +} + +static bool isDirectionReceiving(RTCRtpTransceiverDirection direction) +{ + return direction == RTCRtpTransceiverDirection::Sendrecv || direction == RTCRtpTransceiverDirection::Recvonly; +} + +// https://w3c.github.io/webrtc-pc/#process-remote-tracks +static void processRemoteTracks(RTCRtpTransceiver& transceiver, PeerConnectionBackend::TransceiverState&& state, Vector& addList, Vector& removeList, Vector>& trackEventList, Vector>& muteTrackList) +{ + auto addListSize = addList.size(); + auto& receiver = transceiver.receiver(); + setAssociatedRemoteStreams(receiver, state, addList, removeList); + if ((state.firedDirection && isDirectionReceiving(*state.firedDirection) && (!transceiver.firedDirection() || !isDirectionReceiving(*transceiver.firedDirection()))) || addListSize != addList.size()) { + // https://w3c.github.io/webrtc-pc/#process-remote-track-addition + trackEventList.append(RTCTrackEvent::create(eventNames().trackEvent, Event::CanBubble::No, Event::IsCancelable::No, &receiver, &receiver.track(), WTFMove(state.receiverStreams), &transceiver)); + } + if (!(state.firedDirection && isDirectionReceiving(*state.firedDirection)) && transceiver.firedDirection() && isDirectionReceiving(*transceiver.firedDirection())) { + // https://w3c.github.io/webrtc-pc/#process-remote-track-removal + muteTrackList.append(receiver.track()); + } + transceiver.setFiredDirection(state.firedDirection); +} + +void PeerConnectionBackend::setLocalDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend) { ASSERT(isMainThread()); ALWAYS_LOG(LOGIDENTIFIER); ASSERT(m_setDescriptionCallback); - m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), sctpBackend = WTFMove(sctpBackend)]() mutable { + m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend)]() mutable { if (m_peerConnection.isClosed()) return; - if (descriptionStates) - m_peerConnection.updateDescriptions(WTFMove(*descriptionStates)); m_peerConnection.updateTransceiversAfterSuccessfulLocalDescription(); m_peerConnection.updateSctpBackend(WTFMove(sctpBackend)); + + if (descriptionStates) { + m_peerConnection.updateDescriptions(WTFMove(*descriptionStates)); + if (m_peerConnection.isClosed()) + return; + } + m_peerConnection.processIceTransportChanges(); + if (m_peerConnection.isClosed()) + return; + + if (m_isProcessingLocalDescriptionAnswer && transceiverStates) { + // Compute track related events. 
+ Vector removeList; + Vector> muteTrackList; + Vector addListNoOp; + for (auto& transceiverState : *transceiverStates) { + RefPtr transceiver; + for (auto& item : m_peerConnection.currentTransceivers()) { + if (item->mid() == transceiverState.mid) { + transceiver = item; + break; + } + } + if (transceiver) { + if (!(transceiverState.firedDirection && isDirectionReceiving(*transceiverState.firedDirection)) && transceiver->firedDirection() && isDirectionReceiving(*transceiver->firedDirection())) { + setAssociatedRemoteStreams(transceiver->receiver(), transceiverState, addListNoOp, removeList); + muteTrackList.append(transceiver->receiver().track()); + } + } + transceiver->setFiredDirection(transceiverState.firedDirection); + } + for (auto& track : muteTrackList) { + track->setShouldFireMuteEventImmediately(true); + track->source().setMuted(true); + track->setShouldFireMuteEventImmediately(false); + if (m_peerConnection.isClosed()) + return; + } + + for (auto& pair : removeList) { + pair.stream->privateStream().removeTrack(pair.track->privateTrack()); + if (m_peerConnection.isClosed()) + return; + } + } + callback({ }); }); } @@ -231,28 +319,92 @@ void PeerConnectionBackend::setRemoteDescription(const RTCSessionDescription& se doSetRemoteDescription(sessionDescription); } -void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optional&& descriptionStates, std::unique_ptr&& sctpBackend) +void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend) { ASSERT(isMainThread()); ALWAYS_LOG(LOGIDENTIFIER, "Set remote description succeeded"); ASSERT(m_setDescriptionCallback); - m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), sctpBackend = WTFMove(sctpBackend), events = WTFMove(m_pendingTrackEvents)]() mutable { + m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend), events = WTFMove(m_pendingTrackEvents)]() mutable { if (m_peerConnection.isClosed()) return; - if (descriptionStates) - m_peerConnection.updateDescriptions(WTFMove(*descriptionStates)); + Vector removeList; + if (transceiverStates) { + for (auto& transceiver : m_peerConnection.currentTransceivers()) { + if (!anyOf(*transceiverStates, [&transceiver](auto& state) { return state.mid == transceiver->mid(); })) { + for (auto& stream : transceiver->receiver().associatedStreams()) { + if (stream) + removeList.append({ Ref { *stream }, Ref { transceiver->receiver().track() } }); + } + } + } + } + + m_peerConnection.updateTransceiversAfterSuccessfulRemoteDescription(); + m_peerConnection.updateSctpBackend(WTFMove(sctpBackend)); - for (auto& event : events) - dispatchTrackEvent(event); + if (descriptionStates) { + m_peerConnection.updateDescriptions(WTFMove(*descriptionStates)); + if (m_peerConnection.isClosed()) + return; + } + m_peerConnection.processIceTransportChanges(); if (m_peerConnection.isClosed()) return; - m_peerConnection.updateTransceiversAfterSuccessfulRemoteDescription(); - m_peerConnection.updateSctpBackend(WTFMove(sctpBackend)); - m_peerConnection.processIceTransportChanges(); + if (transceiverStates) { + // Compute track related events. 
+ Vector> muteTrackList; + Vector addList; + Vector> trackEventList; + for (auto& transceiverState : *transceiverStates) { + RefPtr transceiver; + for (auto& item : m_peerConnection.currentTransceivers()) { + if (item->mid() == transceiverState.mid) { + transceiver = item; + break; + } + } + if (transceiver) + processRemoteTracks(*transceiver, WTFMove(transceiverState), addList, removeList, trackEventList, muteTrackList); + } + + for (auto& track : muteTrackList) { + track->setShouldFireMuteEventImmediately(true); + track->source().setMuted(true); + track->setShouldFireMuteEventImmediately(false); + if (m_peerConnection.isClosed()) + return; + } + + for (auto& pair : removeList) { + pair.stream->privateStream().removeTrack(pair.track->privateTrack()); + if (m_peerConnection.isClosed()) + return; + } + + for (auto& pair : addList) { + pair.stream->addTrackFromPlatform(pair.track.copyRef()); + if (m_peerConnection.isClosed()) + return; + } + + for (auto& event : trackEventList) { + RefPtr track = event->track(); + m_peerConnection.dispatchEvent(event); + if (m_peerConnection.isClosed()) + return; + + track->source().setMuted(false); + } + } else { + // FIXME: Move ports out of m_pendingTrackEvents. + for (auto& event : events) + dispatchTrackEvent(event); + } + callback({ }); }); } diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h index 62468efdbb63c..e9604852a54be 100644 --- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h +++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h @@ -37,6 +37,7 @@ #include "RTCIceGatheringState.h" #include "RTCRtpCapabilities.h" #include "RTCRtpSendParameters.h" +#include "RTCRtpTransceiverDirection.h" #include "RTCSessionDescription.h" #include "RTCSignalingState.h" #include @@ -136,7 +137,15 @@ class PeerConnectionBackend String currentRemoteDescriptionSdp; std::optional pendingRemoteDescriptionSdpType; String pendingRemoteDescriptionSdp; + + DescriptionStates isolatedCopy() &&; + }; + struct TransceiverState { + String mid; + Vector> receiverStreams; + std::optional firedDirection; }; + using TransceiverStates = Vector; void newICECandidate(String&& sdp, String&& mid, unsigned short sdpMLineIndex, String&& serverURL, std::optional&&); void newDataChannel(UniqueRef&&, String&&, RTCDataChannelInit&&); @@ -211,10 +220,10 @@ class PeerConnectionBackend void createAnswerSucceeded(String&&); void createAnswerFailed(Exception&&); - void setLocalDescriptionSucceeded(std::optional&&, std::unique_ptr&&); + void setLocalDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&); void setLocalDescriptionFailed(Exception&&); - void setRemoteDescriptionSucceeded(std::optional&&, std::unique_ptr&&); + void setRemoteDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&); void setRemoteDescriptionFailed(Exception&&); void validateSDP(const String&) const; @@ -253,8 +262,24 @@ class PeerConnectionBackend const void* m_logIdentifier; #endif bool m_finishedGatheringCandidates { false }; + bool m_isProcessingLocalDescriptionAnswer { false }; }; +inline PeerConnectionBackend::DescriptionStates PeerConnectionBackend::DescriptionStates::isolatedCopy() && +{ + return DescriptionStates { + signalingState, + currentLocalDescriptionSdpType, + WTFMove(currentLocalDescriptionSdp).isolatedCopy(), + pendingLocalDescriptionSdpType, + WTFMove(pendingLocalDescriptionSdp).isolatedCopy(), + currentRemoteDescriptionSdpType, + 
WTFMove(currentRemoteDescriptionSdp).isolatedCopy(), + pendingRemoteDescriptionSdpType, + WTFMove(pendingRemoteDescriptionSdp).isolatedCopy() + }; +} + } // namespace WebCore #endif // ENABLE(WEB_RTC) diff --git a/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h b/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h index a7999029f1583..d7f6613563e75 100644 --- a/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h +++ b/Source/WebCore/Modules/mediastream/RTCRtpReceiver.h @@ -32,7 +32,7 @@ #if ENABLE(WEB_RTC) -#include "MediaStreamTrack.h" +#include "MediaStream.h" #include "RTCDtlsTransport.h" #include "RTCRtpReceiverBackend.h" #include "RTCRtpSynchronizationSource.h" @@ -79,6 +79,8 @@ class RTCRtpReceiver final : public RefCounted std::optional transform(); ExceptionOr setTransform(std::unique_ptr&&); + const Vector>& associatedStreams() const { return m_associatedStreams; } + void setAssociatedStreams(Vector>&& streams) { m_associatedStreams = WTFMove(streams); } private: RTCRtpReceiver(PeerConnectionBackend&, Ref&&, std::unique_ptr&&); @@ -94,6 +96,7 @@ class RTCRtpReceiver final : public RefCounted std::unique_ptr m_backend; WeakPtr m_connection; std::unique_ptr m_transform; + Vector> m_associatedStreams; #if !RELEASE_LOG_DISABLED Ref m_logger; const void* m_logIdentifier { nullptr }; diff --git a/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h b/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h index 7dfb01948a502..f00f4cd5b0318 100644 --- a/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h +++ b/Source/WebCore/Modules/mediastream/RTCRtpTransceiver.h @@ -72,10 +72,14 @@ class RTCRtpTransceiver final : public RefCounted, public Scr RTCRtpTransceiverBackend* backend() { return m_backend.get(); } void setConnection(RTCPeerConnection&); + std::optional firedDirection() const { return m_firedDirection; } + void setFiredDirection(std::optional firedDirection) { m_firedDirection = firedDirection; } + private: RTCRtpTransceiver(Ref&&, Ref&&, std::unique_ptr&&); RTCRtpTransceiverDirection m_direction; + std::optional m_firedDirection; Ref m_sender; Ref m_receiver; diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp index 41d3237505891..2fe8b59ac627f 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp @@ -465,7 +465,7 @@ void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription* GRefPtr transport; g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr); - m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), transport ? makeUnique(WTFMove(transport)) : nullptr); + m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), { }, transport ? makeUnique(WTFMove(transport)) : nullptr); }, [protectedThis = Ref(*this), this](const GError* error) { if (protectedThis->isStopped()) return; @@ -539,7 +539,7 @@ void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription& GRefPtr transport; g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr); - m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), transport ? makeUnique(WTFMove(transport)) : nullptr); + m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), { }, transport ? 
makeUnique(WTFMove(transport)) : nullptr); }, [protectedThis = Ref(*this), this](const GError* error) { if (protectedThis->isStopped()) return; diff --git a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp index 1de61cc018b90..1bf844d81b8d6 100644 --- a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp @@ -309,32 +309,16 @@ void LibWebRTCMediaEndpoint::OnSignalingChange(webrtc::PeerConnectionInterface:: { } -MediaStream& LibWebRTCMediaEndpoint::mediaStreamFromRTCStream(webrtc::MediaStreamInterface& rtcStream) +MediaStream& LibWebRTCMediaEndpoint::mediaStreamFromRTCStreamId(const String& id) { - auto label = fromStdString(rtcStream.id()); - auto mediaStream = m_remoteStreamsById.ensure(label, [label, this]() mutable { + auto mediaStream = m_remoteStreamsById.ensure(id, [id, this]() mutable { auto& document = downcast(*m_peerConnectionBackend.connection().scriptExecutionContext()); - return MediaStream::create(document, MediaStreamPrivate::create(document.logger(), { }, WTFMove(label))); + auto stream = MediaStream::create(document, MediaStreamPrivate::create(document.logger(), { }, String(id))); + return stream; }); return *mediaStream.iterator->value; } -void LibWebRTCMediaEndpoint::addPendingTrackEvent(Ref&& receiver, MediaStreamTrack& track, const std::vector>& rtcStreams, RefPtr&& transceiver) -{ - Vector> streams; - for (auto& rtcStream : rtcStreams) { - auto& mediaStream = mediaStreamFromRTCStream(*rtcStream.get()); - streams.append(&mediaStream); - mediaStream.addTrackFromPlatform(track); - } - auto streamIds = WTF::map(streams, [](auto& stream) -> String { - return stream->id(); - }); - m_remoteStreamsFromRemoteTrack.add(&track, WTFMove(streamIds)); - - m_peerConnectionBackend.addPendingTrackEvent({ WTFMove(receiver), track, WTFMove(streams), WTFMove(transceiver) }); -} - void LibWebRTCMediaEndpoint::collectTransceivers() { if (!m_backend) @@ -348,7 +332,7 @@ void LibWebRTCMediaEndpoint::collectTransceivers() continue; auto rtcReceiver = rtcTransceiver->receiver(); - m_peerConnectionBackend.newRemoteTransceiver(makeUnique(WTFMove(rtcTransceiver)), rtcReceiver->media_type() == cricket::MEDIA_TYPE_AUDIO ? RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video); + existingTransceiver = &m_peerConnectionBackend.newRemoteTransceiver(makeUnique(WTFMove(rtcTransceiver)), rtcReceiver->media_type() == cricket::MEDIA_TYPE_AUDIO ? RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video); } } @@ -359,37 +343,6 @@ std::optional LibWebRTCMediaEndpoint::canTrickleIceCandidates() const return *m_backend->can_trickle_ice_candidates(); } -void LibWebRTCMediaEndpoint::newTransceiver(rtc::scoped_refptr&& rtcTransceiver) -{ - auto rtcReceiver = rtcTransceiver->receiver(); - auto* transceiver = m_peerConnectionBackend.existingTransceiver([&](auto& transceiverBackend) { - return rtcTransceiver.get() == transceiverBackend.rtcTransceiver(); - }); - if (!transceiver) - transceiver = &m_peerConnectionBackend.newRemoteTransceiver(makeUnique(WTFMove(rtcTransceiver)), rtcReceiver->media_type() == cricket::MEDIA_TYPE_AUDIO ? 
RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video); - - addPendingTrackEvent(transceiver->receiver(), transceiver->receiver().track(), rtcReceiver->streams(), transceiver); -} - -void LibWebRTCMediaEndpoint::removeRemoteTrack(rtc::scoped_refptr&& receiver) -{ - auto* transceiver = m_peerConnectionBackend.existingTransceiver([&receiver](auto& transceiverBackend) { - auto* rtcTransceiver = transceiverBackend.rtcTransceiver(); - return rtcTransceiver && receiver.get() == rtcTransceiver->receiver().get(); - }); - if (!transceiver) - return; - - auto& track = transceiver->receiver().track(); - - for (auto& id : m_remoteStreamsFromRemoteTrack.get(&track)) { - if (auto stream = m_remoteStreamsById.get(id)) - stream->privateStream().removeTrack(track.privateTrack()); - } - - track.source().setMuted(true); -} - template ExceptionOr LibWebRTCMediaEndpoint::createTransceiverBackends(T&& trackOrKind, webrtc::RtpTransceiverInit&& init, LibWebRTCRtpSenderBackend::Source&& source) { @@ -451,24 +404,6 @@ std::unique_ptr LibWebRTCMediaEndpoint::transcei return nullptr; } -void LibWebRTCMediaEndpoint::OnTrack(rtc::scoped_refptr transceiver) -{ - callOnMainThread([protectedThis = Ref { *this }, transceiver = WTFMove(transceiver)]() mutable { - if (protectedThis->isStopped()) - return; - protectedThis->newTransceiver(WTFMove(transceiver)); - }); -} - -void LibWebRTCMediaEndpoint::OnRemoveTrack(rtc::scoped_refptr receiver) -{ - callOnMainThread([protectedThis = Ref { *this }, receiver = WTFMove(receiver)]() mutable { - if (protectedThis->isStopped()) - return; - protectedThis->removeRemoteTrack(WTFMove(receiver)); - }); -} - std::unique_ptr LibWebRTCMediaEndpoint::createDataChannel(const String& label, const RTCDataChannelInit& options) { auto init = LibWebRTCDataChannelHandler::fromRTCDataChannelInit(options); @@ -647,7 +582,7 @@ static std::optional descriptionsFromP void LibWebRTCMediaEndpoint::addIceCandidate(std::unique_ptr&& candidate, PeerConnectionBackend::AddIceCandidateCallback&& callback) { m_backend->AddIceCandidate(WTFMove(candidate), [task = createSharedTask(WTFMove(callback)), backend = m_backend](auto&& error) mutable { - callOnMainThread([task = WTFMove(task), descriptions = descriptionsFromPeerConnection(backend.get()), error = WTFMove(error)]() mutable { + callOnMainThread([task = WTFMove(task), descriptions = crossThreadCopy(descriptionsFromPeerConnection(backend.get())), error = WTFMove(error)]() mutable { if (!error.ok()) { task->run(toException(error)); return; @@ -666,7 +601,7 @@ void LibWebRTCMediaEndpoint::OnIceCandidate(const webrtc::IceCandidateInterface auto sdpMLineIndex = safeCast(rtcCandidate->sdp_mline_index()); - callOnMainThread([protectedThis = Ref { *this }, descriptions = descriptionsFromPeerConnection(m_backend.get()), mid = fromStdString(rtcCandidate->sdp_mid()), sdp = fromStdString(sdp), sdpMLineIndex, url = fromStdString(rtcCandidate->server_url())]() mutable { + callOnMainThread([protectedThis = Ref { *this }, descriptions = crossThreadCopy(descriptionsFromPeerConnection(m_backend.get())), mid = fromStdString(rtcCandidate->sdp_mid()), sdp = fromStdString(sdp), sdpMLineIndex, url = fromStdString(rtcCandidate->server_url())]() mutable { if (protectedThis->isStopped()) return; protectedThis->m_peerConnectionBackend.newICECandidate(WTFMove(sdp), WTFMove(mid), sdpMLineIndex, WTFMove(url), WTFMove(descriptions)); @@ -729,12 +664,68 @@ std::unique_ptr SctpTransportState::createBackend return makeUnique(WTFMove(m_transport), m_information.dtls_transport()); 
} +struct LibWebRTCMediaEndpointTransceiverState { + String mid; + Vector receiverStreamIds; + std::optional firedDirection; + + LibWebRTCMediaEndpointTransceiverState isolatedCopy() &&; +}; + +inline LibWebRTCMediaEndpointTransceiverState LibWebRTCMediaEndpointTransceiverState::isolatedCopy() && +{ + return { + WTFMove(mid).isolatedCopy(), + crossThreadCopy(WTFMove(receiverStreamIds)), + firedDirection + }; +} + +static LibWebRTCMediaEndpointTransceiverState toLibWebRTCMediaEndpointTransceiverState(const webrtc::RtpTransceiverInterface& transceiver) +{ + String mid; + if (auto rtcMid = transceiver.mid()) + mid = fromStdString(*rtcMid); + std::optional firedDirection; + if (auto rtcFiredDirection = transceiver.fired_direction()) + firedDirection = toRTCRtpTransceiverDirection(*rtcFiredDirection); + + auto rtcStreamIds = transceiver.receiver()->stream_ids(); + Vector streamIds; + streamIds.reserveInitialCapacity(rtcStreamIds.size()); + for (auto& streamId : rtcStreamIds) + streamIds.uncheckedAppend(fromStdString(streamId)); + + return { WTFMove(mid), WTFMove(streamIds), firedDirection }; +} + +static Vector transceiverStatesFromPeerConnection(webrtc::PeerConnectionInterface& connection) +{ + auto transceivers = connection.GetTransceivers(); + Vector states; + states.reserveInitialCapacity(transceivers.size()); + for (auto& transceiver : transceivers) + states.uncheckedAppend(toLibWebRTCMediaEndpointTransceiverState(*transceiver)); + + return states; +} + void LibWebRTCMediaEndpoint::setLocalSessionDescriptionSucceeded() { - callOnMainThread([protectedThis = Ref { *this }, descriptions = descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable { + if (!m_backend) + return; + + callOnMainThread([protectedThis = Ref { *this }, this, descriptions = crossThreadCopy(descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes)), rtcTransceiverStates = crossThreadCopy(transceiverStatesFromPeerConnection(*m_backend)), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable { if (protectedThis->isStopped()) return; - protectedThis->m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), sctpState.createBackend()); + + auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState { + auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr { + return &mediaStreamFromRTCStreamId(id); + }); + return { WTFMove(state.mid), WTFMove(streams), state.firedDirection }; + }); + protectedThis->m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend()); }); } @@ -746,13 +737,22 @@ void LibWebRTCMediaEndpoint::setLocalSessionDescriptionFailed(ExceptionCode erro protectedThis->m_peerConnectionBackend.setLocalDescriptionFailed(Exception { errorCode, WTFMove(errorMessage) }); }); } - void LibWebRTCMediaEndpoint::setRemoteSessionDescriptionSucceeded() { - callOnMainThread([protectedThis = Ref { *this }, descriptions = descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable { + if (!m_backend) + return; + + callOnMainThread([protectedThis = Ref { *this }, this, descriptions = crossThreadCopy(descriptionsFromPeerConnection(m_backend.get(), GatherSignalingState::Yes)), rtcTransceiverStates = 
crossThreadCopy(transceiverStatesFromPeerConnection(*m_backend)), sctpState = SctpTransportState(m_backend->GetSctpTransport())]() mutable { if (protectedThis->isStopped()) return; - protectedThis->m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), sctpState.createBackend()); + + auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState { + auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr { + return &mediaStreamFromRTCStreamId(id); + }); + return { WTFMove(state.mid), WTFMove(streams), state.firedDirection }; + }); + protectedThis->m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend()); }); } diff --git a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h index 62dc91d39c165..a4980ba801172 100644 --- a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h +++ b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.h @@ -128,8 +128,6 @@ class LibWebRTCMediaEndpoint // webrtc::PeerConnectionObserver API void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState) final; void OnDataChannel(rtc::scoped_refptr) final; - void OnTrack(rtc::scoped_refptr) final; - void OnRemoveTrack(rtc::scoped_refptr) final; void OnNegotiationNeededEvent(uint32_t) final; void OnStandardizedIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState) final; @@ -143,10 +141,6 @@ class LibWebRTCMediaEndpoint void setLocalSessionDescriptionFailed(ExceptionCode, const char*); void setRemoteSessionDescriptionSucceeded(); void setRemoteSessionDescriptionFailed(ExceptionCode, const char*); - void newTransceiver(rtc::scoped_refptr&&); - void removeRemoteTrack(rtc::scoped_refptr&&); - - void addPendingTrackEvent(Ref&&, MediaStreamTrack&, const std::vector>&, RefPtr&&); template ExceptionOr createTransceiverBackends(T&&, webrtc::RtpTransceiverInit&&, LibWebRTCRtpSenderBackend::Source&&); @@ -158,7 +152,7 @@ class LibWebRTCMediaEndpoint rtc::scoped_refptr createStatsCollector(Ref&&); - MediaStream& mediaStreamFromRTCStream(webrtc::MediaStreamInterface&); + MediaStream& mediaStreamFromRTCStreamId(const String&); void AddRef() const { ref(); } rtc::RefCountReleaseStatus Release() const From fd293f7a49250c23c34ab001ab4b2a4798b50e5d Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Tue, 14 May 2024 02:33:45 -0700 Subject: [PATCH 02/15] Incoming video track renderer should rely on the track intrinsic size https://bugs.webkit.org/show_bug.cgi?id=274093 Reviewed by Xabier Rodriguez-Calvar. Pixel and display aspect ratios shouldn't be applied for WebRTC video tracks. The intrinsic size is re-used as it is. The avf MediaStream player behaves similarly. 
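The size handling described above is easier to see side by side. Below is a hedged, standalone sketch: the types and the naturalSize helper are made up for illustration and are not MediaPlayerPrivateGStreamer API. Regular playback derives a display size from the pixel aspect ratio, while a MediaStream/WebRTC track reports its intrinsic caps size unchanged.

```cpp
// Standalone sketch of the two size paths; names are illustrative only.
#include <cstdint>
#include <iostream>

struct Size { uint32_t width; uint32_t height; };

static Size naturalSize(Size original, int parNumerator, int parDenominator, bool isMediaStream)
{
    if (isMediaStream)
        return original; // use the track's intrinsic size untouched

    // Apply the pixel aspect ratio to derive a display width (simplified).
    uint64_t displayWidth = static_cast<uint64_t>(original.width) * parNumerator / parDenominator;
    return { static_cast<uint32_t>(displayWidth), original.height };
}

int main()
{
    Size caps { 720, 576 };
    auto file = naturalSize(caps, 16, 11, false);  // content with non-square pixels
    auto webrtc = naturalSize(caps, 16, 11, true); // incoming WebRTC track
    std::cout << "file: " << file.width << "x" << file.height << "\n";
    std::cout << "webrtc: " << webrtc.width << "x" << webrtc.height << "\n";
}
```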
* Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp: (WebCore::MediaPlayerPrivateGStreamer::updateVideoSizeAndOrientationFromCaps): * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp: (WebCore::RealtimeIncomingVideoSourceGStreamer::ensureSizeAndFramerate): (WebCore::RealtimeIncomingVideoSourceGStreamer::dispatchSample): Canonical link: https://commits.webkit.org/278745@main --- .../gstreamer/MediaPlayerPrivateGStreamer.cpp | 16 ++++++++++++++-- .../RealtimeIncomingVideoSourceGStreamer.cpp | 4 ++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp index bc5dc486bfd74..d2d076a2aba7c 100644 --- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp +++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp @@ -3595,8 +3595,21 @@ void MediaPlayerPrivateGStreamer::updateVideoSizeAndOrientationFromCaps(const Gs if (m_videoSourceOrientation.usesWidthAsHeight()) originalSize = originalSize.transposedSize(); + auto scopeExit = makeScopeExit([&] { + if (auto* player = m_player) { + GST_DEBUG_OBJECT(pipeline(), "Notifying sizeChanged event to upper layer"); + player->sizeChanged(); + } + }); + GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height()); - GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator); + if (isMediaStreamPlayer()) { + GST_DEBUG_OBJECT(pipeline(), "Using original MediaStream track video intrinsic size"); + m_videoSize = originalSize; + return; + } + + GST_DEBUG_OBJECT(pipeline(), "Applying pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator); // Calculate DAR based on PAR and video size. 
int displayWidth = originalSize.width() * pixelAspectRatioNumerator; @@ -3625,7 +3638,6 @@ void MediaPlayerPrivateGStreamer::updateVideoSizeAndOrientationFromCaps(const Gs GST_DEBUG_OBJECT(pipeline(), "Saving natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height); m_videoSize = FloatSize(static_cast(width), static_cast(height)); - m_player->sizeChanged(); } void MediaPlayerPrivateGStreamer::setCachedPosition(const MediaTime& cachedPosition) const diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp index a7c3c73c656be..d7ad10da3c3e0 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp @@ -103,7 +103,7 @@ void RealtimeIncomingVideoSourceGStreamer::settingsDidChange(OptionSet& caps) { if (auto size = getVideoResolutionFromCaps(caps.get())) - setSize({ static_cast(size->width()), static_cast(size->height()) }); + setIntrinsicSize({ static_cast(size->width()), static_cast(size->height()) }); int frameRateNumerator, frameRateDenominator; auto* structure = gst_caps_get_structure(caps.get(), 0); @@ -122,7 +122,7 @@ void RealtimeIncomingVideoSourceGStreamer::dispatchSample(GRefPtr&& s auto* caps = gst_sample_get_caps(sample.get()); ensureSizeAndFramerate(GRefPtr(caps)); - videoFrameAvailable(VideoFrameGStreamer::create(WTFMove(sample), size(), fromGstClockTime(GST_BUFFER_PTS(buffer))), { }); + videoFrameAvailable(VideoFrameGStreamer::create(WTFMove(sample), intrinsicSize(), fromGstClockTime(GST_BUFFER_PTS(buffer))), { }); } const GstStructure* RealtimeIncomingVideoSourceGStreamer::stats() From 2ac597c0382db7ba02fbe20b18b7a1c00ccff881 Mon Sep 17 00:00:00 2001 From: Carlos Bentzen Date: Wed, 5 Jun 2024 10:34:19 -0700 Subject: [PATCH 03/15] Missing a=msid in offer after setting codec preferences https://bugs.webkit.org/show_bug.cgi?id=275157 Reviewed by Philippe Normand. When setting codec preferences on GstWebRTCRTPTransceiver, the caps object did not contain an "a-msid" field, which causes the offer created by the webrtcbin containing that transceiver to not include an `a=msid` line for it. The issue is fixed by reusing the `a-msid` field from the pre-existent codec preference, if one exists. * LayoutTests/webrtc/msid-setCodecPreferences-expected.txt: Added. * LayoutTests/webrtc/msid-setCodecPreferences.html: Added. 
* Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp: (WebCore::getMsidFromCurrentCodecPreferences): (WebCore::GStreamerRtpTransceiverBackend::setCodecPreferences): Canonical link: https://commits.webkit.org/279746@main --- .../msid-setCodecPreferences-expected.txt | 3 +++ .../webrtc/msid-setCodecPreferences.html | 27 +++++++++++++++++++ .../GStreamerRtpTransceiverBackend.cpp | 21 +++++++++++++-- 3 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 LayoutTests/webrtc/msid-setCodecPreferences-expected.txt create mode 100644 LayoutTests/webrtc/msid-setCodecPreferences.html diff --git a/LayoutTests/webrtc/msid-setCodecPreferences-expected.txt b/LayoutTests/webrtc/msid-setCodecPreferences-expected.txt new file mode 100644 index 0000000000000..c965098c64331 --- /dev/null +++ b/LayoutTests/webrtc/msid-setCodecPreferences-expected.txt @@ -0,0 +1,3 @@ + +PASS msid present in offer SDP after setting codec preferences + diff --git a/LayoutTests/webrtc/msid-setCodecPreferences.html b/LayoutTests/webrtc/msid-setCodecPreferences.html new file mode 100644 index 0000000000000..2f4f69a205066 --- /dev/null +++ b/LayoutTests/webrtc/msid-setCodecPreferences.html @@ -0,0 +1,27 @@ + + + + + Testing basic video exchange from offerer to receiver + + + + + + + + diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp index 93a45b645c9c0..2a5fa2f638c9d 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerRtpTransceiverBackend.cpp @@ -109,7 +109,7 @@ bool GStreamerRtpTransceiverBackend::stopped() const return m_isStopped; } -static inline WARN_UNUSED_RETURN ExceptionOr toRtpCodecCapability(const RTCRtpCodecCapability& codec, int& dynamicPayloadType) +static inline WARN_UNUSED_RETURN ExceptionOr toRtpCodecCapability(const RTCRtpCodecCapability& codec, int& dynamicPayloadType, const char* msid) { if (!codec.mimeType.startsWith("video/"_s) && !codec.mimeType.startsWith("audio/"_s)) return Exception { ExceptionCode::InvalidModificationError, "RTCRtpCodecCapability bad mimeType"_s }; @@ -135,16 +135,33 @@ static inline WARN_UNUSED_RETURN ExceptionOr toRtpCodecCapability(cons } } + if (msid) + gst_caps_set_simple(caps, "a-msid", G_TYPE_STRING, msid, nullptr); + GST_DEBUG("Codec capability: %" GST_PTR_FORMAT, caps); return caps; } +static GUniquePtr getMsidFromCurrentCodecPreferences(GstWebRTCRTPTransceiver* transceiver) +{ + GRefPtr currentCaps; + GUniquePtr msid; + g_object_get(transceiver, "codec-preferences", ¤tCaps.outPtr(), nullptr); + GST_TRACE_OBJECT(transceiver, "Current codec preferences: %" GST_PTR_FORMAT, currentCaps.get()); + if (gst_caps_get_size(currentCaps.get()) > 0) { + auto* s = gst_caps_get_structure(currentCaps.get(), 0); + msid = GUniquePtr(g_strdup(gst_structure_get_string(s, "a-msid"))); + } + return msid; +} + ExceptionOr GStreamerRtpTransceiverBackend::setCodecPreferences(const Vector& codecs) { auto gstCodecs = adoptGRef(gst_caps_new_empty()); + GUniquePtr msid = getMsidFromCurrentCodecPreferences(m_rtcTransceiver.get()); int dynamicPayloadType = 96; for (auto& codec : codecs) { - auto result = toRtpCodecCapability(codec, dynamicPayloadType); + auto result = toRtpCodecCapability(codec, dynamicPayloadType, msid.get()); if (result.hasException()) return result.releaseException(); gst_caps_append(gstCodecs.get(), 
result.releaseReturnValue()); From 631510a69f884061c464b90e99aa083eb8f7cee2 Mon Sep 17 00:00:00 2001 From: Carlos Bentzen Date: Mon, 29 Apr 2024 01:53:02 -0700 Subject: [PATCH 04/15] Missing media in SDP if setConfiguration() is called after createDataChannel() or addTransceiver() https://bugs.webkit.org/show_bug.cgi?id=273318 Reviewed by Xabier Rodriguez-Calvar. GStreamerMediaEndpoint::setConfiguration() was tearing down the pipeline if one already existed and creating a new one, which is an issue if any data channels or transceivers are created before RTCPeerConnection.setConfiguration(). The issue is fixed by creating the pipeline earlier in GStreamerMediaEndpoint's contructor, so that data channels or transceivers aren't discarded if created/added before setConfiguration(). Credit to Philippe Normand for finding the issue and fixing it. I wrote the layout test, which fails without his fix. * LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt: Added. * LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html: Added. * Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp: (WebCore::GStreamerMediaEndpoint::GStreamerMediaEndpoint): (WebCore::GStreamerMediaEndpoint::setConfiguration): Canonical link: https://commits.webkit.org/278099@main --- ...DataChannel-or-addTransceiver-expected.txt | 5 +++ ...r-createDataChannel-or-addTransceiver.html | 39 +++++++++++++++++++ .../gstreamer/GStreamerMediaEndpoint.cpp | 8 +--- 3 files changed, 46 insertions(+), 6 deletions(-) create mode 100644 LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt create mode 100644 LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html diff --git a/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt new file mode 100644 index 0000000000000..f5340be2d8065 --- /dev/null +++ b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver-expected.txt @@ -0,0 +1,5 @@ + +PASS setConfiguration after data channel is created +PASS setConfiguration after video transceiver is added +PASS setConfiguration after audio transceiver is added + diff --git a/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html new file mode 100644 index 0000000000000..2b099271bbd7e --- /dev/null +++ b/LayoutTests/webrtc/setConfiguration-after-createDataChannel-or-addTransceiver.html @@ -0,0 +1,39 @@ + + + + + Testing media fields in SDP when setConfiguration comes after createDataChannel/addTransceiver + + + + + + + + diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp index 2fe8b59ac627f..82dfba7f645b1 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp @@ -80,6 +80,8 @@ GStreamerMediaEndpoint::GStreamerMediaEndpoint(GStreamerPeerConnectionBackend& p std::call_once(debugRegisteredFlag, [] { GST_DEBUG_CATEGORY_INIT(webkit_webrtc_endpoint_debug, "webkitwebrtcendpoint", 0, "WebKit WebRTC end-point"); }); + + initializePipeline(); } GStreamerMediaEndpoint::~GStreamerMediaEndpoint() @@ -244,12 +246,6 @@ void 
GStreamerMediaEndpoint::disposeElementChain(GstElement* element) bool GStreamerMediaEndpoint::setConfiguration(MediaEndpointConfiguration& configuration) { - if (m_pipeline) - teardownPipeline(); - - if (!initializePipeline()) - return false; - auto bundlePolicy = bundlePolicyFromConfiguration(configuration); auto iceTransportPolicy = iceTransportPolicyFromConfiguration(configuration); g_object_set(m_webrtcBin.get(), "bundle-policy", bundlePolicy, "ice-transport-policy", iceTransportPolicy, nullptr); From 77afee3258f8a495f4406e4cc1ec655b9eabfae3 Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Wed, 20 Mar 2024 12:44:47 -0700 Subject: [PATCH 05/15] webrtc/connection-state.html started failing after update to 1.24.0 https://bugs.webkit.org/show_bug.cgi?id=271243 Reviewed by Xabier Rodriguez-Calvar. The `doneGatheringCandidates()` method is called by the PeerConnectionBackend when it's notified from gst-webrtc that the ICE gathering is finished, so we don't need to call it ourselves. The end-of-candidates SDP attribute shouldn't appear in the offer/answer the end-point reports either. This is covered by the webrtc/libwebrtc/descriptionGetters.html test. * LayoutTests/platform/glib/TestExpectations: * Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp: (WebCore::fetchDescription): (WebCore::GStreamerMediaEndpoint::doSetLocalDescription): (WebCore::GStreamerMediaEndpoint::doSetRemoteDescription): (WebCore::GStreamerMediaEndpoint::onIceCandidate): Canonical link: https://commits.webkit.org/276412@main --- .../Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp index 82dfba7f645b1..522019b107e3f 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp @@ -313,7 +313,7 @@ static std::optional> fetchDescription(GstElement* const auto attribute = gst_sdp_message_get_attribute(description->sdp, i); if (!g_strcmp0(attribute->key, "end-of-candidates")) { gst_sdp_message_remove_attribute(description->sdp, i); - i--; + break; } } From 416b31af7cd431d5fe2f9ba2a34108282bef3b64 Mon Sep 17 00:00:00 2001 From: Ahmad Saleem Date: Tue, 21 May 2024 00:01:18 -0700 Subject: [PATCH 06/15] `m_maxMessageSize` should be `infinity()` instead of `max()` in `RTCSctpTransport.h` https://bugs.webkit.org/show_bug.cgi?id=274025 rdar://problem/128306030 Reviewed by Youenn Fablet. This patch aligns WebKit with web specification [1]: [1] https://w3c.github.io/webrtc-pc/#dfn-update-the-data-max-message-size "If both remoteMaxMessageSize and canSendSize are 0, set [[MaxMessageSize]] to the positive Infinity value." 
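As a concrete reading of the quoted spec step, here is a small standalone sketch of the "update the data max message size" rule that the new default lines up with. The helper name is my own, not WebKit code; only when neither side constrains the message size should pages observe positive Infinity.

```cpp
// Hypothetical helper mirroring https://w3c.github.io/webrtc-pc/#dfn-update-the-data-max-message-size
#include <algorithm>
#include <cstdint>
#include <limits>

double computeMaxMessageSize(uint64_t remoteMaxMessageSize, uint64_t canSendSize)
{
    // Both zero: neither side imposes a limit, report positive Infinity.
    if (!remoteMaxMessageSize && !canSendSize)
        return std::numeric_limits<double>::infinity();
    // Exactly one is zero: the non-zero value wins.
    if (!remoteMaxMessageSize)
        return static_cast<double>(canSendSize);
    if (!canSendSize)
        return static_cast<double>(remoteMaxMessageSize);
    // Otherwise the smaller of the two applies.
    return static_cast<double>(std::min(remoteMaxMessageSize, canSendSize));
}
```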
* Source/WebCore/Modules/mediastream/RTCSctpTransport.h: (double m_maxMessageSize): * LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCSctpTransport-maxMessageSize-expected.txt: Rebaselined Canonical link: https://commits.webkit.org/279039@main --- Source/WebCore/Modules/mediastream/RTCSctpTransport.h | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Source/WebCore/Modules/mediastream/RTCSctpTransport.h b/Source/WebCore/Modules/mediastream/RTCSctpTransport.h index ec3745b97a1a3..2d2ab2c14ceb0 100644 --- a/Source/WebCore/Modules/mediastream/RTCSctpTransport.h +++ b/Source/WebCore/Modules/mediastream/RTCSctpTransport.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2021 Apple Inc. All rights reserved. + * Copyright (C) 2021-2024 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -73,7 +73,8 @@ class RTCSctpTransport final : public RefCounted, public Activ UniqueRef m_backend; Ref m_transport; RTCSctpTransportState m_state { RTCSctpTransportState::Connecting }; - double m_maxMessageSize { std::numeric_limits::max() }; + // https://w3c.github.io/webrtc-pc/#dfn-update-the-data-max-message-size + double m_maxMessageSize { std::numeric_limits::infinity() }; std::optional m_maxChannels; }; From 872134e86b552526303851d1639f617494a8e037 Mon Sep 17 00:00:00 2001 From: Youenn Fablet Date: Wed, 22 May 2024 00:05:50 -0700 Subject: [PATCH 07/15] Most of WPT webrtc/RTCSctpTransport-maxMessageSize.html tests are failing https://bugs.webkit.org/show_bug.cgi?id=274442 rdar://128444396 Reviewed by Philippe Normand. We implement https://w3c.github.io/webrtc-pc/#sctp-transport-update-mms. This is called when successfully applying a SDP description as per specification. In this implementation, the assumption is that canSendSize is 0. The specific value of 65536 is handled by libwebrtc. 
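To make the data flow in this patch easier to follow, here is a condensed standalone model (not the actual WebKit classes) of the plumbing it adds: the endpoint computes an optional maxMessageSize each time a description is applied, the value is handed through setLocal/RemoteDescriptionSucceeded to the SCTP transport object, and an unset value is reported to script as positive Infinity.

```cpp
// Condensed, hypothetical model of the maxMessageSize plumbing added below.
#include <iostream>
#include <limits>
#include <optional>

class SctpTransportModel {
public:
    // Called whenever a local or remote description is successfully applied.
    void updateMaxMessageSize(std::optional<double> maxMessageSize) { m_maxMessageSize = maxMessageSize; }

    // What RTCSctpTransport.maxMessageSize would report; unset means "no limit".
    double maxMessageSize() const { return m_maxMessageSize.value_or(std::numeric_limits<double>::infinity()); }

private:
    std::optional<double> m_maxMessageSize;
};

int main()
{
    SctpTransportModel transport;
    std::cout << transport.maxMessageSize() << "\n"; // inf before any description is applied

    transport.updateMaxMessageSize(262144);          // e.g. a value derived from a=max-message-size
    std::cout << transport.maxMessageSize() << "\n"; // 262144
}
```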
* LayoutTests/imported/w3c/web-platform-tests/webrtc/RTCSctpTransport-maxMessageSize-expected.txt: * Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp: (WebCore::PeerConnectionBackend::setLocalDescriptionSucceeded): (WebCore::PeerConnectionBackend::setRemoteDescriptionSucceeded): * Source/WebCore/Modules/mediastream/PeerConnectionBackend.h: * Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp: (WebCore::RTCPeerConnection::updateSctpBackend): * Source/WebCore/Modules/mediastream/RTCPeerConnection.h: * Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp: (WebCore::RTCSctpTransport::onStateChanged): (WebCore::RTCSctpTransport::updateMaxMessageSize): * Source/WebCore/Modules/mediastream/RTCSctpTransport.h: * Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp: (WebCore::GStreamerMediaEndpoint::doSetLocalDescription): (WebCore::GStreamerMediaEndpoint::doSetRemoteDescription): * Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp: (WebCore::SctpTransportState::maxMessageSize const): (WebCore::LibWebRTCMediaEndpoint::setLocalSessionDescriptionSucceeded): (WebCore::LibWebRTCMediaEndpoint::setRemoteSessionDescriptionSucceeded): Canonical link: https://commits.webkit.org/279111@main --- .../mediastream/PeerConnectionBackend.cpp | 12 +++++----- .../mediastream/PeerConnectionBackend.h | 4 ++-- .../Modules/mediastream/RTCPeerConnection.cpp | 23 ++++++++++--------- .../Modules/mediastream/RTCPeerConnection.h | 2 +- .../Modules/mediastream/RTCSctpTransport.cpp | 8 +++++-- .../Modules/mediastream/RTCSctpTransport.h | 7 +++--- .../gstreamer/GStreamerMediaEndpoint.cpp | 20 ++++++++++++++-- .../libwebrtc/LibWebRTCMediaEndpoint.cpp | 10 ++++++-- 8 files changed, 56 insertions(+), 30 deletions(-) diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp index e04954eea3b2a..f07469b9d4388 100644 --- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp +++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp @@ -234,18 +234,18 @@ static void processRemoteTracks(RTCRtpTransceiver& transceiver, PeerConnectionBa transceiver.setFiredDirection(state.firedDirection); } -void PeerConnectionBackend::setLocalDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend) +void PeerConnectionBackend::setLocalDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend, std::optional maxMessageSize) { ASSERT(isMainThread()); ALWAYS_LOG(LOGIDENTIFIER); ASSERT(m_setDescriptionCallback); - m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend)]() mutable { + m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend), maxMessageSize]() mutable { if (m_peerConnection.isClosed()) return; m_peerConnection.updateTransceiversAfterSuccessfulLocalDescription(); - m_peerConnection.updateSctpBackend(WTFMove(sctpBackend)); + m_peerConnection.updateSctpBackend(WTFMove(sctpBackend), maxMessageSize); if (descriptionStates) { 
m_peerConnection.updateDescriptions(WTFMove(*descriptionStates)); @@ -319,13 +319,13 @@ void PeerConnectionBackend::setRemoteDescription(const RTCSessionDescription& se doSetRemoteDescription(sessionDescription); } -void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend) +void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optional&& descriptionStates, std::optional&& transceiverStates, std::unique_ptr&& sctpBackend, std::optional maxMessageSize) { ASSERT(isMainThread()); ALWAYS_LOG(LOGIDENTIFIER, "Set remote description succeeded"); ASSERT(m_setDescriptionCallback); - m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend), events = WTFMove(m_pendingTrackEvents)]() mutable { + m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), descriptionStates = WTFMove(descriptionStates), transceiverStates = WTFMove(transceiverStates), sctpBackend = WTFMove(sctpBackend), maxMessageSize, events = WTFMove(m_pendingTrackEvents)]() mutable { if (m_peerConnection.isClosed()) return; @@ -342,7 +342,7 @@ void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&); + void setLocalDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&, std::optional); void setLocalDescriptionFailed(Exception&&); - void setRemoteDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&); + void setRemoteDescriptionSucceeded(std::optional&&, std::optional&&, std::unique_ptr&&, std::optional); void setRemoteDescriptionFailed(Exception&&); void validateSDP(const String&) const; diff --git a/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp b/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp index 0e4ed66346916..6e2669b6d854c 100644 --- a/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp +++ b/Source/WebCore/Modules/mediastream/RTCPeerConnection.cpp @@ -1096,24 +1096,25 @@ void RTCPeerConnection::updateTransceiversAfterSuccessfulRemoteDescription() updateTransceiverTransports(); } -void RTCPeerConnection::updateSctpBackend(std::unique_ptr&& sctpBackend) +void RTCPeerConnection::updateSctpBackend(std::unique_ptr&& sctpBackend, std::optional maxMessageSize) { if (!sctpBackend) { m_sctpTransport = nullptr; return; } - if (m_sctpTransport && m_sctpTransport->backend() == *sctpBackend) { - m_sctpTransport->update(); - return; + + if (!m_sctpTransport || m_sctpTransport->backend() != *sctpBackend) { + RefPtr context = scriptExecutionContext(); + if (!context) + return; + + auto dtlsTransport = getOrCreateDtlsTransport(sctpBackend->dtlsTransportBackend().moveToUniquePtr()); + if (!dtlsTransport) + return; + m_sctpTransport = RTCSctpTransport::create(*context, makeUniqueRefFromNonNullUniquePtr(WTFMove(sctpBackend)), dtlsTransport.releaseNonNull()); } - auto* context = scriptExecutionContext(); - if (!context) - return; - auto dtlsTransport = getOrCreateDtlsTransport(sctpBackend->dtlsTransportBackend().moveToUniquePtr()); - if (!dtlsTransport) - return; - m_sctpTransport = RTCSctpTransport::create(*context, makeUniqueRefFromNonNullUniquePtr(WTFMove(sctpBackend)), dtlsTransport.releaseNonNull()); + 
m_sctpTransport->updateMaxMessageSize(maxMessageSize); } #if !RELEASE_LOG_DISABLED diff --git a/Source/WebCore/Modules/mediastream/RTCPeerConnection.h b/Source/WebCore/Modules/mediastream/RTCPeerConnection.h index a9b4371b841d5..08ee575633d3b 100644 --- a/Source/WebCore/Modules/mediastream/RTCPeerConnection.h +++ b/Source/WebCore/Modules/mediastream/RTCPeerConnection.h @@ -191,7 +191,7 @@ class RTCPeerConnection final void updateDescriptions(PeerConnectionBackend::DescriptionStates&&); void updateTransceiversAfterSuccessfulLocalDescription(); void updateTransceiversAfterSuccessfulRemoteDescription(); - void updateSctpBackend(std::unique_ptr&&); + void updateSctpBackend(std::unique_ptr&&, std::optional); void processIceTransportStateChange(RTCIceTransport&); void processIceTransportChanges(); diff --git a/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp b/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp index 278903b22bb35..e6260f95291c3 100644 --- a/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp +++ b/Source/WebCore/Modules/mediastream/RTCSctpTransport.cpp @@ -74,8 +74,7 @@ void RTCSctpTransport::onStateChanged(RTCSctpTransportState state, std::optional if (m_state == RTCSctpTransportState::Closed) return; - if (maxMessageSize) - m_maxMessageSize = *maxMessageSize; + m_maxMessageSize = maxMessageSize; if (maxChannels) m_maxChannels = *maxChannels; @@ -86,6 +85,11 @@ void RTCSctpTransport::onStateChanged(RTCSctpTransportState state, std::optional }); } +void RTCSctpTransport::updateMaxMessageSize(std::optional maxMessageSize) +{ + m_maxMessageSize = maxMessageSize; +} + } // namespace WebCore #endif // ENABLE(WEB_RTC) diff --git a/Source/WebCore/Modules/mediastream/RTCSctpTransport.h b/Source/WebCore/Modules/mediastream/RTCSctpTransport.h index 2d2ab2c14ceb0..7b529c04e1642 100644 --- a/Source/WebCore/Modules/mediastream/RTCSctpTransport.h +++ b/Source/WebCore/Modules/mediastream/RTCSctpTransport.h @@ -46,10 +46,10 @@ class RTCSctpTransport final : public RefCounted, public Activ RTCDtlsTransport& transport() { return m_transport.get(); } RTCSctpTransportState state() const { return m_state; } - double maxMessageSize() const { return m_maxMessageSize; } + double maxMessageSize() const { return m_maxMessageSize.value_or(std::numeric_limits::infinity()); } std::optional maxChannels() const { return m_maxChannels; } - void update() { } + void updateMaxMessageSize(std::optional); const RTCSctpTransportBackend& backend() const { return m_backend.get(); } @@ -73,8 +73,7 @@ class RTCSctpTransport final : public RefCounted, public Activ UniqueRef m_backend; Ref m_transport; RTCSctpTransportState m_state { RTCSctpTransportState::Connecting }; - // https://w3c.github.io/webrtc-pc/#dfn-update-the-data-max-message-size - double m_maxMessageSize { std::numeric_limits::infinity() }; + std::optional m_maxMessageSize; std::optional m_maxChannels; }; diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp index 522019b107e3f..65a58fed6b1f0 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp @@ -461,7 +461,15 @@ void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription* GRefPtr transport; g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr); - m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), { }, 
transport ? makeUnique(WTFMove(transport)) : nullptr); + + std::optional maxMessageSize; + if (transport) { + uint64_t maxMessageSizeValue; + g_object_get(transport.get(), "max-message-size", &maxMessageSizeValue, nullptr); + maxMessageSize = static_cast(maxMessageSizeValue); + } + + m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), { }, transport ? makeUnique(WTFMove(transport)) : nullptr, maxMessageSize); }, [protectedThis = Ref(*this), this](const GError* error) { if (protectedThis->isStopped()) return; @@ -535,7 +543,15 @@ void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription& GRefPtr transport; g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr); - m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), { }, transport ? makeUnique(WTFMove(transport)) : nullptr); + + std::optional maxMessageSize; + if (transport) { + uint64_t maxMessageSizeValue; + g_object_get(transport.get(), "max-message-size", &maxMessageSizeValue, nullptr); + maxMessageSize = static_cast(maxMessageSizeValue); + } + + m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), { }, transport ? makeUnique(WTFMove(transport)) : nullptr, maxMessageSize); }, [protectedThis = Ref(*this), this](const GError* error) { if (protectedThis->isStopped()) return; diff --git a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp index 1bf844d81b8d6..0344d8646d66a 100644 --- a/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/libwebrtc/LibWebRTCMediaEndpoint.cpp @@ -644,6 +644,7 @@ class SctpTransportState { public: explicit SctpTransportState(rtc::scoped_refptr&&); std::unique_ptr createBackend(); + std::optional maxMessageSize() const; private: rtc::scoped_refptr m_transport; @@ -664,6 +665,11 @@ std::unique_ptr SctpTransportState::createBackend return makeUnique(WTFMove(m_transport), m_information.dtls_transport()); } +std::optional SctpTransportState::maxMessageSize() const +{ + return m_information.MaxMessageSize() ? 
std::make_optional(*m_information.MaxMessageSize()) : std::nullopt; +} + struct LibWebRTCMediaEndpointTransceiverState { String mid; Vector receiverStreamIds; @@ -725,7 +731,7 @@ void LibWebRTCMediaEndpoint::setLocalSessionDescriptionSucceeded() }); return { WTFMove(state.mid), WTFMove(streams), state.firedDirection }; }); - protectedThis->m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend()); + protectedThis->m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend(), sctpState.maxMessageSize()); }); } @@ -752,7 +758,7 @@ void LibWebRTCMediaEndpoint::setRemoteSessionDescriptionSucceeded() }); return { WTFMove(state.mid), WTFMove(streams), state.firedDirection }; }); - protectedThis->m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend()); + protectedThis->m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), sctpState.createBackend(), sctpState.maxMessageSize()); }); } From d2b1a8254e0208fd01692185635f77100be1293d Mon Sep 17 00:00:00 2001 From: Carlos Bentzen Date: Wed, 5 Jun 2024 08:38:20 -0700 Subject: [PATCH 08/15] Add debug logging of TransceiverStates https://bugs.webkit.org/show_bug.cgi?id=275146 Reviewed by Philippe Normand. Add debug logging of TransceiverStates, to aid debugging while moving the GStreamer backend out of m_pendingTrackEvents in PeerConnectionBackend. * Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp: (WebCore::PeerConnectionBackend::setLocalDescriptionSucceeded): (WebCore::PeerConnectionBackend::setRemoteDescriptionSucceeded): (WebCore::toJSONObject): (WebCore::toJSONArray): (WebCore::toJSONString): (WTF::LogArgument::toString): (WTF::LogArgument::toString): * Source/WebCore/Modules/mediastream/PeerConnectionBackend.h: (WebCore::PeerConnectionBackend::DescriptionStates::isolatedCopy): * Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h: Canonical link: https://commits.webkit.org/279742@main --- .../mediastream/PeerConnectionBackend.cpp | 54 ++++++++++++++++++- .../mediastream/PeerConnectionBackend.h | 18 ++++++- .../mediastream/RTCRtpTransceiverDirection.h | 4 ++ 3 files changed, 74 insertions(+), 2 deletions(-) diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp index f07469b9d4388..15a4470a75a61 100644 --- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp +++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp @@ -238,7 +238,8 @@ void PeerConnectionBackend::setLocalDescriptionSucceeded(std::optional toJSONObject(const PeerConnectionBackend::TransceiverState& transceiverState) +{ + auto object = JSON::Object::create(); + object->setString("mid"_s, transceiverState.mid); + + auto receiverStreams = JSON::Array::create(); + for (auto receiverStream : transceiverState.receiverStreams) + receiverStreams->pushString(receiverStream->id()); + object->setArray("receiverStreams"_s, WTFMove(receiverStreams)); + + if (auto firedDirection = transceiverState.firedDirection) + object->setString("firedDirection"_s, convertEnumerationToString(*firedDirection)); + + return object; +} + +static Ref toJSONArray(const PeerConnectionBackend::TransceiverStates& transceiverStates) +{ + auto array = JSON::Array::create(); + for (auto transceiverState : transceiverStates) + 
array->pushObject(toJSONObject(transceiverState)); + + return array; +} + +static String toJSONString(const PeerConnectionBackend::TransceiverState& transceiverState) +{ + return toJSONObject(transceiverState)->toJSONString(); +} + +static String toJSONString(const PeerConnectionBackend::TransceiverStates& transceiverStates) +{ + return toJSONArray(transceiverStates)->toJSONString(); +} + } // namespace WebCore +namespace WTF { + +String LogArgument::toString(const WebCore::PeerConnectionBackend::TransceiverState& transceiverState) +{ + return toJSONString(transceiverState); +} + +String LogArgument::toString(const WebCore::PeerConnectionBackend::TransceiverStates& transceiverStates) +{ + return toJSONString(transceiverStates); +} + +} + #endif // ENABLE(WEB_RTC) diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h index d5dd79eb913db..a43d01120973e 100644 --- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h +++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h @@ -279,7 +279,23 @@ inline PeerConnectionBackend::DescriptionStates PeerConnectionBackend::Descripti WTFMove(pendingRemoteDescriptionSdp).isolatedCopy() }; } - } // namespace WebCore +namespace WTF { + +template +struct LogArgument; + +template <> +struct LogArgument { + static String toString(const WebCore::PeerConnectionBackend::TransceiverState&); +}; + +template <> +struct LogArgument { + static String toString(const WebCore::PeerConnectionBackend::TransceiverStates&); +}; + +} + #endif // ENABLE(WEB_RTC) diff --git a/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h b/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h index 7c0d67db592a3..15d7136bdc131 100644 --- a/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h +++ b/Source/WebCore/platform/mediastream/RTCRtpTransceiverDirection.h @@ -27,6 +27,8 @@ #if ENABLE(WEB_RTC) +#include + namespace WebCore { enum class RTCRtpTransceiverDirection { @@ -36,6 +38,8 @@ enum class RTCRtpTransceiverDirection { Inactive }; +String convertEnumerationToString(RTCRtpTransceiverDirection); // in JSRTCRtpTransceiverDirection.h + } // namespace WebCore #endif From 18da40688f3747b6c9d646373f2374ac9f932fc3 Mon Sep 17 00:00:00 2001 From: Xabier Rodriguez-Calvar Date: Wed, 3 Jul 2024 06:41:32 -0700 Subject: [PATCH 09/15] [MediaStream][GStreamer] Hardware decoder/sinks do not have src pads https://bugs.webkit.org/show_bug.cgi?id=276170 Reviewed by Philippe Normand. In the MediaStream use case, we are trying to get decoder stats from a pad probe at the src pad of the video decoder. The problem comes with the decoder/sink elements that don't have one because they are the end of the pipeline. 
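For illustration, a minimal standalone sketch of the guarded probe installation described above (plain GStreamer/C++, not the WebKit code itself; attachStatsProbe(), onDecodedBuffer() and the log messages are illustrative names): when an element such as a combined hardware decoder/sink exposes no "src" pad, gst_element_get_static_pad() returns null and the stats probe is simply skipped instead of dereferencing a null pad.

    #include <gst/gst.h>

    // Buffer probe used only to illustrate where decoder stats would be collected.
    static GstPadProbeReturn onDecodedBuffer(GstPad*, GstPadProbeInfo* info, gpointer)
    {
        GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
        GST_LOG("decoded buffer with PTS %" GST_TIME_FORMAT, GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
        return GST_PAD_PROBE_OK;
    }

    // Attach the probe to the decoder's "src" pad, if it has one. Combined
    // hardware decoder/sink elements may expose no source pad, in which case
    // stats collection is skipped rather than crashing.
    static void attachStatsProbe(GstElement* decoder)
    {
        GstPad* pad = gst_element_get_static_pad(decoder, "src");
        if (!pad) {
            GST_INFO_OBJECT(decoder, "No src pad, likely a hardware decoder sink; skipping stats probe");
            return;
        }
        gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, onDecodedBuffer, nullptr, nullptr);
        gst_object_unref(pad);
    }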
* Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp: (WebCore::MediaPlayerPrivateGStreamer::configureVideoDecoder): Canonical link: https://commits.webkit.org/280622@main --- .../graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp index d2d076a2aba7c..4a82a4e19aef8 100644 --- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp +++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp @@ -3042,6 +3042,10 @@ void MediaPlayerPrivateGStreamer::configureVideoDecoder(GstElement* decoder) g_object_set(decoder, "max-errors", -1, nullptr); auto pad = adoptGRef(gst_element_get_static_pad(decoder, "src")); + if (!pad) { + GST_INFO_OBJECT(pipeline(), "the decoder %s does not have a src pad, probably because it's a hardware decoder sink, can't get decoder stats", name.get()); + return; + } gst_pad_add_probe(pad.get(), static_cast(GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_BUFFER), [](GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn { auto* player = static_cast(userData); if (GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER) { From bd228726d5c3e8341a4ebfe924e62fcef6d40010 Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Wed, 24 Jul 2024 09:09:07 +0100 Subject: [PATCH 10/15] [GStreamer][WebRTC] Minor improvements in incoming track processor https://bugs.webkit.org/show_bug.cgi?id=276988 Reviewed by Xabier Rodriguez-Calvar. The msid information can be present in the pad caps, so when that's the case inspecting the SDP is not required. Also we now send the force-key-unit event only for video tracks. 
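A rough standalone sketch of the caps-based msid lookup described above (plain GStreamer/C++, not the WebKit code; msidFromCaps() and its return convention are illustrative): webrtcbin exposes SDP source attributes in the pad caps as "ssrc-<ssrc>-msid" fields whose value is "<stream id> <track id>", so the SDP only needs to be inspected when that field is missing.

    #include <gst/gst.h>
    #include <string>
    #include <utility>

    // Returns { media-stream id, track id } parsed from the "ssrc-<ssrc>-msid"
    // caps field, or empty strings when the attribute is not present.
    static std::pair<std::string, std::string> msidFromCaps(const GstCaps* caps)
    {
        const GstStructure* structure = gst_caps_get_structure(caps, 0);
        unsigned ssrc = 0;
        if (!gst_structure_get_uint(structure, "ssrc", &ssrc))
            return { };

        std::string fieldName = "ssrc-" + std::to_string(ssrc) + "-msid";
        const char* msid = gst_structure_get_string(structure, fieldName.c_str());
        if (!msid)
            return { };

        std::string value(msid);
        auto space = value.find(' ');
        if (space == std::string::npos)
            return { };
        return { value.substr(0, space), value.substr(space + 1) };
    }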
* Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp: (WebCore::GStreamerIncomingTrackProcessor::configure): (WebCore::GStreamerIncomingTrackProcessor::incomingTrackProcessor): Canonical link: https://commits.webkit.org/281286@main --- .../GStreamerIncomingTrackProcessor.cpp | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp index c429779937d54..50c3009dbbb03 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp @@ -53,7 +53,20 @@ void GStreamerIncomingTrackProcessor::configure(GStreamerMediaEndpoint* endPoint m_data.caps = WTFMove(caps); g_object_get(m_pad.get(), "transceiver", &m_data.transceiver.outPtr(), nullptr); - retrieveMediaStreamAndTrackIdFromSDP(); + + auto structure = gst_caps_get_structure(m_data.caps.get(), 0); + unsigned ssrc; + if (gst_structure_get_uint(structure, "ssrc", &ssrc)) { + auto msIdAttributeName = makeString("ssrc-"_s, ssrc, "-msid"_s); + auto msIdAttribute = gst_structure_get_string(structure, msIdAttributeName.ascii().data()); + auto components = String::fromUTF8(msIdAttribute).split(' '); + if (components.size() == 2) + m_sdpMsIdAndTrackId = { components[0], components[1] }; + } + + if (m_sdpMsIdAndTrackId.second.isEmpty()) + retrieveMediaStreamAndTrackIdFromSDP(); + m_data.mediaStreamId = mediaStreamIdFromPad(); if (!m_sdpMsIdAndTrackId.second.isEmpty()) @@ -128,8 +141,10 @@ GRefPtr GStreamerIncomingTrackProcessor::incomingTrackProcessor() if (m_data.type == RealtimeMediaSource::Type::Audio) return createParser(); - GST_DEBUG_OBJECT(m_bin.get(), "Requesting a key-frame"); - gst_pad_send_event(m_pad.get(), gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, TRUE, 1)); + if (m_data.type == RealtimeMediaSource::Type::Video) { + GST_DEBUG_OBJECT(m_bin.get(), "Requesting a key-frame"); + gst_pad_send_event(m_pad.get(), gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, TRUE, 1)); + } bool forceEarlyVideoDecoding = !g_strcmp0(g_getenv("WEBKIT_GST_WEBRTC_FORCE_EARLY_VIDEO_DECODING"), "1"); GST_DEBUG_OBJECT(m_bin.get(), "Configuring for input caps: %" GST_PTR_FORMAT "%s", m_data.caps.get(), forceEarlyVideoDecoding ? " and early decoding" : ""); From 88bdf0a6932faaab1e2705ba8ab0c76b7ac70951 Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Wed, 24 Jul 2024 09:14:41 +0100 Subject: [PATCH 11/15] [GStreamer][WebRTC] Simplified pipeline for incoming tracks https://bugs.webkit.org/show_bug.cgi?id=276989 Reviewed by Xabier Rodriguez-Calvar. The incoming track processor now feeds a single sink, no tee or dynamic pipeline manipulations involved anymore. This brings back a timeout in webrtc/h265.html, but it will be fixed once we have track events dispatching fixed (bug #275685). 
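The shape of the simplified per-track bin described above can be sketched roughly as follows (standalone GStreamer/C++, not the WebKit code; makeIncomingTrackBin() is an illustrative name and assumes the processor is a parsebin/decodebin3 whose source pad gets linked to the queue from its "pad-added" handler):

    #include <gst/gst.h>

    // One processor feeding a queue and a single fakesink, exposed through a
    // ghost "sink" pad. No tee, no dynamic branch management.
    static GstElement* makeIncomingTrackBin(GstElement* processor)
    {
        GstElement* bin = gst_bin_new("incoming-track");
        GstElement* queue = gst_element_factory_make("queue", "queue");
        GstElement* sink = gst_element_factory_make("fakesink", "sink");
        g_object_set(sink, "sync", TRUE, "enable-last-sample", FALSE, nullptr);

        gst_bin_add_many(GST_BIN(bin), processor, queue, sink, nullptr);
        gst_element_link(queue, sink);
        // The processor's source pad is linked to the queue from its
        // "pad-added" handler once the stream type is known.

        GstPad* processorSinkPad = gst_element_get_static_pad(processor, "sink");
        gst_element_add_pad(bin, gst_ghost_pad_new("sink", processorSinkPad));
        gst_object_unref(processorSinkPad);
        return bin;
    }

Feeding a single synchronized fakesink keeps the data flow linear; per-consumer fan-out then happens outside the pipeline, via the sink's handoff callbacks, as the later diff hunks show.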
* LayoutTests/platform/glib/TestExpectations: * Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp: (WebCore::GStreamerMediaEndpoint::setConfiguration): (WebCore::GStreamerMediaEndpoint::connectIncomingTrack): (WebCore::GStreamerMediaEndpoint::connectPad): * Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp: (WebCore::GStreamerIncomingTrackProcessor::configure): (WebCore::GStreamerIncomingTrackProcessor::incomingTrackProcessor): (WebCore::GStreamerIncomingTrackProcessor::createParser): (WebCore::GStreamerIncomingTrackProcessor::stats): * Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h: * Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp: * Source/WebCore/platform/mediastream/gstreamer/GStreamerWebRTCCommon.h: * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceGStreamer.cpp: * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.cpp: (WebCore::RealtimeIncomingSourceGStreamer::RealtimeIncomingSourceGStreamer): (WebCore::RealtimeIncomingSourceGStreamer::setBin): (WebCore::RealtimeIncomingSourceGStreamer::registerClient): (WebCore::RealtimeIncomingSourceGStreamer::unregisterClient): (WebCore::RealtimeIncomingSourceGStreamer::unregisterClientLocked): (WebCore::RealtimeIncomingSourceGStreamer::forEachClient): (WebCore::RealtimeIncomingSourceGStreamer::handleUpstreamEvent): (WebCore::RealtimeIncomingSourceGStreamer::handleUpstreamQuery): (WebCore::RealtimeIncomingSourceGStreamer::handleDownstreamEvent): (WebCore::RealtimeIncomingSourceGStreamer::setUpstreamBin): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::startProducingData): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::stopProducingData): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::configureAppSink): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::configureFakeVideoSink): Deleted. * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.h: (WebCore::RealtimeIncomingSourceGStreamer::bin const): (WebCore::RealtimeIncomingSourceGStreamer::bin): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::setIsUpstreamDecoding): Deleted. (WebCore::RealtimeIncomingSourceGStreamer::dispatchSample): Deleted. * Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp: (WebCore::RealtimeIncomingVideoSourceGStreamer::setBin): (WebCore::RealtimeIncomingVideoSourceGStreamer::dispatchSample): (WebCore::RealtimeIncomingVideoSourceGStreamer::setUpstreamBin): Deleted. 
* Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.h: Canonical link: https://commits.webkit.org/281394@main --- LayoutTests/platform/glib/TestExpectations | 2 + .../gstreamer/GStreamerMediaEndpoint.cpp | 21 +- .../GStreamerIncomingTrackProcessor.cpp | 61 +++-- .../GStreamerIncomingTrackProcessor.h | 4 +- .../gstreamer/GStreamerMediaStreamSource.cpp | 48 ++-- .../gstreamer/GStreamerWebRTCCommon.h | 1 - .../RealtimeIncomingAudioSourceGStreamer.cpp | 3 - .../RealtimeIncomingSourceGStreamer.cpp | 255 ++++++------------ .../RealtimeIncomingSourceGStreamer.h | 27 +- .../RealtimeIncomingVideoSourceGStreamer.cpp | 18 +- .../RealtimeIncomingVideoSourceGStreamer.h | 2 +- 11 files changed, 175 insertions(+), 267 deletions(-) diff --git a/LayoutTests/platform/glib/TestExpectations b/LayoutTests/platform/glib/TestExpectations index a3023fa33a1a7..56744c4b904a9 100644 --- a/LayoutTests/platform/glib/TestExpectations +++ b/LayoutTests/platform/glib/TestExpectations @@ -1474,6 +1474,8 @@ webrtc/video-av1.html [ Skip ] # GStreamer's DTLS agent currently generates RSA certificates only. DTLS 1.2 is not supported yet (AFAIK). webrtc/datachannel/dtls10.html [ Failure ] +# FIXME: Remove Timeout expectation once bug #275685 is fixed. +webkit.org/b/269285 webrtc/h265.html [ Failure Timeout ] # Too slow with filtering implemented in WebKit. Should be done directly by GstWebRTC. webrtc/datachannel/filter-ice-candidate.html [ Skip ] diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp index 65a58fed6b1f0..e1f6a467ee0ef 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp @@ -282,9 +282,8 @@ bool GStreamerMediaEndpoint::setConfiguration(MediaEndpointConfiguration& config // WIP: https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/302 GST_FIXME("%zu custom certificates not propagated to webrtcbin", configuration.certificates.size()); - gst_element_set_state(m_pipeline.get(), GST_STATE_READY); - GST_DEBUG_OBJECT(m_pipeline.get(), "End-point ready"); gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING); + GST_DEBUG_OBJECT(m_pipeline.get(), "End-point ready"); return true; } @@ -998,24 +997,17 @@ void GStreamerMediaEndpoint::connectIncomingTrack(WebRTCTrackData& data) m_peerConnectionBackend.addPendingTrackEvent({ Ref(transceiver->receiver()), Ref(transceiver->receiver().track()), { }, Ref(*transceiver) }); auto mediaStreamBin = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_pipeline.get()), data.mediaStreamBinName.ascii().data())); - auto tee = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(mediaStreamBin.get()), "tee")); - GstElement* bin = nullptr; auto& track = transceiver->receiver().track(); auto& source = track.privateTrack().source(); if (source.isIncomingAudioSource()) { auto& audioSource = static_cast(source); - audioSource.setUpstreamBin(mediaStreamBin); - audioSource.setIsUpstreamDecoding(data.isUpstreamDecoding); - bin = audioSource.bin(); + if (!audioSource.setBin(mediaStreamBin)) + return; } else if (source.isIncomingVideoSource()) { auto& videoSource = static_cast(source); - videoSource.setUpstreamBin(mediaStreamBin); - videoSource.setIsUpstreamDecoding(data.isUpstreamDecoding); - bin = videoSource.bin(); + if (!videoSource.setBin(mediaStreamBin)) + return; } - ASSERT(bin); - - gst_bin_add(GST_BIN_CAST(m_pipeline.get()), bin); auto& 
mediaStream = mediaStreamFromRTCStream(data.mediaStreamId); mediaStream.addTrackFromPlatform(track); @@ -1033,6 +1025,7 @@ void GStreamerMediaEndpoint::connectIncomingTrack(WebRTCTrackData& data) auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".connected-"_s, data.mediaStreamId); GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data()); #endif + gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING); } void GStreamerMediaEndpoint::connectPad(GstPad* pad) @@ -1048,7 +1041,7 @@ void GStreamerMediaEndpoint::connectPad(GstPad* pad) auto sinkPad = adoptGRef(gst_element_get_static_pad(bin, "sink")); gst_pad_link(pad, sinkPad.get()); - gst_element_sync_state_with_parent(bin); + gst_element_set_state(bin, GST_STATE_PAUSED); #ifndef GST_DISABLE_GST_DEBUG auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".pending-"_s, GST_OBJECT_NAME(pad)); diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp index 50c3009dbbb03..21d15aa32d8f3 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp @@ -41,17 +41,25 @@ void GStreamerIncomingTrackProcessor::configure(GStreamerMediaEndpoint* endPoint { m_endPoint = endPoint; m_pad = WTFMove(pad); - m_data.mediaStreamBinName = makeString(GST_OBJECT_NAME(m_pad.get())); - m_bin = gst_bin_new(m_data.mediaStreamBinName.ascii().data()); auto caps = adoptGRef(gst_pad_get_current_caps(m_pad.get())); if (!caps) caps = adoptGRef(gst_pad_query_caps(m_pad.get(), nullptr)); - GST_DEBUG_OBJECT(m_bin.get(), "Processing track with caps %" GST_PTR_FORMAT, caps.get()); - m_data.type = doCapsHaveType(caps.get(), "audio") ? 
RealtimeMediaSource::Type::Audio : RealtimeMediaSource::Type::Video; + ASCIILiteral typeName; + if (doCapsHaveType(caps.get(), "audio")) { + typeName = "audio"_s; + m_data.type = RealtimeMediaSource::Type::Audio; + } else { + typeName = "video"_s; + m_data.type = RealtimeMediaSource::Type::Video; + } m_data.caps = WTFMove(caps); + m_data.mediaStreamBinName = makeString("incoming-"_s, typeName, "-track-"_s, GST_OBJECT_NAME(m_pad.get())); + m_bin = gst_bin_new(m_data.mediaStreamBinName.ascii().data()); + GST_DEBUG_OBJECT(m_bin.get(), "Processing track with caps %" GST_PTR_FORMAT, m_data.caps.get()); + g_object_get(m_pad.get(), "transceiver", &m_data.transceiver.outPtr(), nullptr); auto structure = gst_caps_get_structure(m_data.caps.get(), 0); @@ -72,15 +80,30 @@ void GStreamerIncomingTrackProcessor::configure(GStreamerMediaEndpoint* endPoint if (!m_sdpMsIdAndTrackId.second.isEmpty()) m_data.trackId = m_sdpMsIdAndTrackId.second; - m_tee = gst_element_factory_make("tee", "tee"); - g_object_set(m_tee.get(), "allow-not-linked", TRUE, nullptr); + m_sink = gst_element_factory_make("fakesink", "sink"); + g_object_set(m_sink.get(), "sync", TRUE, "enable-last-sample", FALSE, nullptr); + auto queue = gst_element_factory_make("queue", "queue"); auto trackProcessor = incomingTrackProcessor(); - m_data.isUpstreamDecoding = m_isDecoding; - gst_bin_add_many(GST_BIN_CAST(m_bin.get()), m_tee.get(), trackProcessor.get(), nullptr); + gst_bin_add_many(GST_BIN_CAST(m_bin.get()), trackProcessor.get(), queue, m_sink.get(), nullptr); + gst_element_link(queue, m_sink.get()); + auto sinkPad = adoptGRef(gst_element_get_static_pad(trackProcessor.get(), "sink")); gst_element_add_pad(m_bin.get(), gst_ghost_pad_new("sink", sinkPad.get())); + + if (m_data.type != RealtimeMediaSource::Type::Video || !m_isDecoding) + return; + + auto sinkSinkPad = adoptGRef(gst_element_get_static_pad(m_sink.get(), "sink")); + gst_pad_add_probe(sinkSinkPad.get(), GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, reinterpret_cast(+[](GstPad*, GstPadProbeInfo* info, gpointer) -> GstPadProbeReturn { + auto query = GST_PAD_PROBE_INFO_QUERY(info); + if (GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION) + return GST_PAD_PROBE_OK; + + gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr); + return GST_PAD_PROBE_REMOVE; + }), nullptr, nullptr); } String GStreamerIncomingTrackProcessor::mediaStreamIdFromPad() @@ -166,12 +189,6 @@ GRefPtr GStreamerIncomingTrackProcessor::incomingTrackProcessor() GRefPtr decodebin = makeGStreamerElement("decodebin3", nullptr); m_isDecoding = true; - m_queue = gst_element_factory_make("queue", nullptr); - m_fakeVideoSink = makeGStreamerElement("fakevideosink", nullptr); - g_object_set(m_fakeVideoSink.get(), "enable-last-sample", FALSE, nullptr); - gst_bin_add_many(GST_BIN_CAST(m_bin.get()), m_queue.get(), m_fakeVideoSink.get(), nullptr); - gst_element_link(m_queue.get(), m_fakeVideoSink.get()); - g_signal_connect(decodebin.get(), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin*, GstElement* element, gpointer) { auto elementClass = makeString(gst_element_get_metadata(element, GST_ELEMENT_METADATA_KLASS)); auto classifiers = elementClass.split('/'); @@ -207,13 +224,9 @@ GRefPtr GStreamerIncomingTrackProcessor::incomingTrackProcessor() }), this); g_signal_connect_swapped(decodebin.get(), "pad-added", G_CALLBACK(+[](GStreamerIncomingTrackProcessor* self, GstPad* pad) { - auto sinkPad = adoptGRef(gst_element_get_static_pad(self->m_tee.get(), "sink")); + auto queue = 
adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(self->m_bin.get()), "queue")); + auto sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink")); gst_pad_link(pad, sinkPad.get()); - - gst_element_link(self->m_tee.get(), self->m_queue.get()); - gst_element_sync_state_with_parent(self->m_tee.get()); - gst_element_sync_state_with_parent(self->m_queue.get()); - gst_element_sync_state_with_parent(self->m_fakeVideoSink.get()); self->trackReady(); }), this); return decodebin; @@ -232,9 +245,9 @@ GRefPtr GStreamerIncomingTrackProcessor::createParser() }), nullptr); g_signal_connect_swapped(parsebin.get(), "pad-added", G_CALLBACK(+[](GStreamerIncomingTrackProcessor* self, GstPad* pad) { - auto sinkPad = adoptGRef(gst_element_get_static_pad(self->m_tee.get(), "sink")); + auto queue = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(self->m_bin.get()), "queue")); + auto sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink")); gst_pad_link(pad, sinkPad.get()); - gst_element_sync_state_with_parent(self->m_tee.get()); self->trackReady(); }), this); return parsebin; @@ -265,7 +278,9 @@ const GstStructure* GStreamerIncomingTrackProcessor::stats() m_stats.reset(gst_structure_new_empty("incoming-video-stats")); uint64_t droppedVideoFrames = 0; GUniqueOutPtr stats; - g_object_get(m_fakeVideoSink.get(), "stats", &stats.outPtr(), nullptr); + + g_object_get(m_sink.get(), "stats", &stats.outPtr(), nullptr); + if (!gst_structure_get_uint64(stats.get(), "dropped", &droppedVideoFrames)) return m_stats.get(); diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h index 58835d483bd56..170597322c743 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.h @@ -61,7 +61,6 @@ class GStreamerIncomingTrackProcessor : public RefCounted m_pad; GRefPtr m_bin; - GRefPtr m_tee; WebRTCTrackData m_data; std::pair m_sdpMsIdAndTrackId; @@ -69,8 +68,7 @@ class GStreamerIncomingTrackProcessor : public RefCounted m_queue; - GRefPtr m_fakeVideoSink; + GRefPtr m_sink; GUniquePtr m_stats; bool m_isReady { false }; }; diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp index f1c5640ad2ca3..1fedcd9a55f2d 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp @@ -124,14 +124,6 @@ class WebKitMediaStreamObserver : public MediaStreamPrivate::Observer { static void webkitMediaStreamSrcEnsureStreamCollectionPosted(WebKitMediaStreamSrc*); -#if USE(GSTREAMER_WEBRTC) -struct InternalSourcePadProbeData { - RealtimeIncomingSourceGStreamer* incomingSource; - int clientId; -}; -WEBKIT_DEFINE_ASYNC_DATA_STRUCT(InternalSourcePadProbeData) -#endif - class InternalSource final : public MediaStreamTrackPrivate::Observer, public RealtimeMediaSource::Observer, public RealtimeMediaSource::AudioSampleObserver, @@ -200,7 +192,7 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer, { #if USE(GSTREAMER_WEBRTC) auto& trackSource = m_track.source(); - std::optional clientId; + int clientId; auto client = GRefPtr(m_src); if (trackSource.isIncomingAudioSource()) { auto& source = static_cast(trackSource); @@ -219,23 +211,15 @@ class InternalSource final : public 
MediaStreamTrackPrivate::Observer, clientId = source.registerClient(WTFMove(client)); } - if (!clientId) { - GST_WARNING_OBJECT(m_src.get(), "Incoming track registration failed, track likely not ready yet."); - return; - } - - m_webrtcSourceClientId = *clientId; - - auto data = createInternalSourcePadProbeData(); - data->incomingSource = static_cast(&trackSource); - data->clientId = *m_webrtcSourceClientId; + m_webrtcSourceClientId = clientId; + auto incomingSource = static_cast(&trackSource); auto srcPad = adoptGRef(gst_element_get_static_pad(m_src.get(), "src")); gst_pad_add_probe(srcPad.get(), static_cast(GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | GST_PAD_PROBE_TYPE_QUERY_UPSTREAM), reinterpret_cast(+[](GstPad* pad, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn { - auto data = static_cast(userData); - if (!data->incomingSource) + auto weakSource = static_cast*>(userData); + if (!weakSource) return GST_PAD_PROBE_REMOVE; - + auto incomingSource = weakSource->get(); auto src = adoptGRef(gst_pad_get_parent_element(pad)); if (GST_IS_QUERY(info->data)) { switch (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info))) { @@ -248,21 +232,23 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer, } else GST_DEBUG_OBJECT(src.get(), "Proxying event %" GST_PTR_FORMAT " to appsink peer", GST_PAD_PROBE_INFO_EVENT(info)); - if (data->incomingSource->isIncomingAudioSource()) { - auto& source = static_cast(*data->incomingSource); + if (incomingSource->isIncomingAudioSource()) { + auto& source = static_cast(*incomingSource); if (GST_IS_EVENT(info->data)) - source.handleUpstreamEvent(GRefPtr(GST_PAD_PROBE_INFO_EVENT(info)), data->clientId); - else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info), data->clientId)) + source.handleUpstreamEvent(GRefPtr(GST_PAD_PROBE_INFO_EVENT(info))); + else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info))) return GST_PAD_PROBE_HANDLED; - } else if (data->incomingSource->isIncomingVideoSource()) { - auto& source = static_cast(*data->incomingSource); + } else if (incomingSource->isIncomingVideoSource()) { + auto& source = static_cast(*incomingSource); if (GST_IS_EVENT(info->data)) - source.handleUpstreamEvent(GRefPtr(GST_PAD_PROBE_INFO_EVENT(info)), data->clientId); - else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info), data->clientId)) + source.handleUpstreamEvent(GRefPtr(GST_PAD_PROBE_INFO_EVENT(info))); + else if (source.handleUpstreamQuery(GST_PAD_PROBE_INFO_QUERY(info))) return GST_PAD_PROBE_HANDLED; } return GST_PAD_PROBE_OK; - }), data, reinterpret_cast(destroyInternalSourcePadProbeData)); + }), new WeakPtr { incomingSource }, reinterpret_cast(+[](gpointer data) { + delete static_cast*>(data); + })); #endif } diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerWebRTCCommon.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerWebRTCCommon.h index 75c39dcfa7d23..2dbe952a8d75c 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerWebRTCCommon.h +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerWebRTCCommon.h @@ -27,7 +27,6 @@ using WebRTCTrackData = struct _WebRTCTrackData { String trackId; String mediaStreamBinName; GRefPtr transceiver; - bool isUpstreamDecoding; RealtimeMediaSource::Type type; GRefPtr caps; }; diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceGStreamer.cpp b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceGStreamer.cpp index 181fc296f8fcb..ec784a333dd36 100644 --- 
a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceGStreamer.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceGStreamer.cpp @@ -37,9 +37,6 @@ RealtimeIncomingAudioSourceGStreamer::RealtimeIncomingAudioSourceGStreamer(AtomS std::call_once(debugRegisteredFlag, [] { GST_DEBUG_CATEGORY_INIT(webkit_webrtc_incoming_audio_debug, "webkitwebrtcincomingaudio", 0, "WebKit WebRTC incoming audio"); }); - static Atomic sourceCounter = 0; - gst_element_set_name(bin(), makeString("incoming-audio-source-"_s, sourceCounter.exchangeAdd(1)).ascii().data()); - GST_DEBUG_OBJECT(bin(), "New incoming audio source created with ID %s", persistentID().ascii().data()); } RealtimeIncomingAudioSourceGStreamer::~RealtimeIncomingAudioSourceGStreamer() diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.cpp b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.cpp index 79fb1a92fc74b..a899e6b5acd6d 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.cpp @@ -39,25 +39,51 @@ RealtimeIncomingSourceGStreamer::RealtimeIncomingSourceGStreamer(Type type, Atom std::call_once(debugRegisteredFlag, [] { GST_DEBUG_CATEGORY_INIT(webkit_webrtc_incoming_media_debug, "webkitwebrtcincoming", 0, "WebKit WebRTC incoming media"); }); - m_bin = gst_bin_new(nullptr); } -void RealtimeIncomingSourceGStreamer::setUpstreamBin(const GRefPtr& bin) +bool RealtimeIncomingSourceGStreamer::setBin(const GRefPtr& bin) { - m_upstreamBin = bin; - m_tee = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_upstreamBin.get()), "tee")); -} + ASSERT(!m_bin); + if (UNLIKELY(m_bin)) { + GST_ERROR_OBJECT(m_bin.get(), "Calling setBin twice on the same incoming source instance is not allowed"); + return false; + } -void RealtimeIncomingSourceGStreamer::startProducingData() -{ - GST_DEBUG_OBJECT(bin(), "Starting data flow"); - m_isStarted = true; -} + m_bin = bin; + m_sink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_bin.get()), "sink")); + g_object_set(m_sink.get(), "signal-handoffs", TRUE, nullptr); + + auto handoffCallback = G_CALLBACK(+[](GstElement*, GstBuffer* buffer, GstPad* pad, gpointer userData) { + auto source = reinterpret_cast(userData); + auto caps = adoptGRef(gst_pad_get_current_caps(pad)); + auto sample = adoptGRef(gst_sample_new(buffer, caps.get(), nullptr, nullptr)); + // dispatchSample might trigger RealtimeMediaSource::notifySettingsDidChangeObservers() + // which expects to run in the main thread. 
+ callOnMainThread([source, sample = WTFMove(sample)]() mutable { + source->dispatchSample(WTFMove(sample)); + }); + }); + g_signal_connect(m_sink.get(), "preroll-handoff", handoffCallback, this); + g_signal_connect(m_sink.get(), "handoff", handoffCallback, this); + + auto sinkPad = adoptGRef(gst_element_get_static_pad(m_sink.get(), "sink")); + gst_pad_add_probe(sinkPad.get(), static_cast(GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM), reinterpret_cast(+[](GstPad* pad, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn { + auto self = reinterpret_cast(userData); + if (info->type & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) { + GRefPtr event = GST_PAD_PROBE_INFO_EVENT(info); + auto sink = adoptGRef(gst_pad_get_parent_element(pad)); + self->handleDownstreamEvent(sink.get(), WTFMove(event)); + return GST_PAD_PROBE_OK; + } -void RealtimeIncomingSourceGStreamer::stopProducingData() -{ - GST_DEBUG_OBJECT(bin(), "Stopping data flow"); - m_isStarted = false; + auto query = GST_PAD_PROBE_INFO_QUERY(info); + self->forEachClient([&](auto* appsrc) { + auto srcSrcPad = adoptGRef(gst_element_get_static_pad(appsrc, "src")); + gst_pad_peer_query(srcSrcPad.get(), query); + }); + return GST_PAD_PROBE_OK; + }), this, nullptr); + return true; } const RealtimeMediaSourceCapabilities& RealtimeIncomingSourceGStreamer::capabilities() @@ -75,185 +101,82 @@ bool RealtimeIncomingSourceGStreamer::hasClient(const GRefPtr& appsr return false; } -std::optional RealtimeIncomingSourceGStreamer::registerClient(GRefPtr&& appsrc) +int RealtimeIncomingSourceGStreamer::registerClient(GRefPtr&& appsrc) { - if (!m_tee) - return std::nullopt; - Locker lock { m_clientLock }; static Atomic counter = 1; auto clientId = counter.exchangeAdd(1); - auto* queue = gst_element_factory_make("queue", makeString("queue-"_s, clientId).ascii().data()); - auto* sink = makeGStreamerElement("appsink", makeString("sink-"_s, clientId).ascii().data()); - g_object_set(sink, "enable-last-sample", FALSE, nullptr); - - if (!m_clientQuark) - m_clientQuark = g_quark_from_static_string("client-id"); - g_object_set_qdata(G_OBJECT(sink), m_clientQuark, GINT_TO_POINTER(clientId)); - GST_DEBUG_OBJECT(m_bin.get(), "Client %" GST_PTR_FORMAT " with id %d associated to new sink %" GST_PTR_FORMAT, appsrc.get(), clientId, sink); m_clients.add(clientId, WTFMove(appsrc)); - - static GstAppSinkCallbacks callbacks = { - nullptr, // eos - [](GstAppSink* sink, gpointer userData) -> GstFlowReturn { - auto self = reinterpret_cast(userData); - auto sample = adoptGRef(gst_app_sink_pull_preroll(sink)); - // dispatchSample might trigger RealtimeMediaSource::notifySettingsDidChangeObservers() - // which expects to run in the main thread. - callOnMainThread([self, sample = WTFMove(sample)]() mutable { - self->dispatchSample(WTFMove(sample)); - }); - return GST_FLOW_OK; - }, - [](GstAppSink* sink, gpointer userData) -> GstFlowReturn { - auto self = reinterpret_cast(userData); - auto sample = adoptGRef(gst_app_sink_pull_sample(sink)); - // dispatchSample might trigger RealtimeMediaSource::notifySettingsDidChangeObservers() - // which expects to run in the main thread. 
- callOnMainThread([self, sample = WTFMove(sample)]() mutable { - self->dispatchSample(WTFMove(sample)); - }); - return GST_FLOW_OK; - }, - [](GstAppSink* sink, gpointer userData) -> gboolean { - auto self = reinterpret_cast(userData); - auto event = adoptGRef(GST_EVENT_CAST(gst_app_sink_pull_object(sink))); - switch (GST_EVENT_TYPE(event.get())) { - case GST_EVENT_STREAM_START: - case GST_EVENT_CAPS: - case GST_EVENT_SEGMENT: - case GST_EVENT_STREAM_COLLECTION: - return false; - case GST_EVENT_LATENCY: { - GstClockTime minLatency, maxLatency; - if (gst_base_sink_query_latency(GST_BASE_SINK(sink), nullptr, nullptr, &minLatency, &maxLatency)) { - if (int clientId = GPOINTER_TO_INT(g_object_get_qdata(G_OBJECT(sink), self->m_clientQuark))) { - GST_DEBUG_OBJECT(sink, "Setting client latency to min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT, GST_TIME_ARGS(minLatency), GST_TIME_ARGS(maxLatency)); - auto appsrc = self->m_clients.get(clientId); - g_object_set(appsrc, "min-latency", minLatency, "max-latency", maxLatency, nullptr); - } - } - return false; - } - default: - break; - } - - if (int clientId = GPOINTER_TO_INT(g_object_get_qdata(G_OBJECT(sink), self->m_clientQuark))) { - GST_DEBUG_OBJECT(sink, "Forwarding event %" GST_PTR_FORMAT " to client", event.get()); - auto appsrc = self->m_clients.get(clientId); - auto pad = adoptGRef(gst_element_get_static_pad(appsrc, "src")); - gst_pad_push_event(pad.get(), event.leakRef()); - } - - return false; - }, -#if GST_CHECK_VERSION(1, 24, 0) - // propose_allocation - nullptr, -#endif - { nullptr } - }; - gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, nullptr); - - auto sinkPad = adoptGRef(gst_element_get_static_pad(sink, "sink")); - gst_pad_add_probe(sinkPad.get(), GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, reinterpret_cast(+[](GstPad* pad, GstPadProbeInfo* info, RealtimeIncomingSourceGStreamer* self) -> GstPadProbeReturn { - auto query = GST_QUERY_CAST(info->data); - if (self->isIncomingVideoSource() && self->m_isUpstreamDecoding && GST_QUERY_TYPE(query) == GST_QUERY_ALLOCATION) - gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL); - - auto sink = adoptGRef(gst_pad_get_parent_element(pad)); - int clientId = GPOINTER_TO_INT(g_object_get_qdata(G_OBJECT(sink.get()), self->m_clientQuark)); - if (!clientId) - return GST_PAD_PROBE_OK; - - auto appsrc = self->m_clients.get(clientId); - auto srcSrcPad = adoptGRef(gst_element_get_static_pad(appsrc, "src")); - if (gst_pad_peer_query(srcSrcPad.get(), query)) - return GST_PAD_PROBE_HANDLED; - - return GST_PAD_PROBE_OK; - }), this, nullptr); - - auto padName = makeString("src_"_s, clientId); - auto teeSrcPad = adoptGRef(gst_element_request_pad_simple(m_tee.get(), padName.ascii().data())); - - GUniquePtr name(gst_pad_get_name(teeSrcPad.get())); - auto ghostSrcPad = gst_ghost_pad_new(name.get(), teeSrcPad.get()); - gst_element_add_pad(m_upstreamBin.get(), ghostSrcPad); - - gst_bin_add_many(GST_BIN_CAST(m_bin.get()), queue, sink, nullptr); - gst_element_link(queue, sink); - - auto queueSinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink")); - auto ghostSinkPadName = makeString("sink-"_s, clientId); - auto ghostSinkPad = gst_ghost_pad_new(ghostSinkPadName.ascii().data(), queueSinkPad.get()); - gst_element_add_pad(m_bin.get(), ghostSinkPad); - - gst_pad_link(ghostSrcPad, ghostSinkPad); - gst_element_sync_state_with_parent(queue); - gst_element_sync_state_with_parent(sink); - gst_element_sync_state_with_parent(m_bin.get()); - - 
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN_CAST(m_bin.get()), GST_DEBUG_GRAPH_SHOW_ALL, GST_OBJECT_NAME(m_bin.get())); return clientId; } void RealtimeIncomingSourceGStreamer::unregisterClient(int clientId) { Locker lock { m_clientLock }; - unregisterClientLocked(clientId); + GST_DEBUG_OBJECT(m_bin.get(), "Unregistering client %d", clientId); + m_clients.remove(clientId); } -void RealtimeIncomingSourceGStreamer::unregisterClientLocked(int clientId) +void RealtimeIncomingSourceGStreamer::forEachClient(Function&& applyFunction) { - GST_DEBUG_OBJECT(m_bin.get(), "Unregistering client %d", clientId); - auto name = makeString("sink-", clientId); - auto sink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_bin.get()), name.ascii().data())); - auto queue = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_bin.get()), makeString("queue-", clientId).ascii().data())); - - auto ghostSinkPad = adoptGRef(gst_element_get_static_pad(m_bin.get(), name.ascii().data())); - auto padName = makeString("src_"_s, clientId); - auto teeSrcPad = adoptGRef(gst_element_get_static_pad(m_tee.get(), padName.ascii().data())); - - gst_element_set_locked_state(m_upstreamBin.get(), TRUE); - gst_element_set_locked_state(m_bin.get(), TRUE); - gst_element_set_state(queue.get(), GST_STATE_NULL); - gst_element_set_state(sink.get(), GST_STATE_NULL); - gst_pad_unlink(teeSrcPad.get(), ghostSinkPad.get()); - gst_element_unlink(queue.get(), sink.get()); - - auto ghostSrcPad = adoptGRef(gst_element_get_static_pad(m_upstreamBin.get(), padName.ascii().data())); - gst_ghost_pad_set_target(GST_GHOST_PAD_CAST(ghostSrcPad.get()), nullptr); - gst_element_remove_pad(m_upstreamBin.get(), ghostSrcPad.get()); - gst_element_release_request_pad(m_tee.get(), teeSrcPad.get()); - - gst_ghost_pad_set_target(GST_GHOST_PAD_CAST(ghostSinkPad.get()), nullptr); - gst_element_remove_pad(m_bin.get(), ghostSinkPad.get()); - - gst_bin_remove_many(GST_BIN_CAST(m_bin.get()), queue.get(), sink.get(), nullptr); - gst_element_set_locked_state(m_bin.get(), FALSE); - gst_element_set_locked_state(m_upstreamBin.get(), FALSE); - m_clients.remove(clientId); + Locker lock { m_clientLock }; + for (auto& client : m_clients.values()) + applyFunction(client.get()); } -void RealtimeIncomingSourceGStreamer::handleUpstreamEvent(GRefPtr&& event, int clientId) +void RealtimeIncomingSourceGStreamer::handleUpstreamEvent(GRefPtr&& event) { + // FIXME: This early return shouldn't be necessary anymore after bug #275685 has been fixed. + if (!m_bin) + return; + GST_DEBUG_OBJECT(m_bin.get(), "Handling %" GST_PTR_FORMAT, event.get()); - auto sink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_bin.get()), makeString("sink-", clientId).ascii().data())); - auto pad = adoptGRef(gst_element_get_static_pad(sink.get(), "sink")); + auto pad = adoptGRef(gst_element_get_static_pad(m_sink.get(), "sink")); gst_pad_push_event(pad.get(), event.leakRef()); } -bool RealtimeIncomingSourceGStreamer::handleUpstreamQuery(GstQuery* query, int clientId) +bool RealtimeIncomingSourceGStreamer::handleUpstreamQuery(GstQuery* query) { + // FIXME: This early return shouldn't be necessary anymore after bug #275685 has been fixed. 
+ if (!m_bin) + return false; + GST_DEBUG_OBJECT(m_bin.get(), "Handling %" GST_PTR_FORMAT, query); - auto sink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_bin.get()), makeString("sink-", clientId).ascii().data())); - auto pad = adoptGRef(gst_element_get_static_pad(sink.get(), "sink")); + auto pad = adoptGRef(gst_element_get_static_pad(m_sink.get(), "sink")); return gst_pad_peer_query(pad.get(), query); } +void RealtimeIncomingSourceGStreamer::handleDownstreamEvent(GstElement* sink, GRefPtr&& event) +{ + switch (GST_EVENT_TYPE(event.get())) { + case GST_EVENT_STREAM_START: + case GST_EVENT_CAPS: + case GST_EVENT_SEGMENT: + case GST_EVENT_STREAM_COLLECTION: + return; + case GST_EVENT_LATENCY: { + GstClockTime minLatency, maxLatency; + if (gst_base_sink_query_latency(GST_BASE_SINK(sink), nullptr, nullptr, &minLatency, &maxLatency)) { + forEachClient([&](auto* appsrc) { + GST_DEBUG_OBJECT(sink, "Setting client latency to min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT, GST_TIME_ARGS(minLatency), GST_TIME_ARGS(maxLatency)); + g_object_set(appsrc, "min-latency", minLatency, "max-latency", maxLatency, nullptr); + }); + } + return; + } + default: + break; + } + + forEachClient([&](auto* appsrc) { + auto pad = adoptGRef(gst_element_get_static_pad(appsrc, "src")); + GRefPtr eventCopy(event); + GST_DEBUG_OBJECT(sink, "Forwarding event %" GST_PTR_FORMAT " to client", eventCopy.get()); + gst_pad_push_event(pad.get(), eventCopy.leakRef()); + }); +} + void RealtimeIncomingSourceGStreamer::tearDown() { notImplemented(); diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.h b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.h index d1d8919768f50..8a61bf0afc21f 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.h +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingSourceGStreamer.h @@ -28,43 +28,34 @@ namespace WebCore { class RealtimeIncomingSourceGStreamer : public RealtimeMediaSource { public: - GstElement* bin() { return m_bin.get(); } - - virtual void setUpstreamBin(const GRefPtr&); + GstElement* bin() const { return m_bin.get(); } + virtual bool setBin(const GRefPtr&); bool hasClient(const GRefPtr&); - std::optional registerClient(GRefPtr&&); + int registerClient(GRefPtr&&); void unregisterClient(int); - void handleUpstreamEvent(GRefPtr&&, int clientId); - bool handleUpstreamQuery(GstQuery*, int clientId); + void handleUpstreamEvent(GRefPtr&&); + bool handleUpstreamQuery(GstQuery*); + void handleDownstreamEvent(GstElement* sink, GRefPtr&&); void tearDown(); - void setIsUpstreamDecoding(bool isUpstreamDecoding) { m_isUpstreamDecoding = isUpstreamDecoding; }; - protected: RealtimeIncomingSourceGStreamer(Type, AtomString&& name); - GRefPtr m_upstreamBin; - GRefPtr m_tee; - private: // RealtimeMediaSource API - void startProducingData() final; - void stopProducingData() final; const RealtimeMediaSourceCapabilities& capabilities() final; - virtual void dispatchSample(GRefPtr&&) { } + virtual void dispatchSample(GRefPtr&&) = 0; - void unregisterClientLocked(int); + void forEachClient(Function&&); GRefPtr m_bin; - GQuark m_clientQuark { 0 }; + GRefPtr m_sink; Lock m_clientLock; HashMap> m_clients WTF_GUARDED_BY_LOCK(m_clientLock); - bool m_isStarted { true }; - bool m_isUpstreamDecoding { false }; }; } // namespace WebCore diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp 
b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp index d7ad10da3c3e0..5ed7cefc8b054 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.cpp @@ -39,17 +39,15 @@ RealtimeIncomingVideoSourceGStreamer::RealtimeIncomingVideoSourceGStreamer(AtomS std::call_once(debugRegisteredFlag, [] { GST_DEBUG_CATEGORY_INIT(webkit_webrtc_incoming_video_debug, "webkitwebrtcincomingvideo", 0, "WebKit WebRTC incoming video"); }); - static Atomic sourceCounter = 0; - gst_element_set_name(bin(), makeString("incoming-video-source-"_s, sourceCounter.exchangeAdd(1)).ascii().data()); - GST_DEBUG_OBJECT(bin(), "New incoming video source created with ID %s", persistentID().ascii().data()); } -void RealtimeIncomingVideoSourceGStreamer::setUpstreamBin(const GRefPtr& bin) +bool RealtimeIncomingVideoSourceGStreamer::setBin(const GRefPtr& bin) { - RealtimeIncomingSourceGStreamer::setUpstreamBin(bin); + if (!RealtimeIncomingSourceGStreamer::setBin(bin)) + return false; - auto tee = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_upstreamBin.get()), "tee")); - auto sinkPad = adoptGRef(gst_element_get_static_pad(tee.get(), "sink")); + auto sink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(this->bin()), "sink")); + auto sinkPad = adoptGRef(gst_element_get_static_pad(sink.get(), "sink")); gst_pad_add_probe(sinkPad.get(), static_cast(GST_PAD_PROBE_TYPE_BUFFER), [](GstPad*, GstPadProbeInfo* info, gpointer) -> GstPadProbeReturn { auto videoFrameTimeMetadata = std::make_optional({ }); videoFrameTimeMetadata->receiveTime = MonotonicTime::now().secondsSinceEpoch(); @@ -65,6 +63,7 @@ void RealtimeIncomingVideoSourceGStreamer::setUpstreamBin(const GRefPtr&& s ASSERT(isMainThread()); auto* buffer = gst_sample_get_buffer(sample.get()); auto* caps = gst_sample_get_caps(sample.get()); + if (!caps) { + GST_WARNING_OBJECT(bin(), "Received sample without caps, bailing out."); + return; + } + ensureSizeAndFramerate(GRefPtr(caps)); videoFrameAvailable(VideoFrameGStreamer::create(WTFMove(sample), intrinsicSize(), fromGstClockTime(GST_BUFFER_PTS(buffer))), { }); diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.h b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.h index e80e13248b911..11cbf0600eeff 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.h +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceGStreamer.h @@ -36,7 +36,7 @@ class RealtimeIncomingVideoSourceGStreamer : public RealtimeIncomingSourceGStrea const GstStructure* stats(); - void setUpstreamBin(const GRefPtr&) final; + bool setBin(const GRefPtr&) final; protected: RealtimeIncomingVideoSourceGStreamer(AtomString&&); From 4bfbd2597f92b087568137e2ba69f9eed760a9f8 Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Thu, 18 Jul 2024 10:06:15 +0100 Subject: [PATCH 12/15] [GStreamer][WebRTC] Apply platform-specific quirks for incoming tracks https://bugs.webkit.org/show_bug.cgi?id=276769 Reviewed by Xabier Rodriguez-Calvar. On platforms where quirks are required, keep hardware-accelerated parsers out of the WebRTC pipeline. They are instead used from the playback pipeline. The LibWebRTC backend had support for this already, this patch brings the same feature to the GstWebRTC backend. 
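As a rough sketch of the autoplug filtering described above (standalone GStreamer/C++; looksHardwareAccelerated() is a crude stand-in for the GStreamerQuirksManager check the patch actually uses, and the sketch assumes decodebin/parsebin is registered so the GstAutoplugSelectResult type exists): the "autoplug-select" handler resolves the enum values by nick at runtime, since the enum is private to decodebin/parsebin, and returns "skip" for hardware-accelerated factories so they stay out of the WebRTC pipeline.

    #include <gst/gst.h>
    #include <cstring>

    // Resolve a GstAutoplugSelectResult value ("try", "skip", ...) by nick.
    static int autoplugSelectResult(const char* nick)
    {
        GEnumClass* enumClass = static_cast<GEnumClass*>(g_type_class_ref(g_type_from_name("GstAutoplugSelectResult")));
        GEnumValue* value = g_enum_get_value_by_nick(enumClass, nick);
        return value ? value->value : 0;
    }

    // Crude heuristic standing in for the quirks-manager query.
    static bool looksHardwareAccelerated(GstElementFactory* factory)
    {
        const char* klass = gst_element_factory_get_metadata(factory, GST_ELEMENT_METADATA_KLASS);
        return klass && std::strstr(klass, "Hardware");
    }

    // Connected with: g_signal_connect(parsebin, "autoplug-select", G_CALLBACK(onAutoplugSelect), nullptr);
    static unsigned onAutoplugSelect(GstElement*, GstPad*, GstCaps*, GstElementFactory* factory, gpointer)
    {
        static const int skip = autoplugSelectResult("skip");
        static const int tryPlugging = autoplugSelectResult("try");
        return looksHardwareAccelerated(factory) ? skip : tryPlugging;
    }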
* Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp: (WebCore::gstGetAutoplugSelectResult): * Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h: * Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp: (WebCore::GStreamerIncomingTrackProcessor::createParser): * Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoDecoderFactory.cpp: (WebCore::GStreamerWebRTCVideoDecoder::getGstAutoplugSelectResult): Deleted. Canonical link: https://commits.webkit.org/281292@main --- .../graphics/gstreamer/GStreamerCommon.cpp | 12 ++++++++++++ .../graphics/gstreamer/GStreamerCommon.h | 2 ++ .../GStreamerIncomingTrackProcessor.cpp | 17 +++++++++++++++++ .../GStreamerVideoDecoderFactory.cpp | 19 +++++++------------ 4 files changed, 38 insertions(+), 12 deletions(-) diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp index 2fc5d4f45a8ea..7c99426aa1c95 100644 --- a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp +++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp @@ -1005,6 +1005,18 @@ bool gstObjectHasProperty(GstPad* pad, const char* name) return gstObjectHasProperty(GST_OBJECT_CAST(pad), name); } +std::optional gstGetAutoplugSelectResult(ASCIILiteral nick) +{ + static auto enumClass = static_cast(g_type_class_ref(g_type_from_name("GstAutoplugSelectResult"))); + RELEASE_ASSERT(enumClass); + auto enumValue = g_enum_get_value_by_nick(enumClass, nick.characters()); + if (!enumValue) + return std::nullopt; + return enumValue->value; +} + +#undef GST_CAT_DEFAULT + } // namespace WebCore #if !GST_CHECK_VERSION(1, 20, 0) diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h index e1b67aff32471..6e505a7c43bf4 100644 --- a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h +++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h @@ -409,6 +409,8 @@ void configureVideoRTPDepayloader(GstElement*); bool gstObjectHasProperty(GstElement*, const char* name); bool gstObjectHasProperty(GstPad*, const char* name); +std::optional gstGetAutoplugSelectResult(ASCIILiteral); + void registerActivePipeline(const GRefPtr&); void unregisterPipeline(const GRefPtr&); diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp index 21d15aa32d8f3..575061a5a502b 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerIncomingTrackProcessor.cpp @@ -22,6 +22,7 @@ #if USE(GSTREAMER_WEBRTC) #include "GStreamerCommon.h" +#include "GStreamerQuirks.h" #include "GStreamerRegistryScanner.h" GST_DEBUG_CATEGORY(webkit_webrtc_incoming_track_processor_debug); @@ -244,6 +245,22 @@ GRefPtr GStreamerIncomingTrackProcessor::createParser() configureVideoRTPDepayloader(element); }), nullptr); + auto& quirksManager = GStreamerQuirksManager::singleton(); + if (quirksManager.isEnabled()) { + // Prevent auto-plugging of hardware-accelerated elements. Those will be used in the playback pipeline. 
+        g_signal_connect(parsebin.get(), "autoplug-select", G_CALLBACK(+[](GstElement*, GstPad*, GstCaps*, GstElementFactory* factory, gpointer) -> unsigned {
+            static auto skipAutoPlug = gstGetAutoplugSelectResult("skip"_s);
+            static auto tryAutoPlug = gstGetAutoplugSelectResult("try"_s);
+            RELEASE_ASSERT(skipAutoPlug);
+            RELEASE_ASSERT(tryAutoPlug);
+            auto& quirksManager = GStreamerQuirksManager::singleton();
+            auto isHardwareAccelerated = quirksManager.isHardwareAccelerated(factory).value_or(false);
+            if (isHardwareAccelerated)
+                return *skipAutoPlug;
+            return *tryAutoPlug;
+        }), nullptr);
+    }
+
     g_signal_connect_swapped(parsebin.get(), "pad-added", G_CALLBACK(+[](GStreamerIncomingTrackProcessor* self, GstPad* pad) {
         auto queue = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(self->m_bin.get()), "queue"));
         auto sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
diff --git a/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoDecoderFactory.cpp b/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoDecoderFactory.cpp
index 4345c02ca4675..9bb483741662b 100644
--- a/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoDecoderFactory.cpp
+++ b/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/GStreamerVideoDecoderFactory.cpp
@@ -83,16 +83,6 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
         m_needsKeyframe = true;
     }
 
-    static unsigned getGstAutoplugSelectResult(const char* nick)
-    {
-        static GEnumClass* enumClass = static_cast(g_type_class_ref(g_type_from_name("GstAutoplugSelectResult")));
-        ASSERT(enumClass);
-        GEnumValue* ev = g_enum_get_value_by_nick(enumClass, nick);
-        if (!ev)
-            return 0;
-        return ev->value;
-    }
-
     bool Configure(const webrtc::VideoDecoder::Settings& codecSettings) override
     {
         m_src = makeElement("appsrc");
@@ -112,12 +102,17 @@ class GStreamerWebRTCVideoDecoder : public webrtc::VideoDecoder {
 
         auto& quirksManager = GStreamerQuirksManager::singleton();
         if (quirksManager.isEnabled()) {
+            // Prevent auto-plugging of hardware-accelerated elements. Those will be used in the playback pipeline.
             g_signal_connect(decoder, "autoplug-select", G_CALLBACK(+[](GstElement*, GstPad*, GstCaps*, GstElementFactory* factory, gpointer) -> unsigned {
+                static auto skipAutoPlug = gstGetAutoplugSelectResult("skip"_s);
+                static auto tryAutoPlug = gstGetAutoplugSelectResult("try"_s);
+                RELEASE_ASSERT(skipAutoPlug);
+                RELEASE_ASSERT(tryAutoPlug);
                 auto& quirksManager = GStreamerQuirksManager::singleton();
                 auto isHardwareAccelerated = quirksManager.isHardwareAccelerated(factory).value_or(false);
                 if (isHardwareAccelerated)
-                    return getGstAutoplugSelectResult("skip");
-                return getGstAutoplugSelectResult("try");
+                    return *skipAutoPlug;
+                return *tryAutoPlug;
             }), nullptr);
         }
 

From 1fc1e0b842d20c6eea2ee74016ad3b0bdb0901ac Mon Sep 17 00:00:00 2001
From: Carlos Bentzen
Date: Tue, 6 Aug 2024 08:53:21 -0700
Subject: [PATCH 13/15] [GStreamer][WebRTC] Emit 'track' event before resolving
 setRemoteDescription's promise

https://bugs.webkit.org/show_bug.cgi?id=275685

Reviewed by Philippe Normand.

The track event should be emitted after successfully applying a remote
description and before the promise from setRemoteDescription is resolved.
However, up until now the GstWebRTC backend emitted the event based on the
'pad-added' signal. The timing of that signal doesn't match what the spec
expects.

Fix the issue by querying the transceiver states after applying a remote
description successfully.
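To make the mechanism concrete, the following is a minimal, self-contained sketch of how the per-transceiver state can be read back from webrtcbin once a description has been applied. It is only an illustration under the assumption of a GStreamer version that exposes the "mid" and "current-direction" transceiver properties (as the patch relies on); the real implementation is transceiverStatesFromWebRTCBin() in the diff below.

    // Sketch only: dump the state of every transceiver known to a webrtcbin.
    #include <gst/gst.h>
    #define GST_USE_UNSTABLE_API
    #include <gst/webrtc/webrtc.h>

    static void dumpTransceiverStates(GstElement* webrtcBin)
    {
        GArray* transceivers = nullptr;
        g_signal_emit_by_name(webrtcBin, "get-transceivers", &transceivers);
        if (!transceivers)
            return;

        for (guint i = 0; i < transceivers->len; i++) {
            GstWebRTCRTPTransceiver* transceiver = g_array_index(transceivers, GstWebRTCRTPTransceiver*, i);
            char* mid = nullptr;
            GstWebRTCRTPTransceiverDirection direction;
            guint mLineIndex;
            g_object_get(transceiver, "mid", &mid, "current-direction", &direction, "mlineindex", &mLineIndex, nullptr);
            g_print("transceiver %u: mid=%s mline=%u direction=%d\n", i, mid ? mid : "(null)", mLineIndex, direction);
            g_free(mid);
        }
        g_array_unref(transceivers);
    }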
By providing the transceiver states to
PeerConnectionBackend::setRemoteDescriptionSucceeded(), it is able to emit the
track events correctly according to the spec.

A test is added to check that the track event is emitted before
setRemoteDescription resolves, which would fail before this patch in the
GstWebRTC backend.

* LayoutTests/platform/glib/TestExpectations:
* LayoutTests/platform/glib/fast/mediastream/RTCPeerConnection-inspect-answer-expected.txt:
* LayoutTests/platform/glib/fast/mediastream/RTCPeerConnection-media-setup-single-dialog-expected.txt:
* LayoutTests/platform/glib/fast/mediastream/RTCPeerConnection-setRemoteDescription-offer-expected.txt:
* LayoutTests/platform/glib/imported/w3c/web-platform-tests/webrtc/RTCPeerConnection-setLocalDescription-parameterless.https-expected.txt: Removed.
* LayoutTests/platform/glib/imported/w3c/web-platform-tests/webrtc/protocol/split.https-expected.txt: Added.
* LayoutTests/webrtc/setRemoteDescription-track-expected.txt: Added.
* LayoutTests/webrtc/setRemoteDescription-track.html: Added.
* Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp:
(WebCore::PeerConnectionBackend::setRemoteDescriptionSucceeded):
(WebCore::PeerConnectionBackend::setRemoteDescriptionFailed):
(WebCore::PeerConnectionBackend::stop):
(WebCore::PeerConnectionBackend::dispatchTrackEvent): Deleted.
(WebCore::PeerConnectionBackend::addPendingTrackEvent): Deleted.
* Source/WebCore/Modules/mediastream/PeerConnectionBackend.h:
* Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp:
(WebCore::GStreamerMediaEndpoint::initializePipeline):
(WebCore::GStreamerMediaEndpointTransceiverState::isolatedCopy):
(WebCore::getMediaStreamIdsFromSDPMedia):
(WebCore::isRecvDirection):
(WebCore::toGStreamerMediaEndpointTransceiverState):
(WebCore::transceiverStatesFromWebRTCBin):
(WebCore::GStreamerMediaEndpoint::doSetLocalDescription):
(WebCore::GStreamerMediaEndpoint::setTransceiverCodecPreferences):
(WebCore::GStreamerMediaEndpoint::doSetRemoteDescription):
(WebCore::GStreamerMediaEndpoint::setDescription):
(WebCore::GStreamerMediaEndpoint::connectIncomingTrack):
* Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h:
* Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp:
(WebCore::GStreamerPeerConnectionBackend::addPendingTrackEvent): Deleted.
(WebCore::GStreamerPeerConnectionBackend::dispatchPendingTrackEvents): Deleted.
* Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h: * Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp: (WebCore::MediaStreamTrackPrivate::dataFlowStarted): * Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h: (WebCore::MediaStreamTrackPrivateObserver::dataFlowStarted): * Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp: (webkitMediaStreamSrcCharacteristicsChanged): Canonical link: https://commits.webkit.org/281892@main --- .../mediastream/PeerConnectionBackend.cpp | 31 +-- .../mediastream/PeerConnectionBackend.h | 12 - .../gstreamer/GStreamerMediaEndpoint.cpp | 214 +++++++++++++++--- .../gstreamer/GStreamerMediaEndpoint.h | 4 + .../GStreamerPeerConnectionBackend.cpp | 18 -- .../GStreamerPeerConnectionBackend.h | 5 - .../mediastream/MediaStreamTrackPrivate.cpp | 9 +- .../mediastream/MediaStreamTrackPrivate.h | 4 +- .../gstreamer/GStreamerMediaStreamSource.cpp | 18 +- 9 files changed, 208 insertions(+), 107 deletions(-) diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp index 15a4470a75a61..a73f0eeb30bb2 100644 --- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp +++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.cpp @@ -328,7 +328,7 @@ void PeerConnectionBackend::setRemoteDescriptionSucceeded(std::optionalsource().setMuted(false); } - } else { - // FIXME: Move ports out of m_pendingTrackEvents. - for (auto& event : events) - dispatchTrackEvent(event); } callback({ }); }); } -void PeerConnectionBackend::dispatchTrackEvent(PendingTrackEvent& event) -{ - auto& track = event.track.get(); - - m_peerConnection.dispatchEvent(RTCTrackEvent::create(eventNames().trackEvent, Event::CanBubble::No, Event::IsCancelable::No, WTFMove(event.receiver), WTFMove(event.track), WTFMove(event.streams), WTFMove(event.transceiver))); - ALWAYS_LOG(LOGIDENTIFIER, "Dispatched if feasible track of type ", track.source().type()); - - if (m_peerConnection.isClosed()) - return; - - // FIXME: As per spec, we should set muted to 'false' when starting to receive the content from network. 
- track.source().setMuted(false); -} - void PeerConnectionBackend::setRemoteDescriptionFailed(Exception&& exception) { ASSERT(isMainThread()); ALWAYS_LOG(LOGIDENTIFIER, "Set remote description failed:", exception.message()); - ASSERT(m_pendingTrackEvents.isEmpty()); - m_pendingTrackEvents.clear(); - ASSERT(m_setDescriptionCallback); m_peerConnection.queueTaskKeepingObjectAlive(m_peerConnection, TaskSource::Networking, [this, callback = WTFMove(m_setDescriptionCallback), exception = WTFMove(exception)]() mutable { if (m_peerConnection.isClosed()) @@ -454,12 +433,6 @@ void PeerConnectionBackend::iceGatheringStateChanged(RTCIceGatheringState state) }); } -void PeerConnectionBackend::addPendingTrackEvent(PendingTrackEvent&& event) -{ - ASSERT(!m_peerConnection.isStopped()); - m_pendingTrackEvents.append(WTFMove(event)); -} - static String extractIPAddress(StringView sdp) { unsigned counter = 0; @@ -590,8 +563,6 @@ void PeerConnectionBackend::stop() m_offerAnswerCallback = nullptr; m_setDescriptionCallback = nullptr; - m_pendingTrackEvents.clear(); - doStop(); } diff --git a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h index a43d01120973e..46a7e7163f778 100644 --- a/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h +++ b/Source/WebCore/Modules/mediastream/PeerConnectionBackend.h @@ -228,16 +228,6 @@ class PeerConnectionBackend void validateSDP(const String&) const; - struct PendingTrackEvent { - Ref receiver; - Ref track; - Vector> streams; - RefPtr transceiver; - }; - void addPendingTrackEvent(PendingTrackEvent&&); - - void dispatchTrackEvent(PendingTrackEvent&); - private: virtual void doCreateOffer(RTCOfferOptions&&) = 0; virtual void doCreateAnswer(RTCAnswerOptions&&) = 0; @@ -255,8 +245,6 @@ class PeerConnectionBackend bool m_shouldFilterICECandidates { true }; - Vector m_pendingTrackEvents; - #if !RELEASE_LOG_DISABLED Ref m_logger; const void* m_logIdentifier; diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp index e1f6a467ee0ef..cfc55dff49cfe 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.cpp @@ -170,6 +170,10 @@ bool GStreamerMediaEndpoint::initializePipeline() }), this); #ifndef GST_DISABLE_GST_DEBUG + g_signal_connect_swapped(m_webrtcBin.get(), "on-new-transceiver", G_CALLBACK(+[](GStreamerMediaEndpoint* endPoint, GstWebRTCRTPTransceiver* transceiver) { + GST_DEBUG_OBJECT(endPoint->m_webrtcBin.get(), "New transceiver: %" GST_PTR_FORMAT, transceiver); + }), this); + g_signal_connect(m_webrtcBin.get(), "notify::connection-state", G_CALLBACK(+[](GstElement* webrtcBin, GParamSpec*, GStreamerMediaEndpoint* endPoint) { GstWebRTCPeerConnectionState state; g_object_get(webrtcBin, "connection-state", &state, nullptr); @@ -370,6 +374,119 @@ static std::optional descriptionsFromW }; } +struct GStreamerMediaEndpointTransceiverState { + String mid; + Vector receiverStreamIds; + std::optional firedDirection; + + GStreamerMediaEndpointTransceiverState isolatedCopy() &&; +}; + +inline GStreamerMediaEndpointTransceiverState GStreamerMediaEndpointTransceiverState::isolatedCopy() && +{ + return { + WTFMove(mid).isolatedCopy(), + crossThreadCopy(WTFMove(receiverStreamIds)), + firedDirection + }; +} + +Vector getMediaStreamIdsFromSDPMedia(const GstSDPMedia& media) +{ + HashSet mediaStreamIdsSet; + for 
(guint i = 0; i < gst_sdp_media_attributes_len(&media); ++i) { + const auto attribute = gst_sdp_media_get_attribute(&media, i); + if (!g_strcmp0(attribute->key, "msid")) { + auto components = String::fromUTF8(attribute->value).split(' '); + if (components.size() < 2) + continue; + mediaStreamIdsSet.add(components[0]); + } + // MSID may also come in ssrc attributes, specially if they're in an SDP answer. They look like: + // a=ssrc:3612593434 msid:e1019f4a-0983-4863-b923-b75903cced2c webrtctransceiver1 + if (!g_strcmp0(attribute->key, "ssrc")) { + auto outerComponents = String::fromUTF8(attribute->value).split(' '); + for (auto& outer : outerComponents) { + auto innerComponents = outer.split(':'); + if (innerComponents.size() < 2) + continue; + if (innerComponents[0] == "msid"_s) + mediaStreamIdsSet.add(innerComponents[1]); + } + } + } + Vector mediaStreamIds; + mediaStreamIds.reserveCapacity(mediaStreamIdsSet.size()); + for (const auto& msid : mediaStreamIdsSet) + mediaStreamIds.append(msid); + return mediaStreamIds; +} + +inline bool isRecvDirection(GstWebRTCRTPTransceiverDirection direction) +{ + return direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV || direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY; +} + +static std::optional toGStreamerMediaEndpointTransceiverState(GstElement* webrtcBin, GstWebRTCRTPTransceiver* transceiver) +{ + GRefPtr receiver; + GUniqueOutPtr mid; + GstWebRTCRTPTransceiverDirection currentDirection; + guint mLineIndex; + g_object_get(transceiver, "receiver", &receiver.outPtr(), "current-direction", ¤tDirection, "mlineindex", &mLineIndex, "mid", &mid.outPtr(), nullptr); +#ifndef GST_DISABLE_GST_DEBUG + GUniquePtr desc(g_enum_to_string(GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, currentDirection)); + GST_TRACE_OBJECT(webrtcBin, "Receiver = %" GST_PTR_FORMAT ", current-direction = %s, mlineindex = %u, mid = %s", receiver.get(), desc.get(), mLineIndex, GST_STR_NULL(mid.get())); +#endif + + GUniqueOutPtr localDescription, remoteDescription; + g_object_get(webrtcBin, "local-description", &localDescription.outPtr(), "remote-description", &remoteDescription.outPtr(), nullptr); + +#ifndef GST_DISABLE_GST_DEBUG + if (localDescription) { + GUniquePtr sdp(gst_sdp_message_as_text(localDescription->sdp)); + GST_TRACE_OBJECT(webrtcBin, "Local-description:\n%s", sdp.get()); + } + if (remoteDescription) { + GUniquePtr sdp(gst_sdp_message_as_text(remoteDescription->sdp)); + GST_TRACE_OBJECT(webrtcBin, "Remote-description:\n%s", sdp.get()); + } +#endif + + Vector streamIds; + if (remoteDescription && remoteDescription->sdp && mLineIndex < gst_sdp_message_medias_len(remoteDescription->sdp)) { + const GstSDPMedia* media = gst_sdp_message_get_media(remoteDescription->sdp, mLineIndex); + if (isRecvDirection(currentDirection)) + streamIds = getMediaStreamIdsFromSDPMedia(*media); + } + + if (UNLIKELY(!mid)) + return { }; + + return { { String::fromUTF8(mid.get()), WTFMove(streamIds), { toRTCRtpTransceiverDirection(currentDirection) } } }; +} + +static Vector transceiverStatesFromWebRTCBin(GstElement* webrtcBin) +{ + Vector states; + GRefPtr transceivers; + g_signal_emit_by_name(webrtcBin, "get-transceivers", &transceivers.outPtr()); + GST_TRACE_OBJECT(webrtcBin, "Filling transceiver states for %u transceivers", transceivers ? 
transceivers->len : 0); + if (!transceivers || !transceivers->len) + return states; + + states.reserveInitialCapacity(transceivers->len); + for (unsigned i = 0; i < transceivers->len; i++) { + GstWebRTCRTPTransceiver* transceiver = g_array_index(transceivers.get(), GstWebRTCRTPTransceiver*, i); + auto state = toGStreamerMediaEndpointTransceiverState(webrtcBin, transceiver); + if (!state) + continue; + states.append(WTFMove(*state)); + } + + return states; +} + void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription* description) { RefPtr initialDescription = description; @@ -458,6 +575,19 @@ void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription* } } +#ifndef GST_DISABLE_GST_DEBUG + auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".setLocalDescription"_s); + GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data()); +#endif + + auto rtcTransceiverStates = transceiverStatesFromWebRTCBin(m_webrtcBin.get()); + auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState { + auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr { + return &mediaStreamFromRTCStream(id); + }); + return { WTFMove(state.mid), WTFMove(streams), state.firedDirection }; + }); + GRefPtr transport; g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr); @@ -468,7 +598,7 @@ void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription* maxMessageSize = static_cast(maxMessageSizeValue); } - m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), { }, transport ? makeUnique(WTFMove(transport)) : nullptr, maxMessageSize); + m_peerConnectionBackend.setLocalDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), transport ? 
makeUnique(WTFMove(transport)) : nullptr, maxMessageSize); }, [protectedThis = Ref(*this), this](const GError* error) { if (protectedThis->isStopped()) return; @@ -483,6 +613,22 @@ void GStreamerMediaEndpoint::doSetLocalDescription(const RTCSessionDescription* }); } +void GStreamerMediaEndpoint::setTransceiverCodecPreferences(const GstSDPMedia& media, guint transceiverIdx) +{ + auto direction = getDirectionFromSDPMedia(&media); + if (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) + return; + + GRefPtr rtcTransceiver; + g_signal_emit_by_name(m_webrtcBin.get(), "get-transceiver", transceiverIdx, &rtcTransceiver.outPtr()); + if (!rtcTransceiver) + return; + + auto caps = capsFromSDPMedia(&media); + GST_TRACE_OBJECT(m_webrtcBin.get(), "Setting codec-preferences to %" GST_PTR_FORMAT, caps.get()); + g_object_set(rtcTransceiver.get(), "codec-preferences", caps.get(), nullptr); +} + void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription& description) { auto initialSDP = description.sdp().isolatedCopy(); @@ -494,17 +640,9 @@ void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription& unsigned numberOfMedias = gst_sdp_message_medias_len(&message); for (unsigned i = 0; i < numberOfMedias; i++) { const auto* media = gst_sdp_message_get_media(&message, i); - auto direction = getDirectionFromSDPMedia(media); - if (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) + if (UNLIKELY(!media)) continue; - - GRefPtr rtcTransceiver; - g_signal_emit_by_name(m_webrtcBin.get(), "get-transceiver", i, &rtcTransceiver.outPtr()); - if (!rtcTransceiver) - continue; - - auto caps = capsFromSDPMedia(media); - g_object_set(rtcTransceiver.get(), "codec-preferences", caps.get(), nullptr); + setTransceiverCodecPreferences(*media, i); } }, [protectedThis = Ref(*this), this, initialSDP = WTFMove(initialSDP), localDescriptionSdp = WTFMove(localDescriptionSdp), localDescriptionSdpType = WTFMove(localDescriptionSdpType)](const GstSDPMessage& message) { if (protectedThis->isStopped()) @@ -540,6 +678,19 @@ void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription& } } +#ifndef GST_DISABLE_GST_DEBUG + auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".setRemoteDescription"_s); + GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data()); +#endif + + auto rtcTransceiverStates = transceiverStatesFromWebRTCBin(m_webrtcBin.get()); + auto transceiverStates = WTF::map(rtcTransceiverStates, [this](auto& state) -> PeerConnectionBackend::TransceiverState { + auto streams = WTF::map(state.receiverStreamIds, [this](auto& id) -> RefPtr { + return &mediaStreamFromRTCStream(id); + }); + return { WTFMove(state.mid), WTFMove(streams), state.firedDirection }; + }); + GRefPtr transport; g_object_get(m_webrtcBin.get(), "sctp-transport", &transport.outPtr(), nullptr); @@ -550,7 +701,7 @@ void GStreamerMediaEndpoint::doSetRemoteDescription(const RTCSessionDescription& maxMessageSize = static_cast(maxMessageSizeValue); } - m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), { }, transport ? makeUnique(WTFMove(transport)) : nullptr, maxMessageSize); + m_peerConnectionBackend.setRemoteDescriptionSucceeded(WTFMove(descriptions), WTFMove(transceiverStates), transport ? 
makeUnique(WTFMove(transport)) : nullptr, maxMessageSize); }, [protectedThis = Ref(*this), this](const GError* error) { if (protectedThis->isStopped()) return; @@ -576,6 +727,8 @@ WEBKIT_DEFINE_ASYNC_DATA_STRUCT(SetDescriptionCallData) void GStreamerMediaEndpoint::setDescription(const RTCSessionDescription* description, DescriptionType descriptionType, Function&& preProcessCallback, Function&& successCallback, Function&& failureCallback) { + GST_DEBUG_OBJECT(m_webrtcBin.get(), "Setting %s description", descriptionType == DescriptionType::Local ? "local" : "remote"); + GUniqueOutPtr message; auto sdpType = RTCSdpType::Offer; @@ -990,12 +1143,9 @@ void GStreamerMediaEndpoint::connectIncomingTrack(WebRTCTrackData& data) GST_WARNING_OBJECT(m_pipeline.get(), "SDP media for transceiver %u not found, skipping incoming track setup", mLineIndex); return; } - transceiver = &m_peerConnectionBackend.newRemoteTransceiver(makeUnique(WTFMove(rtcTransceiver)), data.type, trackIdFromSDPMedia(*media)); } - m_peerConnectionBackend.addPendingTrackEvent({ Ref(transceiver->receiver()), Ref(transceiver->receiver().track()), { }, Ref(*transceiver) }); - auto mediaStreamBin = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_pipeline.get()), data.mediaStreamBinName.ascii().data())); auto& track = transceiver->receiver().track(); auto& source = track.privateTrack().source(); @@ -1009,22 +1159,32 @@ void GStreamerMediaEndpoint::connectIncomingTrack(WebRTCTrackData& data) return; } - auto& mediaStream = mediaStreamFromRTCStream(data.mediaStreamId); - mediaStream.addTrackFromPlatform(track); + m_pendingIncomingMediaStreamIDs.append(data.mediaStreamId); - for (auto& processor : m_trackProcessors.values()) { - if (!processor->isReady()) - return; + unsigned totalExpectedMediaTracks = 0; + for (unsigned i = 0; i < gst_sdp_message_medias_len(description->sdp); i++) { + const auto media = gst_sdp_message_get_media(description->sdp, i); + const char* mediaType = gst_sdp_media_get_media(media); + if (g_str_equal(mediaType, "audio") || g_str_equal(mediaType, "video")) + totalExpectedMediaTracks++; } - GST_DEBUG_OBJECT(m_pipeline.get(), "Incoming streams gathered, now dispatching track events"); - m_peerConnectionBackend.dispatchPendingTrackEvents(mediaStream); - gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING); + GST_DEBUG_OBJECT(m_pipeline.get(), "Expecting %u media tracks", totalExpectedMediaTracks); + if (m_pendingIncomingMediaStreamIDs.size() < totalExpectedMediaTracks) { + GST_DEBUG_OBJECT(m_pipeline.get(), "Only %zu track(s) received so far", m_pendingIncomingMediaStreamIDs.size()); + return; + } -#ifndef GST_DISABLE_GST_DEBUG - auto dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), ".connected-"_s, data.mediaStreamId); - GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data()); -#endif + for (auto& mediaStreamID : m_pendingIncomingMediaStreamIDs) { + auto& mediaStream = mediaStreamFromRTCStream(mediaStreamID); + GST_DEBUG_OBJECT(m_pipeline.get(), "Incoming stream %s ready, notifying observers", mediaStreamID.ascii().data()); + mediaStream.privateStream().forEachTrack([](auto& track) { + GST_DEBUG("Incoming stream has track %s", track.id().ascii().data()); + track.dataFlowStarted(); + }); + } + + m_pendingIncomingMediaStreamIDs.clear(); gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING); } diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h 
index f33550eef92b4..7461e74bae71f 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerMediaEndpoint.h @@ -162,6 +162,8 @@ class GStreamerMediaEndpoint : public ThreadSafeRefCounted isIceGatheringComplete(const String& currentLocalDescription); + void setTransceiverCodecPreferences(const GstSDPMedia&, guint transceiverIdx); + #if !RELEASE_LOG_DISABLED void gatherStatsForLogging(); void startLoggingStats(); @@ -204,6 +206,8 @@ class GStreamerMediaEndpoint : public ThreadSafeRefCounted m_ssrcGenerator; HashMap, RefPtr> m_trackProcessors; + + Vector m_pendingIncomingMediaStreamIDs; }; } // namespace WebCore diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp index 453040150ae5b..b73079f67fa28 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.cpp @@ -323,24 +323,6 @@ void GStreamerPeerConnectionBackend::collectTransceivers() m_endpoint->collectTransceivers(); } -void GStreamerPeerConnectionBackend::addPendingTrackEvent(PendingTrackEvent&& event) -{ - m_pendingTrackEvents.append(WTFMove(event)); -} - -void GStreamerPeerConnectionBackend::dispatchPendingTrackEvents(MediaStream& mediaStream) -{ - auto events = WTFMove(m_pendingTrackEvents); - for (auto& event : events) { - Vector> pendingStreams; - pendingStreams.reserveInitialCapacity(1); - pendingStreams.uncheckedAppend(&mediaStream); - event.streams = WTFMove(pendingStreams); - - dispatchTrackEvent(event); - } -} - void GStreamerPeerConnectionBackend::removeTrack(RTCRtpSender& sender) { m_endpoint->removeTrack(backendFromRTPSender(sender)); diff --git a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h index 3ef339e3e41b8..cde33987becd7 100644 --- a/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h +++ b/Source/WebCore/Modules/mediastream/gstreamer/GStreamerPeerConnectionBackend.h @@ -98,9 +98,6 @@ class GStreamerPeerConnectionBackend final : public PeerConnectionBackend { void collectTransceivers() final; - void addPendingTrackEvent(PendingTrackEvent&&); - void dispatchPendingTrackEvents(MediaStream&); - bool isLocalDescriptionSet() const final { return m_isLocalDescriptionSet; } template @@ -120,8 +117,6 @@ class GStreamerPeerConnectionBackend final : public PeerConnectionBackend { bool m_isLocalDescriptionSet { false }; bool m_isRemoteDescriptionSet { false }; - Vector m_pendingTrackEvents; - bool m_isReconfiguring { false }; }; diff --git a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp index 757ffe5d78173..189e93ef0d3dd 100644 --- a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp +++ b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp @@ -110,7 +110,7 @@ void MediaStreamTrackPrivate::setContentHint(HintValue hintValue) { m_contentHint = hintValue; } - + bool MediaStreamTrackPrivate::muted() const { return m_source->muted(); @@ -121,6 +121,13 @@ bool MediaStreamTrackPrivate::isCaptureTrack() const return m_source->isCaptureSource(); } +void MediaStreamTrackPrivate::dataFlowStarted() +{ + forEachObserver([this](auto& observer) { + observer.dataFlowStarted(*this); 
+ }); +} + void MediaStreamTrackPrivate::setEnabled(bool enabled) { if (m_isEnabled == enabled) diff --git a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h index 9ccf0fc68f453..739516955f7ea 100644 --- a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h +++ b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h @@ -59,6 +59,7 @@ class MediaStreamTrackPrivate final virtual void trackSettingsChanged(MediaStreamTrackPrivate&) = 0; virtual void trackEnabledChanged(MediaStreamTrackPrivate&) = 0; virtual void readyStateChanged(MediaStreamTrackPrivate&) { }; + virtual void dataFlowStarted(MediaStreamTrackPrivate&) { }; }; static Ref create(Ref&&, Ref&&); @@ -76,13 +77,14 @@ class MediaStreamTrackPrivate final enum class HintValue { Empty, Speech, Music, Motion, Detail, Text }; HintValue contentHint() const { return m_contentHint; } void setContentHint(HintValue); - + void startProducingData() { m_source->start(); } void stopProducingData() { m_source->stop(); } bool isProducingData() { return m_source->isProducingData(); } bool muted() const; void setMuted(bool muted) { m_source->setMuted(muted); } + void dataFlowStarted(); bool isCaptureTrack() const; diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp index 1fedcd9a55f2d..abf15eec90729 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp @@ -180,12 +180,6 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer, return GST_PAD_PROBE_OK; }), nullptr, nullptr); #endif - - auto& trackSource = m_track.source(); - if (!trackSource.isIncomingAudioSource() && !trackSource.isIncomingVideoSource()) - return; - - connectIncomingTrack(); } void connectIncomingTrack() @@ -369,6 +363,11 @@ class InternalSource final : public MediaStreamTrackPrivate::Observer, void trackSettingsChanged(MediaStreamTrackPrivate&) final { }; void readyStateChanged(MediaStreamTrackPrivate&) final { }; + void dataFlowStarted(MediaStreamTrackPrivate&) final + { + connectIncomingTrack(); + } + void trackEnded(MediaStreamTrackPrivate&) final { GST_INFO_OBJECT(m_src.get(), "Track ended"); @@ -1114,13 +1113,6 @@ void webkitMediaStreamSrcSignalEndOfStream(WebKitMediaStreamSrc* self) void webkitMediaStreamSrcCharacteristicsChanged(WebKitMediaStreamSrc* self) { GST_DEBUG_OBJECT(self, "MediaStream characteristics changed"); - for (auto& source : self->priv->sources) { - auto& trackSource = source->track().source(); - if (!trackSource.isIncomingAudioSource() && !trackSource.isIncomingVideoSource()) - continue; - - source->connectIncomingTrack(); - } } void webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream, bool isVideoPlayer) From 06c498c8e571a42eabb73214202804d658b100dc Mon Sep 17 00:00:00 2001 From: Carlos Bentzen Date: Wed, 7 Aug 2024 17:55:09 +0200 Subject: [PATCH 14/15] [WebRTC] Fix build with GCC 9 --- Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h b/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h index c991e7dd884f7..8b80cfb75d4e9 100644 --- a/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h +++ 
b/Source/WebCore/Modules/mediastream/RTCSctpTransportBackend.h @@ -54,6 +54,11 @@ inline bool operator==(const RTCSctpTransportBackend& a, const RTCSctpTransportB return a.backend() == b.backend(); } +inline bool operator!=(const RTCSctpTransportBackend& a, const RTCSctpTransportBackend& b) +{ + return !(a == b); +} + } // namespace WebCore #endif // ENABLE(WEB_RTC) From 6fe795e301369fda0a7c447e02e759cc4ec9bbd3 Mon Sep 17 00:00:00 2001 From: Carlos Bentzen Date: Fri, 30 Aug 2024 12:54:41 +0200 Subject: [PATCH 15/15] wip: do not set immediate-output=TRUE on westerossink --- .../platform/gstreamer/GStreamerQuirkWesteros.cpp | 11 ----------- .../platform/gstreamer/GStreamerQuirkWesteros.h | 1 - 2 files changed, 12 deletions(-) diff --git a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp index 0ef739ee4c562..50528cdc2cf8a 100644 --- a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp +++ b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.cpp @@ -51,17 +51,6 @@ GStreamerQuirkWesteros::GStreamerQuirkWesteros() } } -void GStreamerQuirkWesteros::configureElement(GstElement* element, const OptionSet& characteristics) -{ - if (!characteristics.contains(ElementRuntimeCharacteristics::IsMediaStream)) - return; - - if (!g_strcmp0(G_OBJECT_TYPE_NAME(G_OBJECT(element)), "GstWesterosSink") && gstObjectHasProperty(element, "immediate-output")) { - GST_INFO("Enable 'immediate-output' in WesterosSink"); - g_object_set(element, "immediate-output", TRUE, nullptr); - } -} - std::optional GStreamerQuirkWesteros::isHardwareAccelerated(GstElementFactory* factory) { if (g_str_has_prefix(GST_OBJECT_NAME(factory), "westeros")) diff --git a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h index 518e512ecafec..59b62a0505fe6 100644 --- a/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h +++ b/Source/WebCore/platform/gstreamer/GStreamerQuirkWesteros.h @@ -31,7 +31,6 @@ class GStreamerQuirkWesteros final : public GStreamerQuirk { GStreamerQuirkWesteros(); const char* identifier() final { return "Westeros"; } - void configureElement(GstElement*, const OptionSet&) final; std::optional isHardwareAccelerated(GstElementFactory*) final; private: