Bug 1813468 - Signal start and stop to MediaPipeline through state mirroring. r=bwc

Differential Revision: https://phabricator.services.mozilla.com/D168727
Andreas Pehrson 2023-02-09 15:33:01 +00:00
parent 9d815d4c33
commit d1a36ee5d5
9 changed files with 92 additions and 82 deletions
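For context, the patch replaces the explicit MediaPipeline::Start()/Stop() calls with Gecko's state-mirroring primitives: RTCRtpSender and RTCRtpReceiver keep Canonical<bool> "transmitting"/"receiving" flags, and the pipeline's mActive member becomes a Mirror<bool> that is connected to the matching canonical in InitControl(). Below is a minimal sketch of that pattern, condensed from the hunks that follow; it assumes Gecko's mozilla/StateMirroring.h and mozilla/AbstractThread.h headers, and the PipelineSketch class name is illustrative, not part of the patch.

#include "mozilla/AbstractThread.h"
#include "mozilla/StateMirroring.h"

using namespace mozilla;

// Controller side (RTCRtpSender in this patch): exposes the canonical
// "transmitting" flag that a pipeline can mirror.
struct MediaPipelineTransmitControlInterface {
  virtual AbstractCanonical<bool>* CanonicalTransmitting() = 0;
};

// Pipeline side (sketch only; the real class is MediaPipelineTransmit).
class PipelineSketch {
 public:
  PipelineSketch()
      // The mirror lives on the main thread and starts out false, matching
      // the INIT_MIRROR macro added to MediaPipeline.cpp.
      : mActive(AbstractThread::MainThread(), false,
                "PipelineSketch::mActive (Mirror)") {}

  void InitControl(MediaPipelineTransmitControlInterface* aControl) {
    // Once connected, every change the controller makes to its
    // Canonical<bool> propagates to mActive; no explicit Start()/Stop().
    mActive.Connect(aControl->CanonicalTransmitting());
  }

  void Shutdown() {
    // Break the mirror/canonical link on teardown, as
    // MediaPipeline::Shutdown now does.
    mActive.DisconnectIfConnected();
  }

 private:
  // Replaces the old Watchable<bool> that Start()/Stop() used to toggle.
  Mirror<bool> mActive;
};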


@@ -6,7 +6,6 @@
#include "transport/logging.h"
#include "mozilla/dom/MediaStreamTrack.h"
#include "mozilla/dom/Promise.h"
#include "transportbridge/MediaPipeline.h"
#include "nsPIDOMWindow.h"
#include "PrincipalHandle.h"
#include "nsIPrincipal.h"
@@ -118,6 +117,8 @@ RTCRtpReceiver::RTCRtpReceiver(
principalHandle, aPrivacy);
}
mPipeline->InitControl(this);
// Spec says remote tracks start out muted.
mTrackSource->SetMuted(true);
@@ -622,9 +623,6 @@ void RTCRtpReceiver::UpdateTransport() {
}
void RTCRtpReceiver::UpdateConduit() {
mReceiving = false;
Stop();
if (mPipeline->mConduit->type() == MediaSessionConduit::VIDEO) {
UpdateVideoConduit();
} else {
@@ -632,7 +630,6 @@ void RTCRtpReceiver::UpdateConduit() {
}
if ((mReceiving = GetJsepTransceiver().mRecvTrack.GetActive())) {
Start();
mHaveStartedReceiving = true;
}
}
@@ -762,14 +759,8 @@ void RTCRtpReceiver::UpdateAudioConduit() {
}
void RTCRtpReceiver::Stop() {
if (mPipeline) {
mPipeline->Stop();
}
}
void RTCRtpReceiver::Start() {
mPipeline->Start();
mHaveStartedReceiving = true;
MOZ_ASSERT(mTransceiver->Stopped());
mReceiving = false;
}
bool RTCRtpReceiver::HasTrack(const dom::MediaStreamTrack* aTrack) const {


@@ -17,12 +17,12 @@
#include "mozilla/dom/RTCStatsReportBinding.h"
#include "PerformanceRecorder.h"
#include "RTCStatsReport.h"
#include "transportbridge/MediaPipeline.h"
#include <vector>
class nsPIDOMWindowInner;
namespace mozilla {
class MediaPipelineReceive;
class MediaSessionConduit;
class MediaTransportHandler;
class JsepTransceiver;
@@ -38,7 +38,9 @@ struct RTCRtpContributingSource;
struct RTCRtpSynchronizationSource;
class RTCRtpTransceiver;
class RTCRtpReceiver : public nsISupports, public nsWrapperCache {
class RTCRtpReceiver : public nsISupports,
public nsWrapperCache,
public MediaPipelineReceiveControlInterface {
public:
RTCRtpReceiver(nsPIDOMWindowInner* aWindow, PrincipalPrivacy aPrivacy,
PeerConnectionImpl* aPc,
@@ -73,8 +75,8 @@ class RTCRtpReceiver : public nsISupports, public nsWrapperCache {
void Shutdown();
void BreakCycles();
// Terminal state, reached through stopping RTCRtpTransceiver.
void Stop();
void Start();
bool HasTrack(const dom::MediaStreamTrack* aTrack) const;
void SyncToJsep(JsepTransceiver& aJsepTransceiver) const;
void SyncFromJsep(const JsepTransceiver& aJsepTransceiver);
@@ -132,7 +134,7 @@ class RTCRtpReceiver : public nsISupports, public nsWrapperCache {
AbstractCanonical<Maybe<RtpRtcpConfig>>* CanonicalVideoRtpRtcpConfig() {
return &mVideoRtpRtcpConfig;
}
AbstractCanonical<bool>* CanonicalReceiving() { return &mReceiving; }
AbstractCanonical<bool>* CanonicalReceiving() override { return &mReceiving; }
private:
virtual ~RTCRtpReceiver();
@@ -171,9 +173,7 @@ class RTCRtpReceiver : public nsISupports, public nsWrapperCache {
Canonical<Ssrc> mSsrc;
Canonical<Ssrc> mVideoRtxSsrc;
Canonical<RtpExtList> mLocalRtpExtensions;
Canonical<std::vector<AudioCodecConfig>> mAudioCodecs;
Canonical<std::vector<VideoCodecConfig>> mVideoCodecs;
Canonical<Maybe<RtpRtcpConfig>> mVideoRtpRtcpConfig;
Canonical<bool> mReceiving;


@@ -7,7 +7,6 @@
#include "mozilla/dom/MediaStreamTrack.h"
#include "mozilla/dom/Promise.h"
#include "mozilla/glean/GleanMetrics.h"
#include "transportbridge/MediaPipeline.h"
#include "nsPIDOMWindow.h"
#include "nsString.h"
#include "mozilla/dom/VideoStreamTrack.h"
@@ -70,6 +69,7 @@ RTCRtpSender::RTCRtpSender(nsPIDOMWindowInner* aWindow, PeerConnectionImpl* aPc,
mPipeline = new MediaPipelineTransmit(
mPc->GetHandle(), aTransportHandler, aCallThread, aStsThread,
aConduit->type() == MediaSessionConduit::VIDEO, aConduit);
mPipeline->InitControl(this);
if (aConduit->type() == MediaSessionConduit::AUDIO) {
mDtmf = new RTCDTMFSender(aWindow, mTransceiver);
@@ -1203,6 +1203,8 @@ void RTCRtpSender::MaybeUpdateConduit() {
return;
}
bool wasTransmitting = mTransmitting;
if (mPipeline->mConduit->type() == MediaSessionConduit::VIDEO) {
Maybe<VideoConfig> newConfig = GetNewVideoConfig();
if (newConfig.isSome()) {
@@ -1214,6 +1216,12 @@ void RTCRtpSender::MaybeUpdateConduit() {
ApplyAudioConfig(*newConfig);
}
}
if (!mSenderTrack && !wasTransmitting && mTransmitting) {
MOZ_LOG(gSenderLog, LogLevel::Debug,
("%s[%s]: %s Starting transmit conduit without send track!",
mPc->GetHandle().c_str(), GetMid().c_str(), __FUNCTION__));
}
}
void RTCRtpSender::SyncFromJsep(const JsepTransceiver& aJsepTransceiver) {
@@ -1451,8 +1459,6 @@ void RTCRtpSender::ApplyVideoConfig(const VideoConfig& aConfig) {
if (aConfig.mVideoCodec.isSome()) {
MOZ_ASSERT(aConfig.mSsrcs.size() == aConfig.mVideoCodec->mEncodings.size());
}
mTransmitting = false;
Stop();
mSsrcs = aConfig.mSsrcs;
mCname = aConfig.mCname;
@@ -1463,14 +1469,14 @@ void RTCRtpSender::ApplyVideoConfig(const VideoConfig& aConfig) {
mVideoRtpRtcpConfig = aConfig.mVideoRtpRtcpConfig;
mVideoCodecMode = aConfig.mVideoCodecMode;
if ((mTransmitting = aConfig.mTransmitting)) {
Start();
}
mTransmitting = aConfig.mTransmitting;
}
void RTCRtpSender::ApplyAudioConfig(const AudioConfig& aConfig) {
mTransmitting = false;
Stop();
if (mDtmf) {
mDtmf->StopPlayout();
}
mSsrcs = aConfig.mSsrcs;
mCname = aConfig.mCname;
@@ -1482,25 +1488,15 @@ void RTCRtpSender::ApplyAudioConfig(const AudioConfig& aConfig) {
mDtmf->SetPayloadType(aConfig.mDtmfPt, aConfig.mDtmfFreq);
}
if ((mTransmitting = aConfig.mTransmitting)) {
Start();
}
mTransmitting = aConfig.mTransmitting;
}
void RTCRtpSender::Stop() {
mPipeline->Stop();
MOZ_ASSERT(mTransceiver->Stopped());
if (mDtmf) {
mDtmf->StopPlayout();
}
}
void RTCRtpSender::Start() {
if (!mSenderTrack) {
MOZ_LOG(gSenderLog, LogLevel::Debug,
("%s[%s]: %s Starting transmit conduit without send track!",
mPc->GetHandle().c_str(), GetMid().c_str(), __FUNCTION__));
}
mPipeline->Start();
mTransmitting = false;
}
bool RTCRtpSender::HasTrack(const dom::MediaStreamTrack* aTrack) const {


@@ -18,11 +18,11 @@
#include "mozilla/dom/RTCRtpParametersBinding.h"
#include "RTCStatsReport.h"
#include "jsep/JsepTrack.h"
#include "transportbridge/MediaPipeline.h"
class nsPIDOMWindowInner;
namespace mozilla {
class MediaPipelineTransmit;
class MediaSessionConduit;
class MediaTransportHandler;
class JsepTransceiver;
@@ -36,7 +36,9 @@ class RTCDtlsTransport;
class RTCDTMFSender;
class RTCRtpTransceiver;
class RTCRtpSender : public nsISupports, public nsWrapperCache {
class RTCRtpSender : public nsISupports,
public nsWrapperCache,
public MediaPipelineTransmitControlInterface {
public:
RTCRtpSender(nsPIDOMWindowInner* aWindow, PeerConnectionImpl* aPc,
MediaTransportHandler* aTransportHandler,
@@ -85,8 +87,8 @@ class RTCRtpSender : public nsISupports, public nsWrapperCache {
void SetTrack(const RefPtr<MediaStreamTrack>& aTrack);
void Shutdown();
void BreakCycles();
// Terminal state, reached through stopping RTCRtpTransceiver.
void Stop();
void Start();
bool HasTrack(const dom::MediaStreamTrack* aTrack) const;
bool IsMyPc(const PeerConnectionImpl* aPc) const { return mPc.get() == aPc; }
RefPtr<MediaPipelineTransmit> GetPipeline() const;
@@ -120,7 +122,9 @@ class RTCRtpSender : public nsISupports, public nsWrapperCache {
return &mVideoCodecMode;
}
AbstractCanonical<std::string>* CanonicalCname() { return &mCname; }
AbstractCanonical<bool>* CanonicalTransmitting() { return &mTransmitting; }
AbstractCanonical<bool>* CanonicalTransmitting() override {
return &mTransmitting;
}
bool HasPendingSetParameters() const { return mPendingParameters.isSome(); }
void InvalidateLastReturnedParameters() {


@@ -865,8 +865,6 @@ void RTCRtpTransceiver::StopImpl() {
if (mStopped) {
return;
}
mSender->Stop();
mReceiver->Stop();
if (mCallWrapper) {
auto conduit = std::move(mConduit);
@@ -883,6 +881,10 @@ void RTCRtpTransceiver::StopImpl() {
}
mStopped = true;
mCurrentDirection.SetNull();
mSender->Stop();
mReceiver->Stop();
auto self = nsMainThreadPtrHandle<RTCRtpTransceiver>(
new nsMainThreadPtrHolder<RTCRtpTransceiver>(
"RTCRtpTransceiver::StopImpl::self", this, false));


@@ -238,6 +238,9 @@ class AudioProxyThread {
UniquePtr<AudioConverter> mAudioConverter;
};
#define INIT_MIRROR(name, val) \
name(AbstractThread::MainThread(), val, "MediaPipeline::" #name " (Mirror)")
MediaPipeline::MediaPipeline(const std::string& aPc,
RefPtr<MediaTransportHandler> aTransportHandler,
DirectionType aDirection,
@@ -248,7 +251,7 @@ MediaPipeline::MediaPipeline(const std::string& aPc,
mDirection(aDirection),
mCallThread(std::move(aCallThread)),
mStsThread(std::move(aStsThread)),
mActive(false, "MediaPipeline::mActive"),
INIT_MIRROR(mActive, false),
mLevel(0),
mTransportHandler(std::move(aTransportHandler)),
mRtpPacketsSent(0),
@@ -262,25 +265,17 @@ MediaPipeline::MediaPipeline(const std::string& aPc,
mRtpHeaderExtensionMap(new webrtc::RtpHeaderExtensionMap()),
mPacketDumper(PacketDumper::GetPacketDumper(mPc)) {}
#undef INIT_MIRROR
MediaPipeline::~MediaPipeline() {
MOZ_LOG(gMediaPipelineLog, LogLevel::Info,
("Destroying MediaPipeline: %s", mDescription.c_str()));
}
void MediaPipeline::Start() {
MOZ_ASSERT(NS_IsMainThread());
mActive = true;
}
void MediaPipeline::Stop() {
MOZ_ASSERT(NS_IsMainThread());
mActive = false;
}
void MediaPipeline::Shutdown() {
MOZ_ASSERT(NS_IsMainThread());
Stop();
mActive.DisconnectIfConnected();
RUN_ON_THREAD(mStsThread,
WrapRunnable(RefPtr<MediaPipeline>(this),
&MediaPipeline::DetachTransport_s),
@@ -811,9 +806,13 @@ MediaPipelineTransmit::~MediaPipelineTransmit() {
MOZ_ASSERT(!mDomTrack.Ref());
}
void MediaPipelineTransmit::InitControl(
MediaPipelineTransmitControlInterface* aControl) {
mActive.Connect(aControl->CanonicalTransmitting());
}
void MediaPipelineTransmit::Shutdown() {
MediaPipeline::Shutdown();
MOZ_ASSERT(!mActive);
mWatchManager.Shutdown();
if (mDomTrack.Ref()) {
mDomTrack.Ref()->RemovePrincipalChangeObserver(this);
@@ -1221,6 +1220,11 @@ MediaPipelineReceive::MediaPipelineReceive(
MediaPipelineReceive::~MediaPipelineReceive() = default;
void MediaPipelineReceive::InitControl(
MediaPipelineReceiveControlInterface* aControl) {
mActive.Connect(aControl->CanonicalReceiving());
}
void MediaPipelineReceive::Shutdown() {
MOZ_ASSERT(NS_IsMainThread());
MediaPipeline::Shutdown();
@@ -1236,10 +1240,10 @@ void MediaPipelineReceive::UpdateMaybeTrackNeedsUnmute() {
void MediaPipelineReceive::OnRtpPacketReceived() {
ASSERT_ON_THREAD(mStsThread);
if (mMaybeTrackNeedsUnmute) {
bool needsUnmute = mMaybeTrackNeedsUnmute.exchange(false);
if (needsUnmute) {
mUnmuteEvent.Notify();
}
mMaybeTrackNeedsUnmute = false;
}
class MediaPipelineReceiveAudio::PipelineListener


@@ -49,6 +49,14 @@ class MediaStreamTrack;
struct RTCRTPContributingSourceStats;
} // namespace dom
struct MediaPipelineReceiveControlInterface {
virtual AbstractCanonical<bool>* CanonicalReceiving() = 0;
};
struct MediaPipelineTransmitControlInterface {
virtual AbstractCanonical<bool>* CanonicalTransmitting() = 0;
};
// A class that represents the pipeline of audio and video
// The dataflow looks like:
//
@@ -89,9 +97,6 @@ class MediaPipeline : public sigslot::has_slots<> {
RefPtr<nsISerialEventTarget> aStsThread,
RefPtr<MediaSessionConduit> aConduit);
void Start();
void Stop();
void SetLevel(size_t aLevel) { mLevel = aLevel; }
// Main thread shutdown.
@@ -198,7 +203,7 @@
protected:
// True if we should be actively transmitting or receiving data. Main thread
// only.
Watchable<bool> mActive;
Mirror<bool> mActive;
Atomic<size_t> mLevel;
std::string mTransportId;
const RefPtr<MediaTransportHandler> mTransportHandler;
@@ -258,6 +263,8 @@ class MediaPipelineTransmit
RefPtr<nsISerialEventTarget> aStsThread, bool aIsVideo,
RefPtr<MediaSessionConduit> aConduit);
void InitControl(MediaPipelineTransmitControlInterface* aControl);
void Shutdown() override;
bool Transmitting() const;
@@ -349,6 +356,8 @@ class MediaPipelineReceive : public MediaPipeline {
RefPtr<nsISerialEventTarget> aStsThread,
RefPtr<MediaSessionConduit> aConduit);
void InitControl(MediaPipelineReceiveControlInterface* aControl);
// Called when ALPN is negotiated and is requesting privacy, so receive
// pipelines do not enter data into the graph under a content principal.
virtual void OnPrivacyRequested_s() = 0;


@@ -8,6 +8,7 @@
#define MEDIA_WEBRTC_SIGNALING_GTEST_CANONICALS_H_
#include "MediaConduitControl.h"
#include "MediaPipeline.h"
#include "WaitFor.h"
namespace mozilla {
@@ -63,6 +64,8 @@ class ConcreteCanonicals {
class ConcreteControl : public AudioConduitControlInterface,
public VideoConduitControlInterface,
public MediaPipelineReceiveControlInterface,
public MediaPipelineTransmitControlInterface,
private ConcreteCanonicals {
private:
RefPtr<nsISerialEventTarget> mTarget;
@@ -79,7 +82,9 @@ class ConcreteControl : public AudioConduitControlInterface,
}
// MediaConduitControlInterface
// -- MediaPipelineReceiveControlInterface
AbstractCanonical<bool>* CanonicalReceiving() override { return &mReceiving; }
// -- MediaPipelineTransmitControlInterface
AbstractCanonical<bool>* CanonicalTransmitting() override {
return &mTransmitting;
}


@@ -282,15 +282,10 @@ class TestAgent {
void Stop() {
MOZ_MTLOG(ML_DEBUG, "Stopping");
if (audio_pipeline_) {
audio_pipeline_->Stop();
}
if (audio_conduit_) {
control_.Update([](auto& aControl) {
aControl.mTransmitting = false;
aControl.mReceiving = false;
});
}
control_.Update([](auto& aControl) {
aControl.mTransmitting = false;
aControl.mReceiving = false;
});
}
void Shutdown_s() { transport_->Shutdown(); }
@@ -363,17 +358,18 @@ class TestAgentSend : public TestAgent {
virtual void CreatePipeline(const std::string& aTransportId) {
std::string test_pc;
RefPtr<MediaPipelineTransmit> audio_pipeline = new MediaPipelineTransmit(
auto audio_pipeline = MakeRefPtr<MediaPipelineTransmit>(
test_pc, transport_, AbstractThread::MainThread(),
test_utils->sts_target(), false, audio_conduit_);
Unused << WaitFor(InvokeAsync(call_->mCallThread, __func__, [&] {
audio_pipeline->InitControl(&control_);
return GenericPromise::CreateAndResolve(true, __func__);
}));
audio_pipeline->SetSendTrackOverride(audio_track_);
audio_pipeline->Start();
control_.Update([](auto& aControl) { aControl.mTransmitting = true; });
audio_pipeline->UpdateTransport_m(aTransportId, nullptr);
audio_pipeline_ = audio_pipeline;
audio_pipeline_->UpdateTransport_m(aTransportId, nullptr);
}
};
@@ -391,16 +387,19 @@ class TestAgentReceive : public TestAgent {
virtual void CreatePipeline(const std::string& aTransportId) {
std::string test_pc;
audio_pipeline_ = new MediaPipelineReceiveAudio(
auto audio_pipeline = MakeRefPtr<MediaPipelineReceiveAudio>(
test_pc, transport_, AbstractThread::MainThread(),
test_utils->sts_target(),
static_cast<AudioSessionConduit*>(audio_conduit_.get()), nullptr,
TrackingId(), PRINCIPAL_HANDLE_NONE, PrincipalPrivacy::NonPrivate);
Unused << WaitFor(InvokeAsync(call_->mCallThread, __func__, [&] {
audio_pipeline->InitControl(&control_);
return GenericPromise::CreateAndResolve(true, __func__);
}));
audio_pipeline_->Start();
control_.Update([](auto& aControl) { aControl.mReceiving = true; });
audio_pipeline_->UpdateTransport_m(aTransportId, std::move(bundle_filter_));
audio_pipeline->UpdateTransport_m(aTransportId, std::move(bundle_filter_));
audio_pipeline_ = audio_pipeline;
}
void SetBundleFilter(UniquePtr<MediaPipelineFilter>&& filter) {