Bug 1827703 - Improve integration with OpenH264 decoder. r=media-playback-reviewers,padenot

Prefer timestamp from the OpenH264 decoder if available.

This patch bumps the API version for the GMP plugin API. The OpenH264
library takes advantage of this. It also adds a few quality-of-life
options. One can request that the GMP library's logging be turned on via
the "GMPLibrary" log module. One can toggle between single- and
multi-threaded decoding via media.gmp.decoder.multithreaded, and between
single and batch decoding via media.gmp.decoder.decode_batch.
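
For local testing, something along these lines should exercise the new
options (the log module and pref names come from this patch; the MOZ_LOG
invocation and about:config toggles are the standard mechanisms and are
shown only as an illustration):

  MOZ_LOG="GMPLibrary:5"            # forward OpenH264 library logging at verbose level
  media.gmp.decoder.multithreaded   # about:config: request the threaded decoder
  media.gmp.decoder.decode_batch    # about:config: feed input to the decoder in batches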

Provided the OpenH264 library supports it, we will now use the adjusted
presentation timestamp reported by the decoder. This is necessary for
encodings with B frames, whose output may arrive out of presentation
order. The value corresponds to SBufferInfo::uiOutYuvTimestamp from the
library. If it is not available, we default to our historical behaviour
and use the original presentation timestamp.
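
Concretely, the original timestamp remains the key used to match a decoded
frame back to its input sample, while the decoder-adjusted timestamp
(falling back to the original when the plugin never set one) becomes the
presentation time of the output. Condensed from GMPVideoDecoder::Decoded()
in this patch:

  // The original PTS still keys the sample-metadata lookup...
  if (auto entryHandle = mSamples.Lookup(decodedFrame->Timestamp())) {
    sampleData = std::move(entryHandle.Data());
    entryHandle.Remove();
  }
  // ...but the output VideoData carries the decoder-adjusted PTS, which
  // GMPVideoi420FrameImpl::UpdatedTimestamp() resolves to the original
  // timestamp when no updated value was provided.
  RefPtr<VideoData> v = VideoData::CreateAndCopyData(
      mConfig, mImageContainer, sampleData->mOffset,
      media::TimeUnit::FromMicroseconds(decodedFrame->UpdatedTimestamp()),
      media::TimeUnit::FromMicroseconds(decodedFrame->Duration()), b,
      sampleData->mKeyframe, media::TimeUnit::FromMicroseconds(-1),
      pictureRegion, mKnowsCompositor);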

Additionally, we now assume that decoded H264 frames may be produced out
of order, and we add a reorder queue to buffer the decoded output, similar
to other H264 decoders such as Apple's and Widevine's. This ensures that,
regardless of the plugin's output order, we perform any necessary
reordering.
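
The queue is drained conservatively: frames are only released once more
than the stream's maximum reference frame count (H264::ComputeMaxRefFrames
on the config's extra data) is buffered, since anything within that window
may still be overtaken by a later-decoded frame. Condensed from
GMPVideoDecoder::ProcessReorderQueue() in this patch:

  DecodedData results;
  size_t availableFrames = mReorderQueue.Length();
  if (availableFrames > mMaxRefFrames) {
    // Only frames that can no longer be reordered are handed back to the
    // caller; the rest stay queued until more output (or a drain) arrives.
    size_t resolvedFrames = availableFrames - mMaxRefFrames;
    results.SetCapacity(resolvedFrames);
    do {
      results.AppendElement(mReorderQueue.Pop());
    } while (--resolvedFrames > 0);
  }
  aPromise.ResolveIfExists(std::move(results), aMethodName);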

Differential Revision: https://phabricator.services.mozilla.com/D175281
Andrew Osmond 2023-04-18 14:47:42 +00:00
parent 6969581510
commit 193c21ac76
15 changed files with 201 additions and 44 deletions


@@ -7,11 +7,14 @@
 #define DOM_MEDIA_GMPLOG_H_
 #include "content_decryption_module.h"
+#include "gmp-video-codec.h"
 #include "mozilla/Logging.h"
 namespace mozilla {
 extern LogModule* GetGMPLog();
+extern LogModule* GetGMPLibraryLog();
+extern GMPLogLevel GetGMPLibraryLogLevel();
 #define GMP_LOG_ERROR(msg, ...) \
   MOZ_LOG(GetGMPLog(), LogLevel::Error, (msg, ##__VA_ARGS__))


@@ -44,6 +44,11 @@ struct ParamTraits<GMPVideoCodecMode>
     : public ContiguousEnumSerializer<GMPVideoCodecMode, kGMPRealtimeVideo,
                                       kGMPCodecModeInvalid> {};
+template <>
+struct ParamTraits<GMPLogLevel>
+    : public ContiguousEnumSerializerInclusive<GMPLogLevel, kGMPLogDefault,
+                                               kGMPLogInvalid> {};
 template <>
 struct ParamTraits<GMPBufferType>
     : public ContiguousEnumSerializer<GMPBufferType, GMP_BufferSingle,
@@ -113,12 +118,15 @@ struct ParamTraits<GMPVideoCodec> {
       WriteParam(aWriter, aParam.mSimulcastStream[i]);
     }
     WriteParam(aWriter, aParam.mMode);
+    WriteParam(aWriter, aParam.mUseThreadedDecode);
+    WriteParam(aWriter, aParam.mLogLevel);
   }
   static bool Read(MessageReader* aReader, paramType* aResult) {
     // NOTE: make sure this matches any versions supported
     if (!ReadParam(aReader, &(aResult->mGMPApiVersion)) ||
-        aResult->mGMPApiVersion != kGMPVersion33) {
+        (aResult->mGMPApiVersion != kGMPVersion33 &&
+         aResult->mGMPApiVersion != kGMPVersion34)) {
       return false;
     }
     if (!ReadParam(aReader, &(aResult->mCodecType))) {
@@ -161,7 +169,9 @@ struct ParamTraits<GMPVideoCodec> {
       }
     }
-    if (!ReadParam(aReader, &(aResult->mMode))) {
+    if (!ReadParam(aReader, &(aResult->mMode)) ||
+        !ReadParam(aReader, &(aResult->mUseThreadedDecode)) ||
+        !ReadParam(aReader, &(aResult->mLogLevel))) {
       return false;
     }


@@ -44,6 +44,29 @@ LogModule* GetGMPLog() {
   return sLog;
 }
+LogModule* GetGMPLibraryLog() {
+  static LazyLogModule sLog("GMPLibrary");
+  return sLog;
+}
+GMPLogLevel GetGMPLibraryLogLevel() {
+  switch (GetGMPLibraryLog()->Level()) {
+    case LogLevel::Disabled:
+      return kGMPLogQuiet;
+    case LogLevel::Error:
+      return kGMPLogError;
+    case LogLevel::Warning:
+      return kGMPLogWarning;
+    case LogLevel::Info:
+      return kGMPLogInfo;
+    case LogLevel::Debug:
+      return kGMPLogDebug;
+    case LogLevel::Verbose:
+      return kGMPLogDetail;
+  }
+  return kGMPLogInvalid;
+}
 #ifdef __CLASS__
 #  undef __CLASS__
 #endif


@@ -8,6 +8,7 @@
 #include "GMPContentParent.h"
 #include "GMPCrashHelper.h"
+#include "gmp-video-codec.h"
 #include "mozIGeckoMediaPluginService.h"
 #include "mozilla/Atomics.h"
 #include "mozilla/gmp/GMPTypes.h"
@@ -32,6 +33,8 @@ class GMPCrashHelper;
 class MediaResult;
 extern LogModule* GetGMPLog();
+extern LogModule* GetGMPLibraryLog();
+extern GMPLogLevel GetGMPLibraryLogLevel();
 namespace gmp {


@@ -662,8 +662,8 @@ void GeckoMediaPluginServiceParent::SendFlushFOGData(
           ipc::RejectCallback&&>(
           "GMPParent::SendFlushFOGData", gmp,
           static_cast<void (GMPParent::*)(
-              mozilla::ipc::ResolveCallback<ipc::ByteBuf> && aResolve,
-              mozilla::ipc::RejectCallback && aReject)>(
+              mozilla::ipc::ResolveCallback<ipc::ByteBuf>&& aResolve,
+              mozilla::ipc::RejectCallback&& aReject)>(
               &GMPParent::SendFlushFOGData),
           [promise](ipc::ByteBuf&& aValue) {
@@ -700,8 +700,8 @@ GeckoMediaPluginServiceParent::TestTriggerMetrics() {
           ipc::RejectCallback&&>(
           "GMPParent::SendTestTriggerMetrics", gmp,
           static_cast<void (GMPParent::*)(
-              mozilla::ipc::ResolveCallback<bool> && aResolve,
-              mozilla::ipc::RejectCallback && aReject)>(
+              mozilla::ipc::ResolveCallback<bool>&& aResolve,
+              mozilla::ipc::RejectCallback&& aReject)>(
              &PGMPParent::SendTestTriggerMetrics),
          [promise](bool aValue) {


@@ -63,6 +63,7 @@ struct GMPVideoi420FrameData {
   int32_t mWidth;
   int32_t mHeight;
   uint64_t mTimestamp;  // microseconds
+  uint64_t? mUpdatedTimestamp;  // microseconds
   uint64_t mDuration;  // microseconds
 };


@@ -283,9 +283,19 @@ void GMPVideoDecoderParent::ActorDestroy(ActorDestroyReason aWhy) {
 mozilla::ipc::IPCResult GMPVideoDecoderParent::RecvDecoded(
     const GMPVideoi420FrameData& aDecodedFrame) {
   --mFrameCount;
-  GMP_LOG_VERBOSE("GMPVideoDecoderParent[%p]::RecvDecoded() timestamp=%" PRId64
-                  " frameCount=%d",
-                  this, aDecodedFrame.mTimestamp(), mFrameCount);
+  if (aDecodedFrame.mUpdatedTimestamp() &&
+      aDecodedFrame.mUpdatedTimestamp().value() != aDecodedFrame.mTimestamp()) {
+    GMP_LOG_VERBOSE(
+        "GMPVideoDecoderParent[%p]::RecvDecoded() timestamp=[%" PRId64
+        " -> %" PRId64 "] frameCount=%d",
+        this, aDecodedFrame.mTimestamp(),
+        aDecodedFrame.mUpdatedTimestamp().value(), mFrameCount);
+  } else {
+    GMP_LOG_VERBOSE(
+        "GMPVideoDecoderParent[%p]::RecvDecoded() timestamp=%" PRId64
+        " frameCount=%d",
+        this, aDecodedFrame.mTimestamp(), mFrameCount);
+  }
   if (mCallback) {
     if (GMPVideoi420FrameImpl::CheckFrameData(aDecodedFrame)) {


@@ -28,6 +28,7 @@ GMPVideoi420FrameImpl::GMPVideoi420FrameImpl(
       mWidth(aFrameData.mWidth()),
       mHeight(aFrameData.mHeight()),
       mTimestamp(aFrameData.mTimestamp()),
+      mUpdatedTimestamp(aFrameData.mUpdatedTimestamp()),
       mDuration(aFrameData.mDuration()) {
   MOZ_ASSERT(aHost);
 }
@@ -41,6 +42,7 @@ bool GMPVideoi420FrameImpl::InitFrameData(GMPVideoi420FrameData& aFrameData) {
   aFrameData.mWidth() = mWidth;
   aFrameData.mHeight() = mHeight;
   aFrameData.mTimestamp() = mTimestamp;
+  aFrameData.mUpdatedTimestamp() = mUpdatedTimestamp;
   aFrameData.mDuration() = mDuration;
   return true;
 }
@@ -158,6 +160,7 @@ GMPErr GMPVideoi420FrameImpl::CreateEmptyFrame(int32_t aWidth, int32_t aHeight,
   mWidth = aWidth;
   mHeight = aHeight;
   mTimestamp = 0ll;
+  mUpdatedTimestamp.reset();
   mDuration = 0ll;
   return GMPNoErr;
@@ -220,6 +223,7 @@ GMPErr GMPVideoi420FrameImpl::CopyFrame(const GMPVideoi420Frame& aFrame) {
   mWidth = f.mWidth;
   mHeight = f.mHeight;
   mTimestamp = f.mTimestamp;
+  mUpdatedTimestamp = f.mUpdatedTimestamp;
   mDuration = f.mDuration;
   return GMPNoErr;
@@ -233,6 +237,7 @@ void GMPVideoi420FrameImpl::SwapFrame(GMPVideoi420Frame* aFrame) {
   std::swap(mWidth, f->mWidth);
   std::swap(mHeight, f->mHeight);
   std::swap(mTimestamp, f->mTimestamp);
+  std::swap(mUpdatedTimestamp, f->mUpdatedTimestamp);
   std::swap(mDuration, f->mDuration);
 }
@@ -296,6 +301,14 @@ void GMPVideoi420FrameImpl::SetTimestamp(uint64_t aTimestamp) {
 uint64_t GMPVideoi420FrameImpl::Timestamp() const { return mTimestamp; }
+void GMPVideoi420FrameImpl::SetUpdatedTimestamp(uint64_t aTimestamp) {
+  mUpdatedTimestamp = Some(aTimestamp);
+}
+uint64_t GMPVideoi420FrameImpl::UpdatedTimestamp() const {
+  return mUpdatedTimestamp ? *mUpdatedTimestamp : mTimestamp;
+}
 void GMPVideoi420FrameImpl::SetDuration(uint64_t aDuration) {
   mDuration = aDuration;
 }


@@ -9,6 +9,7 @@
 #include "gmp-video-frame-i420.h"
 #include "mozilla/ipc/Shmem.h"
 #include "GMPVideoPlaneImpl.h"
+#include "mozilla/Maybe.h"
 namespace mozilla::gmp {
@@ -53,6 +54,8 @@ class GMPVideoi420FrameImpl : public GMPVideoi420Frame {
   int32_t Height() const override;
   void SetTimestamp(uint64_t aTimestamp) override;
   uint64_t Timestamp() const override;
+  void SetUpdatedTimestamp(uint64_t aTimestamp) override;
+  uint64_t UpdatedTimestamp() const override;
   void SetDuration(uint64_t aDuration) override;
   uint64_t Duration() const override;
   bool IsZeroSize() const override;
@@ -68,6 +71,7 @@ class GMPVideoi420FrameImpl : public GMPVideoi420Frame {
   int32_t mWidth;
   int32_t mHeight;
   uint64_t mTimestamp;
+  Maybe<uint64_t> mUpdatedTimestamp;
   uint64_t mDuration;
 };


@@ -134,10 +134,24 @@ enum GMPVideoCodecMode {
   kGMPCodecModeInvalid  // Should always be last.
 };
+enum GMPLogLevel {
+  kGMPLogDefault,
+  kGMPLogQuiet,
+  kGMPLogError,
+  kGMPLogWarning,
+  kGMPLogInfo,
+  kGMPLogDebug,
+  kGMPLogDetail,
+  kGMPLogInvalid  // Should always be last.
+};
 enum GMPApiVersion {
   kGMPVersion32 =
       1,  // leveraging that V32 had mCodecType first, and only supported H264
   kGMPVersion33 = 33,
+  // Adds GMPVideoi420Frame::SetUpdatedTimestamp/UpdatedTimestamp
+  kGMPVersion34 = 34,
 };
 struct GMPVideoCodec {
@@ -163,6 +177,9 @@ struct GMPVideoCodec {
   GMPSimulcastStream mSimulcastStream[kGMPMaxSimulcastStreams];
   GMPVideoCodecMode mMode;
+  bool mUseThreadedDecode;
+  GMPLogLevel mLogLevel;
 };
 // Either single encoded unit, or multiple units separated by 8/16/24/32


@@ -129,6 +129,14 @@ class GMPVideoi420Frame : public GMPVideoFrame {
   // Reset underlying plane buffers sizes to 0. This function doesn't clear
   // memory.
   virtual void ResetSize() = 0;
+  // -- These methods have been added in kGMPVersion34 --
+  // Set an updated frame timestamp (microseconds) from decoder
+  virtual void SetUpdatedTimestamp(uint64_t aTimestamp) = 0;
+  // Get an updated frame timestamp (microseconds) from decoder
+  virtual uint64_t UpdatedTimestamp() const = 0;
 };
 #endif  // GMP_VIDEO_FRAME_I420_h_


@@ -10,8 +10,10 @@
 #include "GMPLog.h"
 #include "MediaData.h"
 #include "mozilla/EndianUtils.h"
+#include "mozilla/StaticPrefs_media.h"
 #include "nsServiceManagerUtils.h"
 #include "AnnexB.h"
+#include "H264.h"
 #include "MP4Decoder.h"
 #include "prsystem.h"
 #include "VPXDecoder.h"
@@ -74,19 +76,33 @@ void GMPVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame) {
   b.mYUVColorSpace =
       DefaultColorSpace({decodedFrame->Width(), decodedFrame->Height()});
-  Maybe<int64_t> streamOffset =
-      mStreamOffsets.Extract(decodedFrame->Timestamp());
-  if (NS_WARN_IF(!streamOffset)) {
-    streamOffset.emplace(mLastStreamOffset);
-  }
+  UniquePtr<SampleMetadata> sampleData;
+  if (auto entryHandle = mSamples.Lookup(decodedFrame->Timestamp())) {
+    sampleData = std::move(entryHandle.Data());
+    entryHandle.Remove();
+  } else {
+    GMP_LOG_DEBUG(
+        "GMPVideoDecoder::Decoded(this=%p) missing sample metadata for "
+        "time %" PRIu64,
+        this, decodedFrame->Timestamp());
+    if (mSamples.IsEmpty()) {
+      // If we have no remaining samples in the table, then we have processed
+      // all outstanding decode requests.
+      ProcessReorderQueue(mDecodePromise, __func__);
+    }
+    return;
+  }
+  MOZ_ASSERT(sampleData);
   gfx::IntRect pictureRegion(0, 0, decodedFrame->Width(),
                              decodedFrame->Height());
   RefPtr<VideoData> v = VideoData::CreateAndCopyData(
-      mConfig, mImageContainer, *streamOffset,
-      media::TimeUnit::FromMicroseconds(decodedFrame->Timestamp()),
-      media::TimeUnit::FromMicroseconds(decodedFrame->Duration()), b, false,
-      media::TimeUnit::FromMicroseconds(-1), pictureRegion, mKnowsCompositor);
+      mConfig, mImageContainer, sampleData->mOffset,
+      media::TimeUnit::FromMicroseconds(decodedFrame->UpdatedTimestamp()),
+      media::TimeUnit::FromMicroseconds(decodedFrame->Duration()), b,
+      sampleData->mKeyframe, media::TimeUnit::FromMicroseconds(-1),
+      pictureRegion, mKnowsCompositor);
   RefPtr<GMPVideoDecoder> self = this;
   if (v) {
     mPerformanceRecorder.Record(static_cast<int64_t>(decodedFrame->Timestamp()),
@@ -99,15 +115,16 @@ void GMPVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame) {
           aStage.SetColorRange(b.mColorRange);
         });
-    mDecodedData.AppendElement(std::move(v));
-    if (mStreamOffsets.IsEmpty()) {
-      // If we have no remaining offsets in the table, then we have processed
+    mReorderQueue.Push(std::move(v));
+    if (mSamples.IsEmpty()) {
+      // If we have no remaining samples in the table, then we have processed
       // all outstanding decode requests.
-      mDecodePromise.ResolveIfExists(std::move(mDecodedData), __func__);
+      ProcessReorderQueue(mDecodePromise, __func__);
     }
   } else {
-    mDecodedData.Clear();
+    mReorderQueue.Clear();
+    mSamples.Clear();
     mDecodePromise.RejectIfExists(
         MediaResult(NS_ERROR_OUT_OF_MEMORY,
                     RESULT_DETAIL("CallBack::CreateAndCopyData")),
@@ -128,17 +145,15 @@ void GMPVideoDecoder::ReceivedDecodedFrame(const uint64_t aPictureId) {
 void GMPVideoDecoder::InputDataExhausted() {
   GMP_LOG_DEBUG("GMPVideoDecoder::InputDataExhausted");
   MOZ_ASSERT(IsOnGMPThread());
-  mStreamOffsets.Clear();
-  mDecodePromise.ResolveIfExists(std::move(mDecodedData), __func__);
-  mDecodedData = DecodedData();
+  mSamples.Clear();
+  ProcessReorderQueue(mDecodePromise, __func__);
 }
 void GMPVideoDecoder::DrainComplete() {
   GMP_LOG_DEBUG("GMPVideoDecoder::DrainComplete");
   MOZ_ASSERT(IsOnGMPThread());
-  mStreamOffsets.Clear();
-  mDrainPromise.ResolveIfExists(std::move(mDecodedData), __func__);
-  mDecodedData = DecodedData();
+  mSamples.Clear();
+  ProcessReorderQueue(mDrainPromise, __func__);
 }
 void GMPVideoDecoder::ResetComplete() {
@@ -165,6 +180,25 @@ void GMPVideoDecoder::Terminated() {
   Error(GMPErr::GMPAbortedErr);
 }
+void GMPVideoDecoder::ProcessReorderQueue(
+    MozPromiseHolder<DecodePromise>& aPromise, const char* aMethodName) {
+  if (aPromise.IsEmpty()) {
+    return;
+  }
+  DecodedData results;
+  size_t availableFrames = mReorderQueue.Length();
+  if (availableFrames > mMaxRefFrames) {
+    size_t resolvedFrames = availableFrames - mMaxRefFrames;
+    results.SetCapacity(resolvedFrames);
+    do {
+      results.AppendElement(mReorderQueue.Pop());
+    } while (--resolvedFrames > 0);
+  }
+  aPromise.ResolveIfExists(std::move(results), aMethodName);
+}
 GMPVideoDecoder::GMPVideoDecoder(const GMPVideoDecoderParams& aParams)
     : mConfig(aParams.mConfig),
       mGMP(nullptr),
@@ -173,7 +207,8 @@ GMPVideoDecoder::GMPVideoDecoder(const GMPVideoDecoderParams& aParams)
       mCrashHelper(aParams.mCrashHelper),
       mImageContainer(aParams.mImageContainer),
       mKnowsCompositor(aParams.mKnowsCompositor),
-      mTrackingId(aParams.mTrackingId) {}
+      mTrackingId(aParams.mTrackingId),
+      mCanDecodeBatch(StaticPrefs::media_gmp_decoder_decode_batch()) {}
 void GMPVideoDecoder::InitTags(nsTArray<nsCString>& aTags) {
   if (MP4Decoder::IsH264(mConfig.mMimeType)) {
@@ -252,7 +287,7 @@ void GMPVideoDecoder::GMPInitDone(GMPVideoDecoderProxy* aGMP,
   GMPVideoCodec codec;
   memset(&codec, 0, sizeof(codec));
-  codec.mGMPApiVersion = kGMPVersion33;
+  codec.mGMPApiVersion = kGMPVersion34;
   nsTArray<uint8_t> codecSpecific;
   if (MP4Decoder::IsH264(mConfig.mMimeType)) {
     codec.mCodecType = kGMPVideoCodecH264;
@@ -262,6 +297,7 @@ void GMPVideoDecoder::GMPInitDone(GMPVideoDecoderProxy* aGMP,
     // OpenH264 expects pseudo-AVCC, but others must be passed
     // AnnexB for H264.
     mConvertToAnnexB = !isOpenH264;
+    mMaxRefFrames = H264::ComputeMaxRefFrames(mConfig.mExtraData);
   } else if (VPXDecoder::IsVP8(mConfig.mMimeType)) {
     codec.mCodecType = kGMPVideoCodecVP8;
   } else if (VPXDecoder::IsVP9(mConfig.mMimeType)) {
@@ -274,6 +310,8 @@ void GMPVideoDecoder::GMPInitDone(GMPVideoDecoderProxy* aGMP,
   }
   codec.mWidth = mConfig.mImage.width;
   codec.mHeight = mConfig.mImage.height;
+  codec.mUseThreadedDecode = StaticPrefs::media_gmp_decoder_multithreaded();
+  codec.mLogLevel = GetGMPLibraryLogLevel();
   nsresult rv =
       aGMP->InitDecode(codec, codecSpecific, this, PR_GetNumberOfProcessors());
@@ -368,11 +406,12 @@ RefPtr<MediaDataDecoder::DecodePromise> GMPVideoDecoder::Decode(
   }
   // If we have multiple outstanding frames, we need to track which offset
-  // belongs to which frame.
-  mLastStreamOffset = sample->mOffset;
-  mStreamOffsets.WithEntryHandle(frameTimestamp, [&](auto entryHandle) {
-    MOZ_ASSERT(!entryHandle, "Duplicate sample with matching timestamp!");
-    entryHandle.InsertOrUpdate(sample->mOffset);
+  // belongs to which frame. During seek, it is possible to get the same frame
+  // requested twice, if the old frame is still outstanding. We will simply drop
+  // the extra decoded frame and request more input if the last outstanding.
+  mSamples.WithEntryHandle(frameTimestamp, [&](auto entryHandle) {
+    auto sampleData = MakeUnique<SampleMetadata>(sample);
+    entryHandle.InsertOrUpdate(std::move(sampleData));
   });
   return p;


@@ -14,8 +14,9 @@
 #  include "MediaInfo.h"
 #  include "PerformanceRecorder.h"
 #  include "PlatformDecoderModule.h"
+#  include "ReorderQueue.h"
 #  include "mozIGeckoMediaPluginService.h"
-#  include "nsHashtablesFwd.h"
+#  include "nsClassHashtable.h"
 namespace mozilla {
@@ -31,9 +32,9 @@ struct MOZ_STACK_CLASS GMPVideoDecoderParams {
 DDLoggedTypeDeclNameAndBase(GMPVideoDecoder, MediaDataDecoder);
-class GMPVideoDecoder : public MediaDataDecoder,
-                        public GMPVideoDecoderCallbackProxy,
-                        public DecoderDoctorLifeLogger<GMPVideoDecoder> {
+class GMPVideoDecoder final : public MediaDataDecoder,
+                              public GMPVideoDecoderCallbackProxy,
+                              public DecoderDoctorLifeLogger<GMPVideoDecoder> {
  public:
  explicit GMPVideoDecoder(const GMPVideoDecoderParams& aParams);
@@ -50,6 +51,7 @@ class GMPVideoDecoder : public MediaDataDecoder,
     return mConvertToAnnexB ? ConversionRequired::kNeedAnnexB
                             : ConversionRequired::kNeedAVCC;
   }
+  bool CanDecodeBatch() const override { return mCanDecodeBatch; }
   // GMPVideoDecoderCallbackProxy
   // All those methods are called on the GMP thread.
@@ -67,6 +69,8 @@ class GMPVideoDecoder : public MediaDataDecoder,
   virtual nsCString GetNodeId();
   virtual GMPUniquePtr<GMPVideoEncodedFrame> CreateFrame(MediaRawData* aSample);
   virtual const VideoInfo& GetConfig() const;
+  void ProcessReorderQueue(MozPromiseHolder<DecodePromise>& aPromise,
+                           const char* aMethodName);
  private:
   class GMPInitDoneCallback : public GetGMPVideoDecoderCallback {
@@ -91,18 +95,28 @@ class GMPVideoDecoder : public MediaDataDecoder,
   MozPromiseHolder<InitPromise> mInitPromise;
   RefPtr<GMPCrashHelper> mCrashHelper;
-  int64_t mLastStreamOffset = 0;
-  nsTHashMap<nsUint64HashKey, int64_t> mStreamOffsets;
+  struct SampleMetadata {
+    explicit SampleMetadata(MediaRawData* aSample)
+        : mOffset(aSample->mOffset), mKeyframe(aSample->mKeyframe) {}
+    int64_t mOffset;
+    bool mKeyframe;
+  };
+  nsClassHashtable<nsUint64HashKey, SampleMetadata> mSamples;
   RefPtr<layers::ImageContainer> mImageContainer;
   RefPtr<layers::KnowsCompositor> mKnowsCompositor;
   PerformanceRecorderMulti<DecodeStage> mPerformanceRecorder;
   const Maybe<TrackingId> mTrackingId;
+  uint32_t mMaxRefFrames = 0;
+  ReorderQueue mReorderQueue;
   MozPromiseHolder<DecodePromise> mDecodePromise;
   MozPromiseHolder<DecodePromise> mDrainPromise;
   MozPromiseHolder<FlushPromise> mFlushPromise;
   DecodedData mDecodedData;
   bool mConvertToAnnexB = false;
+  bool mCanDecodeBatch = false;
 };
 }  // namespace mozilla


@@ -141,7 +141,7 @@ int32_t WebrtcGmpVideoEncoder::InitEncode(
   GMPVideoCodec codecParams;
   memset(&codecParams, 0, sizeof(codecParams));
-  codecParams.mGMPApiVersion = 33;
+  codecParams.mGMPApiVersion = kGMPVersion34;
   codecParams.mStartBitrate = aCodecSettings->startBitrate;
   codecParams.mMinBitrate = aCodecSettings->minBitrate;
   codecParams.mMaxBitrate = aCodecSettings->maxBitrate;
@@ -730,7 +730,7 @@ int32_t WebrtcGmpVideoDecoder::GmpInitDone(GMPVideoDecoderProxy* aGMP,
   // Bug XXXXXX: transfer settings from codecSettings to codec.
   GMPVideoCodec codec;
   memset(&codec, 0, sizeof(codec));
-  codec.mGMPApiVersion = 33;
+  codec.mGMPApiVersion = kGMPVersion34;
   // XXX this is currently a hack
   // GMPVideoCodecUnion codecSpecific;
@@ -1009,7 +1009,7 @@ void WebrtcGmpVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame) {
           .set_video_frame_buffer(video_frame_buffer)
           .set_timestamp_rtp(
               // round up
-              (aDecodedFrame->Timestamp() * 90ll + 999) / 1000)
+              (aDecodedFrame->UpdatedTimestamp() * 90ll + 999) / 1000)
           .build();
   mPerformanceRecorder.Record(
       static_cast<int64_t>(aDecodedFrame->Timestamp()),


@@ -10337,12 +10337,24 @@
   value: 2500
   mirror: always
+# True if we want to decode in batches.
+- name: media.gmp.decoder.decode_batch
+  type: RelaxedAtomicBool
+  value: false
+  mirror: always
 # True if we allow use of any decoders found in GMP plugins.
 - name: media.gmp.decoder.enabled
   type: RelaxedAtomicBool
   value: false
   mirror: always
+# True if we want to request the multithreaded GMP decoder.
+- name: media.gmp.decoder.multithreaded
+  type: RelaxedAtomicBool
+  value: false
+  mirror: always
 # True if we want to try using the GMP plugin decoders first.
 - name: media.gmp.decoder.preferred
   type: RelaxedAtomicBool