Diffstat (limited to 'dom/media'): 25 files changed, 667 insertions, 132 deletions
diff --git a/dom/media/AudioStream.cpp b/dom/media/AudioStream.cpp index 896dee407..4b1d82c37 100644 --- a/dom/media/AudioStream.cpp +++ b/dom/media/AudioStream.cpp @@ -345,7 +345,6 @@ AudioStream::Init(uint32_t aNumChannels, uint32_t aRate, cubeb* cubebContext = CubebUtils::GetCubebContext(); if (!cubebContext) { NS_WARNING("Can't get cubeb context!"); - CubebUtils::ReportCubebStreamInitFailure(true); return NS_ERROR_DOM_MEDIA_CUBEB_INITIALIZATION_ERR; } @@ -367,10 +366,8 @@ AudioStream::OpenCubeb(cubeb* aContext, cubeb_stream_params& aParams, latency_frames, DataCallback_S, StateCallback_S, this) == CUBEB_OK) { mCubebStream.reset(stream); - CubebUtils::ReportCubebBackendUsed(); } else { NS_WARNING(nsPrintfCString("AudioStream::OpenCubeb() %p failed to init cubeb", this).get()); - CubebUtils::ReportCubebStreamInitFailure(aIsFirst); return NS_ERROR_FAILURE; } diff --git a/dom/media/CubebUtils.cpp b/dom/media/CubebUtils.cpp index 93792e63b..0f0167d9c 100644 --- a/dom/media/CubebUtils.cpp +++ b/dom/media/CubebUtils.cpp @@ -239,31 +239,6 @@ cubeb* GetCubebContextUnlocked() return sCubebContext; } -void ReportCubebBackendUsed() -{ - StaticMutexAutoLock lock(sMutex); - - sAudioStreamInitEverSucceeded = true; - - bool foundBackend = false; - for (uint32_t i = 0; i < ArrayLength(AUDIOSTREAM_BACKEND_ID_STR); i++) { - if (!strcmp(cubeb_get_backend_id(sCubebContext), AUDIOSTREAM_BACKEND_ID_STR[i])) { - foundBackend = true; - } - } -} - -void ReportCubebStreamInitFailure(bool aIsFirst) -{ - StaticMutexAutoLock lock(sMutex); - if (!aIsFirst && !sAudioStreamInitEverSucceeded) { - // This machine has no audio hardware, or it's in really bad shape, don't - // send this info, since we want CUBEB_BACKEND_INIT_FAILURE_OTHER to detect - // failures to open multiple streams in a process over time. - return; - } -} - uint32_t GetCubebPlaybackLatencyInMilliseconds() { StaticMutexAutoLock lock(sMutex); diff --git a/dom/media/CubebUtils.h b/dom/media/CubebUtils.h index fa5fc2294..f43492374 100644 --- a/dom/media/CubebUtils.h +++ b/dom/media/CubebUtils.h @@ -35,8 +35,6 @@ double GetVolumeScale(); bool GetFirstStream(); cubeb* GetCubebContext(); cubeb* GetCubebContextUnlocked(); -void ReportCubebStreamInitFailure(bool aIsFirstStream); -void ReportCubebBackendUsed(); uint32_t GetCubebPlaybackLatencyInMilliseconds(); Maybe<uint32_t> GetCubebMSGLatencyInFrames(); bool CubebLatencyPrefSet(); diff --git a/dom/media/GraphDriver.cpp b/dom/media/GraphDriver.cpp index 47762c56e..b60dfee9d 100644 --- a/dom/media/GraphDriver.cpp +++ b/dom/media/GraphDriver.cpp @@ -623,9 +623,6 @@ AudioCallbackDriver::Init() cubeb* cubebContext = CubebUtils::GetCubebContext(); if (!cubebContext) { NS_WARNING("Could not get cubeb context."); - if (!mFromFallback) { - CubebUtils::ReportCubebStreamInitFailure(true); - } return; } @@ -710,18 +707,11 @@ AudioCallbackDriver::Init() NS_WARNING_ASSERTION( rv == CUBEB_OK, "Could not set the audio stream volume in GraphDriver.cpp"); - CubebUtils::ReportCubebBackendUsed(); } else { #ifdef MOZ_WEBRTC StaticMutexAutoUnlock unlock(AudioInputCubeb::Mutex()); #endif NS_WARNING("Could not create a cubeb stream for MediaStreamGraph, falling back to a SystemClockDriver"); - // Only report failures when we're not coming from a driver that was - // created itself as a fallback driver because of a previous audio driver - // failure. - if (!mFromFallback) { - CubebUtils::ReportCubebStreamInitFailure(firstStream); - } // Fall back to a driver using a normal thread. 
If needed, // the graph will try to re-open an audio stream later. MonitorAutoLock lock(GraphImpl()->GetMonitor()); diff --git a/dom/media/MediaData.h b/dom/media/MediaData.h index a79aac6ed..905b4c1d9 100644 --- a/dom/media/MediaData.h +++ b/dom/media/MediaData.h @@ -14,6 +14,7 @@ #include "nsIMemoryReporter.h" #include "SharedBuffer.h" #include "mozilla/RefPtr.h" +#include "mozilla/Span.h" #include "mozilla/UniquePtr.h" #include "mozilla/UniquePtrExtensions.h" #include "nsTArray.h" @@ -631,6 +632,8 @@ public: { return sizeof(*this) + mBuffer.ComputedSizeOfExcludingThis(); } + // Access the buffer as a Span. + operator Span<const uint8_t>() { return MakeSpan(Data(), Size()); } const CryptoSample& mCrypto; RefPtr<MediaByteBuffer> mExtraData; diff --git a/dom/media/MediaPrefs.h b/dom/media/MediaPrefs.h index b237ecd3d..e67796edd 100644 --- a/dom/media/MediaPrefs.h +++ b/dom/media/MediaPrefs.h @@ -120,6 +120,9 @@ private: #ifdef MOZ_FFVPX DECL_MEDIA_PREF("media.ffvpx.enabled", PDMFFVPXEnabled, bool, true); #endif +#ifdef MOZ_AV1 + DECL_MEDIA_PREF("media.av1.enabled", AV1Enabled, bool, false); +#endif #ifdef XP_WIN DECL_MEDIA_PREF("media.wmf.enabled", PDMWMFEnabled, bool, true); DECL_MEDIA_PREF("media.wmf.skip-blacklist", PDMWMFSkipBlacklist, bool, false); diff --git a/dom/media/VideoUtils.cpp b/dom/media/VideoUtils.cpp index b1a202c03..c06ba9070 100644 --- a/dom/media/VideoUtils.cpp +++ b/dom/media/VideoUtils.cpp @@ -207,8 +207,20 @@ already_AddRefed<SharedThreadPool> GetMediaThreadPool(MediaThreadType aType) name = "MediaPlayback"; break; } - return SharedThreadPool:: + RefPtr<SharedThreadPool> pool = SharedThreadPool:: Get(nsDependentCString(name), MediaPrefs::MediaThreadPoolDefaultCount()); + + // Ensure a larger stack for platform decoder threads + if (aType == MediaThreadType::PLATFORM_DECODER) { + const uint32_t minStackSize = 512*1024; + uint32_t stackSize; + MOZ_ALWAYS_SUCCEEDS(pool->GetThreadStackSize(&stackSize)); + if (stackSize < minStackSize) { + MOZ_ALWAYS_SUCCEEDS(pool->SetThreadStackSize(minStackSize)); + } + } + + return pool.forget(); } bool @@ -426,6 +438,16 @@ ParseMIMETypeString(const nsAString& aMIMEType, return ParseCodecsString(codecsStr, aOutCodecs); } +template <int N> +static bool +StartsWith(const nsACString& string, const char (&prefix)[N]) +{ + if (N - 1 > string.Length()) { + return false; + } + return memcmp(string.Data(), prefix, N - 1) == 0; +} + bool IsH264CodecString(const nsAString& aCodec) { @@ -458,15 +480,14 @@ IsVP9CodecString(const nsAString& aCodec) aCodec.EqualsLiteral("vp9.0"); } -template <int N> -static bool -StartsWith(const nsACString& string, const char (&prefix)[N]) +#ifdef MOZ_AV1 +bool +IsAV1CodecString(const nsAString& aCodec) { - if (N - 1 > string.Length()) { - return false; - } - return memcmp(string.Data(), prefix, N - 1) == 0; + return aCodec.EqualsLiteral("av1") || + StartsWith(NS_ConvertUTF16toUTF8(aCodec), "av01"); } +#endif UniquePtr<TrackInfo> CreateTrackInfoWithMIMEType(const nsACString& aCodecMIMEType) diff --git a/dom/media/VideoUtils.h b/dom/media/VideoUtils.h index 441b63792..aaf0e9903 100644 --- a/dom/media/VideoUtils.h +++ b/dom/media/VideoUtils.h @@ -345,6 +345,11 @@ IsVP8CodecString(const nsAString& aCodec); bool IsVP9CodecString(const nsAString& aCodec); +#ifdef MOZ_AV1 +bool +IsAV1CodecString(const nsAString& aCodec); +#endif + // Try and create a TrackInfo with a given codec MIME type. 
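IsAV1CodecString accepts either the bare "av1" token or any codec string beginning with "av01", mirroring the existing VP8/VP9 helpers. A rough illustration of which codecs= values pass the check, assuming a MOZ_AV1 build; the sample strings are made-up inputs, not test data from this patch:

// Sketch: illustrative inputs only, assuming MOZ_AV1 is configured in.
MOZ_ASSERT(IsAV1CodecString(NS_LITERAL_STRING("av1")));
MOZ_ASSERT(IsAV1CodecString(NS_LITERAL_STRING("av01.0.05M.08")));  // "av01" prefix form
MOZ_ASSERT(!IsAV1CodecString(NS_LITERAL_STRING("vp9")));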
UniquePtr<TrackInfo> CreateTrackInfoWithMIMEType(const nsACString& aCodecMIMEType); diff --git a/dom/media/platforms/agnostic/AOMDecoder.cpp b/dom/media/platforms/agnostic/AOMDecoder.cpp new file mode 100644 index 000000000..b5d21375e --- /dev/null +++ b/dom/media/platforms/agnostic/AOMDecoder.cpp @@ -0,0 +1,332 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "AOMDecoder.h" +#include "MediaResult.h" +#include "TimeUnits.h" +#include "aom/aomdx.h" +#include "aom/aom_image.h" +#include "gfx2DGlue.h" +#include "mozilla/PodOperations.h" +#include "mozilla/SyncRunnable.h" +#include "nsError.h" +#include "prsystem.h" + +#include <algorithm> + +#undef LOG +#define LOG(arg, ...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, ("AOMDecoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) + +namespace mozilla { + +using namespace gfx; +using namespace layers; + +AOMDecoder::AOMDecoder(const CreateDecoderParams& aParams) + : mImageContainer(aParams.mImageContainer) + , mTaskQueue(aParams.mTaskQueue) + , mCallback(aParams.mCallback) + , mIsFlushing(false) + , mInfo(aParams.VideoConfig()) +{ + PodZero(&mCodec); +} + +AOMDecoder::~AOMDecoder() +{ +} + +void +AOMDecoder::Shutdown() +{ + aom_codec_destroy(&mCodec); +} + +RefPtr<MediaDataDecoder::InitPromise> +AOMDecoder::Init() +{ + int decode_threads = 2; + + aom_codec_iface_t* dx = aom_codec_av1_dx(); + if (mInfo.mDisplay.width >= 2048) { + decode_threads = 8; + } + else if (mInfo.mDisplay.width >= 1024) { + decode_threads = 4; + } + decode_threads = std::min(decode_threads, PR_GetNumberOfProcessors()); + + aom_codec_dec_cfg_t config; + PodZero(&config); + config.threads = decode_threads; + config.w = config.h = 0; // set after decode + config.allow_lowbitdepth = true; + + aom_codec_flags_t flags = 0; + + if (!dx || aom_codec_dec_init(&mCodec, dx, &config, flags)) { + return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__); + } + return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__); +} + +void +AOMDecoder::Flush() +{ + MOZ_ASSERT(mCallback->OnReaderTaskQueue()); + mIsFlushing = true; + nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([this] () { + // nothing to do for now. + }); + SyncRunnable::DispatchToThread(mTaskQueue, r); + mIsFlushing = false; +} + +// Ported from third_party/aom/tools_common.c. 
+static aom_codec_err_t +highbd_img_downshift(aom_image_t *dst, aom_image_t *src, int down_shift) { + int plane; + if (dst->d_w != src->d_w || dst->d_h != src->d_h) + return AOM_CODEC_INVALID_PARAM; + if (dst->x_chroma_shift != src->x_chroma_shift) + return AOM_CODEC_INVALID_PARAM; + if (dst->y_chroma_shift != src->y_chroma_shift) + return AOM_CODEC_INVALID_PARAM; + if (dst->fmt != (src->fmt & ~AOM_IMG_FMT_HIGHBITDEPTH)) + return AOM_CODEC_INVALID_PARAM; + if (down_shift < 0) + return AOM_CODEC_INVALID_PARAM; + switch (dst->fmt) { + case AOM_IMG_FMT_I420: + case AOM_IMG_FMT_I422: + case AOM_IMG_FMT_I444: + break; + default: + return AOM_CODEC_INVALID_PARAM; + } + switch (src->fmt) { + case AOM_IMG_FMT_I42016: + case AOM_IMG_FMT_I42216: + case AOM_IMG_FMT_I44416: + break; + default: + // We don't support anything that's not 16 bit + return AOM_CODEC_UNSUP_BITSTREAM; + } + for (plane = 0; plane < 3; plane++) { + int w = src->d_w; + int h = src->d_h; + int x, y; + if (plane) { + w = (w + src->x_chroma_shift) >> src->x_chroma_shift; + h = (h + src->y_chroma_shift) >> src->y_chroma_shift; + } + for (y = 0; y < h; y++) { + uint16_t *p_src = + (uint16_t *)(src->planes[plane] + y * src->stride[plane]); + uint8_t *p_dst = + dst->planes[plane] + y * dst->stride[plane]; + for (x = 0; x < w; x++) *p_dst++ = (*p_src++ >> down_shift) & 0xFF; + } + } + return AOM_CODEC_OK; +} + +// UniquePtr dtor wrapper for aom_image_t. +struct AomImageFree { + void operator()(aom_image_t* img) { aom_img_free(img); } +}; + +MediaResult +AOMDecoder::DoDecode(MediaRawData* aSample) +{ + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + +#if defined(DEBUG) + NS_ASSERTION(IsKeyframe(*aSample) == aSample->mKeyframe, + "AOM Decode Keyframe error sample->mKeyframe and si.si_kf out of sync"); +#endif + + if (aom_codec_err_t r = aom_codec_decode(&mCodec, aSample->Data(), aSample->Size(), nullptr)) { + LOG("AOM Decode error: %s", aom_codec_err_to_string(r)); + return MediaResult( + NS_ERROR_DOM_MEDIA_DECODE_ERR, + RESULT_DETAIL("AOM error decoding AV1 sample: %s", aom_codec_err_to_string(r))); + } + + aom_codec_iter_t iter = nullptr; + aom_image_t *img; + UniquePtr<aom_image_t, AomImageFree> img8; + + while ((img = aom_codec_get_frame(&mCodec, &iter))) { + // Track whether the underlying buffer is 8 or 16 bits per channel. + bool highbd = bool(img->fmt & AOM_IMG_FMT_HIGHBITDEPTH); + if (highbd) { + // Downsample images with more than 8 bits per channel. + aom_img_fmt_t fmt8 = static_cast<aom_img_fmt_t>(img->fmt ^ AOM_IMG_FMT_HIGHBITDEPTH); + img8.reset(aom_img_alloc(NULL, fmt8, img->d_w, img->d_h, 16)); + if (img8 == nullptr) { + LOG("Couldn't allocate bitdepth reduction target!"); + return MediaResult( + NS_ERROR_OUT_OF_MEMORY, + RESULT_DETAIL("Couldn't allocate conversion buffer for AV1 frame")); + } + if (aom_codec_err_t r = highbd_img_downshift(img8.get(), img, img->bit_depth - 8)) { + return MediaResult( + NS_ERROR_DOM_MEDIA_DECODE_ERR, + RESULT_DETAIL("Error converting AV1 frame to 8 bits: %s", + aom_codec_err_to_string(r))); + } + // img normally points to storage owned by mCodec, so it is not freed. + // To copy out the contents of img8 we can overwrite img with an alias. + // Since img is assigned at the start of the while loop and img8 is held + // outside that loop, the alias won't outlive the storage it points to. 
+ img = img8.get(); + highbd = false; + } + + NS_ASSERTION(img->fmt == AOM_IMG_FMT_I420 || + img->fmt == AOM_IMG_FMT_I42016 || + img->fmt == AOM_IMG_FMT_I444 || + img->fmt == AOM_IMG_FMT_I44416, + "AV1 image format not I420 or I444"); + + // Chroma shifts are rounded down as per the decoding examples in the SDK + VideoData::YCbCrBuffer b; + b.mPlanes[0].mData = img->planes[0]; + b.mPlanes[0].mStride = img->stride[0]; + b.mPlanes[0].mHeight = img->d_h; + b.mPlanes[0].mWidth = img->d_w; + b.mPlanes[0].mOffset = 0; + b.mPlanes[0].mSkip = highbd ? 1 : 0; + + b.mPlanes[1].mData = img->planes[1]; + b.mPlanes[1].mStride = img->stride[1]; + b.mPlanes[1].mOffset = 0; + b.mPlanes[1].mSkip = highbd ? 1 : 0; + + b.mPlanes[2].mData = img->planes[2]; + b.mPlanes[2].mStride = img->stride[2]; + b.mPlanes[2].mOffset = 0; + b.mPlanes[2].mSkip = highbd ? 1 : 0; + + if (img->fmt == AOM_IMG_FMT_I420 || + img->fmt == AOM_IMG_FMT_I42016) { + b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift; + b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift; + + b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift; + b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift; + } else if (img->fmt == AOM_IMG_FMT_I444) { + b.mPlanes[1].mHeight = img->d_h; + b.mPlanes[1].mWidth = img->d_w; + + b.mPlanes[2].mHeight = img->d_h; + b.mPlanes[2].mWidth = img->d_w; + } else { + LOG("AOM Unknown image format"); + return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, + RESULT_DETAIL("AOM Unknown image format")); + } + + RefPtr<VideoData> v = + VideoData::CreateAndCopyData(mInfo, + mImageContainer, + aSample->mOffset, + aSample->mTime, + aSample->mDuration, + b, + aSample->mKeyframe, + aSample->mTimecode, + mInfo.ScaledImageRect(img->d_w, + img->d_h)); + + if (!v) { + LOG("Image allocation error source %ux%u display %ux%u picture %ux%u", + img->d_w, img->d_h, mInfo.mDisplay.width, mInfo.mDisplay.height, + mInfo.mImage.width, mInfo.mImage.height); + return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__); + } + mCallback->Output(v); + } + return NS_OK; +} + +void +AOMDecoder::ProcessDecode(MediaRawData* aSample) +{ + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + if (mIsFlushing) { + return; + } + MediaResult rv = DoDecode(aSample); + if (NS_FAILED(rv)) { + mCallback->Error(rv); + } else { + mCallback->InputExhausted(); + } +} + +void +AOMDecoder::Input(MediaRawData* aSample) +{ + MOZ_ASSERT(mCallback->OnReaderTaskQueue()); + mTaskQueue->Dispatch(NewRunnableMethod<RefPtr<MediaRawData>>( + this, &AOMDecoder::ProcessDecode, aSample)); +} + +void +AOMDecoder::ProcessDrain() +{ + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + mCallback->DrainComplete(); +} + +void +AOMDecoder::Drain() +{ + MOZ_ASSERT(mCallback->OnReaderTaskQueue()); + mTaskQueue->Dispatch(NewRunnableMethod(this, &AOMDecoder::ProcessDrain)); +} + +/* static */ +bool +AOMDecoder::IsAV1(const nsACString& aMimeType) +{ + return aMimeType.EqualsLiteral("video/webm; codecs=av1") || + aMimeType.EqualsLiteral("video/av1"); +} + +/* static */ +bool +AOMDecoder::IsKeyframe(Span<const uint8_t> aBuffer) { + aom_codec_stream_info_t info; + PodZero(&info); + + aom_codec_peek_stream_info(aom_codec_av1_dx(), + aBuffer.Elements(), + aBuffer.Length(), + &info); + + return bool(info.is_kf); +} + +/* static */ +nsIntSize +AOMDecoder::GetFrameSize(Span<const uint8_t> aBuffer) { + aom_codec_stream_info_t info; + PodZero(&info); + + aom_codec_peek_stream_info(aom_codec_av1_dx(), + aBuffer.Elements(), + aBuffer.Length(), + &info); + + return nsIntSize(info.w, info.h); +} + +} 
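The two static helpers above peek at a compressed AV1 frame without needing a decoder instance, and the operator Span<const uint8_t>() added to MediaRawData earlier in this patch lets a sample be handed to them directly. A minimal sketch of a caller; aSample is assumed to be a demuxed AV1 MediaRawData*, and lastSeenSize is a hypothetical cache used only for illustration:

// Sketch: relies on MediaRawData's new implicit conversion to Span<const uint8_t>.
static nsIntSize lastSeenSize;                          // hypothetical cached value
const bool keyframe = AOMDecoder::IsKeyframe(*aSample);
const nsIntSize frameSize = AOMDecoder::GetFrameSize(*aSample);
if (keyframe && frameSize != lastSeenSize) {
  // Resolution changes are only honoured on keyframes, mirroring what
  // WebMDemuxer does further down in this patch.
  lastSeenSize = frameSize;
}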
// namespace mozilla +#undef LOG diff --git a/dom/media/platforms/agnostic/AOMDecoder.h b/dom/media/platforms/agnostic/AOMDecoder.h new file mode 100644 index 000000000..ec6b1f95a --- /dev/null +++ b/dom/media/platforms/agnostic/AOMDecoder.h @@ -0,0 +1,62 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(AOMDecoder_h_) +#define AOMDecoder_h_ + +#include "PlatformDecoderModule.h" +#include "mozilla/Span.h" + +#include <stdint.h> +#include "aom/aom_decoder.h" + +namespace mozilla { + +class AOMDecoder : public MediaDataDecoder +{ +public: + explicit AOMDecoder(const CreateDecoderParams& aParams); + + RefPtr<InitPromise> Init() override; + void Input(MediaRawData* aSample) override; + void Flush() override; + void Drain() override; + void Shutdown() override; + const char* GetDescriptionName() const override + { + return "libaom (AV1) video decoder"; + } + + // Return true if aMimeType is a one of the strings used + // by our demuxers to identify AV1 streams. + static bool IsAV1(const nsACString& aMimeType); + + // Return true if a sample is a keyframe. + static bool IsKeyframe(Span<const uint8_t> aBuffer); + + // Return the frame dimensions for a sample. + static nsIntSize GetFrameSize(Span<const uint8_t> aBuffer); + +private: + ~AOMDecoder(); + void ProcessDecode(MediaRawData* aSample); + MediaResult DoDecode(MediaRawData* aSample); + void ProcessDrain(); + + const RefPtr<layers::ImageContainer> mImageContainer; + const RefPtr<TaskQueue> mTaskQueue; + MediaDataDecoderCallback* mCallback; + Atomic<bool> mIsFlushing; + + // AOM decoder state + aom_codec_ctx_t mCodec; + + const VideoInfo& mInfo; +}; + +} // namespace mozilla + +#endif // AOMDecoder_h_ diff --git a/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp b/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp index 7bd75b7fe..cf5ed3d22 100644 --- a/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp +++ b/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp @@ -5,6 +5,7 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "AgnosticDecoderModule.h" +#include "MediaPrefs.h" #include "mozilla/Logging.h" #include "OpusDecoder.h" #include "VorbisDecoder.h" @@ -12,6 +13,10 @@ #include "WAVDecoder.h" #include "TheoraDecoder.h" +#ifdef MOZ_AV1 +#include "AOMDecoder.h" +#endif + namespace mozilla { bool @@ -24,6 +29,11 @@ AgnosticDecoderModule::SupportsMimeType(const nsACString& aMimeType, VorbisDataDecoder::IsVorbis(aMimeType) || WaveDataDecoder::IsWave(aMimeType) || TheoraDecoder::IsTheora(aMimeType); +#ifdef MOZ_AV1 + if (MediaPrefs::AV1Enabled()) { + supports |= AOMDecoder::IsAV1(aMimeType); + } +#endif MOZ_LOG(sPDMLog, LogLevel::Debug, ("Agnostic decoder %s requested type", supports ? 
"supports" : "rejects")); return supports; @@ -36,7 +46,14 @@ AgnosticDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams) if (VPXDecoder::IsVPX(aParams.mConfig.mMimeType)) { m = new VPXDecoder(aParams); - } else if (TheoraDecoder::IsTheora(aParams.mConfig.mMimeType)) { + } +#ifdef MOZ_AV1 + else if (AOMDecoder::IsAV1(aParams.mConfig.mMimeType) && + MediaPrefs::AV1Enabled()) { + m = new AOMDecoder(aParams); + } +#endif + else if (TheoraDecoder::IsTheora(aParams.mConfig.mMimeType)) { m = new TheoraDecoder(aParams); } diff --git a/dom/media/platforms/agnostic/VPXDecoder.cpp b/dom/media/platforms/agnostic/VPXDecoder.cpp index 77c81b51b..f2f84487f 100644 --- a/dom/media/platforms/agnostic/VPXDecoder.cpp +++ b/dom/media/platforms/agnostic/VPXDecoder.cpp @@ -22,7 +22,7 @@ namespace mozilla { using namespace gfx; using namespace layers; -static int MimeTypeToCodec(const nsACString& aMimeType) +static VPXDecoder::Codec MimeTypeToCodec(const nsACString& aMimeType) { if (aMimeType.EqualsLiteral("video/webm; codecs=vp8")) { return VPXDecoder::Codec::VP8; @@ -31,7 +31,7 @@ static int MimeTypeToCodec(const nsACString& aMimeType) } else if (aMimeType.EqualsLiteral("video/vp9")) { return VPXDecoder::Codec::VP9; } - return -1; + return VPXDecoder::Codec::Unknown; } VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams) @@ -101,17 +101,10 @@ MediaResult VPXDecoder::DoDecode(MediaRawData* aSample) { MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + #if defined(DEBUG) - vpx_codec_stream_info_t si; - PodZero(&si); - si.sz = sizeof(si); - if (mCodec == Codec::VP8) { - vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), aSample->Data(), aSample->Size(), &si); - } else if (mCodec == Codec::VP9) { - vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), aSample->Data(), aSample->Size(), &si); - } - NS_ASSERTION(bool(si.is_kf) == aSample->mKeyframe, - "VPX Decode Keyframe error sample->mKeyframe and si.si_kf out of sync"); + NS_ASSERTION(IsKeyframe(*aSample, mCodec) == aSample->mKeyframe, + "VPX Decode Keyframe error sample->mKeyframe and sample data out of sync"); #endif if (vpx_codec_err_t r = vpx_codec_decode(&mVPX, aSample->Data(), aSample->Size(), nullptr, 0)) { @@ -249,5 +242,41 @@ VPXDecoder::IsVP9(const nsACString& aMimeType) return IsVPX(aMimeType, VPXDecoder::VP9); } +/* static */ +bool +VPXDecoder::IsKeyframe(Span<const uint8_t> aBuffer, Codec aCodec) +{ + vpx_codec_stream_info_t si; + PodZero(&si); + si.sz = sizeof(si); + + if (aCodec == Codec::VP8) { + vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), aBuffer.Elements(), aBuffer.Length(), &si); + return bool(si.is_kf); + } else if (aCodec == Codec::VP9) { + vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), aBuffer.Elements(), aBuffer.Length(), &si); + return bool(si.is_kf); + } + + return false; +} + +/* static */ +nsIntSize +VPXDecoder::GetFrameSize(Span<const uint8_t> aBuffer, Codec aCodec) +{ + vpx_codec_stream_info_t si; + PodZero(&si); + si.sz = sizeof(si); + + if (aCodec == Codec::VP8) { + vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), aBuffer.Elements(), aBuffer.Length(), &si); + } else if (aCodec == Codec::VP9) { + vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), aBuffer.Elements(), aBuffer.Length(), &si); + } + + return nsIntSize(si.w, si.h); +} + } // namespace mozilla #undef LOG diff --git a/dom/media/platforms/agnostic/VPXDecoder.h b/dom/media/platforms/agnostic/VPXDecoder.h index d420ec069..4e8d83617 100644 --- a/dom/media/platforms/agnostic/VPXDecoder.h +++ b/dom/media/platforms/agnostic/VPXDecoder.h @@ -7,6 +7,7 @@ #define VPXDecoder_h_ 
#include "PlatformDecoderModule.h" +#include "mozilla/Span.h" #include <stdint.h> #define VPX_DONT_DEFINE_STDINT_TYPES @@ -36,7 +37,8 @@ public: enum Codec: uint8_t { VP8 = 1 << 0, - VP9 = 1 << 1 + VP9 = 1 << 1, + Unknown = 1 << 7, }; // Return true if aMimeType is a one of the strings used by our demuxers to @@ -46,6 +48,12 @@ public: static bool IsVP8(const nsACString& aMimeType); static bool IsVP9(const nsACString& aMimeType); + // Return true if a sample is a keyframe for the specified codec. + static bool IsKeyframe(Span<const uint8_t> aBuffer, Codec aCodec); + + // Return the frame dimensions for a sample for the specified codec. + static nsIntSize GetFrameSize(Span<const uint8_t> aBuffer, Codec aCodec); + private: void ProcessDecode(MediaRawData* aSample); MediaResult DoDecode(MediaRawData* aSample); @@ -61,7 +69,7 @@ private: const VideoInfo& mInfo; - const int mCodec; + const Codec mCodec; }; } // namespace mozilla diff --git a/dom/media/platforms/ffmpeg/FFmpegDataDecoder.cpp b/dom/media/platforms/ffmpeg/FFmpegDataDecoder.cpp index 8cb5c8578..8655ce25f 100644 --- a/dom/media/platforms/ffmpeg/FFmpegDataDecoder.cpp +++ b/dom/media/platforms/ffmpeg/FFmpegDataDecoder.cpp @@ -69,12 +69,21 @@ FFmpegDataDecoder<LIBAV_VER>::InitDecoder() mCodecContext->extradata_size = mExtraData->Length(); // FFmpeg may use SIMD instructions to access the data which reads the // data in 32 bytes block. Must ensure we have enough data to read. + uint32_t padding_size = #if LIBAVCODEC_VERSION_MAJOR >= 58 - mExtraData->AppendElements(AV_INPUT_BUFFER_PADDING_SIZE); + AV_INPUT_BUFFER_PADDING_SIZE; #else - mExtraData->AppendElements(FF_INPUT_BUFFER_PADDING_SIZE); + FF_INPUT_BUFFER_PADDING_SIZE; #endif - mCodecContext->extradata = mExtraData->Elements(); + mCodecContext->extradata = static_cast<uint8_t*>( + mLib->av_malloc(mExtraData->Length() + padding_size)); + if (!mCodecContext->extradata) { + return MediaResult(NS_ERROR_OUT_OF_MEMORY, + RESULT_DETAIL("Couldn't init ffmpeg extradata")); + } + memcpy(mCodecContext->extradata, + mExtraData->Elements(), + mExtraData->Length()); } else { mCodecContext->extradata_size = 0; } @@ -165,6 +174,9 @@ FFmpegDataDecoder<LIBAV_VER>::ProcessShutdown() StaticMutexAutoLock mon(sMonitor); if (mCodecContext) { + if (mCodecContext->extradata) { + mLib->av_freep(&mCodecContext->extradata); + } mLib->avcodec_close(mCodecContext); mLib->av_freep(&mCodecContext); #if LIBAVCODEC_VERSION_MAJOR >= 55 diff --git a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp index 6302882a6..e1c326818 100644 --- a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp +++ b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp @@ -7,6 +7,7 @@ #include "MediaPrefs.h" #include "mozilla/PodOperations.h" #include "mozilla/Types.h" +#include "PlatformDecoderModule.h" #include "prlink.h" #define AV_LOG_DEBUG 48 diff --git a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h index d6944a1d8..c6c43a4ae 100644 --- a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h +++ b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h @@ -5,6 +5,7 @@ #ifndef __FFmpegLibWrapper_h__ #define __FFmpegLibWrapper_h__ +#include "mozilla/Attributes.h" #include "mozilla/Types.h" struct AVCodec; @@ -91,4 +92,4 @@ private: } // namespace mozilla -#endif // FFmpegLibWrapper
\ No newline at end of file +#endif // FFmpegLibWrapper diff --git a/dom/media/platforms/ffmpeg/ffvpx/moz.build b/dom/media/platforms/ffmpeg/ffvpx/moz.build index aee58b5b0..c0041a4d4 100644 --- a/dom/media/platforms/ffmpeg/ffvpx/moz.build +++ b/dom/media/platforms/ffmpeg/ffvpx/moz.build @@ -20,7 +20,7 @@ SOURCES += [ ] LOCAL_INCLUDES += [ '..', - '../ffmpeg57/include', + '../ffmpeg58/include', ] if CONFIG['OS_ARCH'] == 'WINNT': diff --git a/dom/media/platforms/moz.build b/dom/media/platforms/moz.build index 3fb0cc842..be13d31c4 100644 --- a/dom/media/platforms/moz.build +++ b/dom/media/platforms/moz.build @@ -55,6 +55,14 @@ if CONFIG['MOZ_FFMPEG']: 'ffmpeg', ] +if CONFIG['MOZ_AV1']: + EXPORTS += [ + 'agnostic/AOMDecoder.h', + ] + UNIFIED_SOURCES += [ + 'agnostic/AOMDecoder.cpp', + ] + if CONFIG['MOZ_APPLEMEDIA']: EXPORTS += [ 'apple/AppleDecoderModule.h', diff --git a/dom/media/test/bug580982.webm b/dom/media/test/bug1377278.webm Binary files differindex 802019f39..802019f39 100644 --- a/dom/media/test/bug580982.webm +++ b/dom/media/test/bug1377278.webm diff --git a/dom/media/test/bug580982.webm^headers^ b/dom/media/test/bug1377278.webm^headers^ index 4030ea1d3..4030ea1d3 100644 --- a/dom/media/test/bug580982.webm^headers^ +++ b/dom/media/test/bug1377278.webm^headers^ diff --git a/dom/media/test/manifest.js b/dom/media/test/manifest.js index c6d533c1b..7e30cc97d 100644 --- a/dom/media/test/manifest.js +++ b/dom/media/test/manifest.js @@ -224,6 +224,9 @@ var gPlayTests = [ // Test playback of a webm file { name:"seek-short.webm", type:"video/webm", duration:0.23 }, + // Test playback of a webm file with 'matroska' doctype + { name:"bug1377278.webm", type:"video/webm", duration:4.0 }, + // Test playback of a WebM file with non-zero start time. { name:"split.webm", type:"video/webm", duration:1.967 }, @@ -532,7 +535,6 @@ var gErrorTests = [ { name:"448636.ogv", type:"video/ogg" }, { name:"bug504843.ogv", type:"video/ogg" }, { name:"bug501279.ogg", type:"audio/ogg" }, - { name:"bug580982.webm", type:"video/webm" }, { name:"bug603918.webm", type:"video/webm" }, { name:"bug604067.webm", type:"video/webm" }, { name:"bogus.duh", type:"bogus/duh" } diff --git a/dom/media/test/mochitest.ini b/dom/media/test/mochitest.ini index ddabf78b6..742ac1b1c 100644 --- a/dom/media/test/mochitest.ini +++ b/dom/media/test/mochitest.ini @@ -382,8 +382,6 @@ support-files = bug556821.ogv^headers^ bug557094.ogv bug557094.ogv^headers^ - bug580982.webm - bug580982.webm^headers^ bug603918.webm bug603918.webm^headers^ bug604067.webm @@ -395,6 +393,8 @@ support-files = bug1301226.wav^headers^ bug1301226-odd.wav bug1301226-odd.wav^headers^ + bug1377278.webm + bug1377278.webm^headers^ can_play_type_dash.js can_play_type_ogg.js can_play_type_wave.js diff --git a/dom/media/webm/WebMDecoder.cpp b/dom/media/webm/WebMDecoder.cpp index b41de6d40..9575d6e42 100644 --- a/dom/media/webm/WebMDecoder.cpp +++ b/dom/media/webm/WebMDecoder.cpp @@ -5,6 +5,10 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "mozilla/Preferences.h" +#ifdef MOZ_AV1 +#include "AOMDecoder.h" +#endif +#include "MediaPrefs.h" #include "MediaDecoderStateMachine.h" #include "WebMDemuxer.h" #include "WebMDecoder.h" @@ -65,6 +69,11 @@ WebMDecoder::CanHandleMediaType(const nsACString& aMIMETypeExcludingCodecs, continue; } +#ifdef MOZ_AV1 + if (MediaPrefs::AV1Enabled() && IsAV1CodecString(codec)) { + continue; + } +#endif // Some unsupported codec. 
return false; } diff --git a/dom/media/webm/WebMDemuxer.cpp b/dom/media/webm/WebMDemuxer.cpp index 20ed71581..013da9b2c 100644 --- a/dom/media/webm/WebMDemuxer.cpp +++ b/dom/media/webm/WebMDemuxer.cpp @@ -8,7 +8,11 @@ #include "MediaDecoderStateMachine.h" #include "AbstractMediaDecoder.h" #include "MediaResource.h" +#ifdef MOZ_AV1 +#include "AOMDecoder.h" +#endif #include "OpusDecoder.h" +#include "VPXDecoder.h" #include "WebMDemuxer.h" #include "WebMBufferedParser.h" #include "gfx2DGlue.h" @@ -24,12 +28,9 @@ #include "mozilla/Sprintf.h" #include <algorithm> +#include <numeric> #include <stdint.h> -#define VPX_DONT_DEFINE_STDINT_TYPES -#include "vpx/vp8dx.h" -#include "vpx/vpx_decoder.h" - #define WEBM_DEBUG(arg, ...) MOZ_LOG(gMediaDemuxerLog, mozilla::LogLevel::Debug, ("WebMDemuxer(%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) extern mozilla::LazyLogModule gMediaDemuxerLog; @@ -322,6 +323,9 @@ WebMDemuxer::ReadMetadata() case NESTEGG_CODEC_VP9: mInfo.mVideo.mMimeType = "video/webm; codecs=vp9"; break; + case NESTEGG_CODEC_AV1: + mInfo.mVideo.mMimeType = "video/webm; codecs=av1"; + break; default: NS_WARNING("Unknown WebM video codec"); return NS_ERROR_FAILURE; @@ -549,7 +553,7 @@ WebMDemuxer::GetTrackCrypto(TrackInfo::TrackType aType, size_t aTrackNumber) { return crypto; } -bool +nsresult WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSamples) { if (mIsMediaSource) { @@ -557,17 +561,18 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl EnsureUpToDateIndex(); } - RefPtr<NesteggPacketHolder> holder(NextPacket(aType)); + RefPtr<NesteggPacketHolder> holder; + nsresult rv = NextPacket(aType, holder); - if (!holder) { - return false; + if (NS_FAILED(rv)) { + return rv; } int r = 0; unsigned int count = 0; r = nestegg_packet_count(holder->Packet(), &count); if (r == -1) { - return false; + return NS_ERROR_DOM_MEDIA_DEMUXER_ERR; } int64_t tstamp = holder->Timestamp(); int64_t duration = holder->Duration(); @@ -578,7 +583,11 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl // video frame. 
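From here on, GetNextPacket (and NextPacket/DemuxPacket below) return an nsresult instead of a bare bool, so callers can distinguish end-of-stream from a genuine demuxer failure. The hunks that follow all use the same pattern, roughly:

// Sketch of the caller-side pattern introduced by this change.
RefPtr<NesteggPacketHolder> next_holder;
nsresult rv = NextPacket(aType, next_holder);
if (NS_FAILED(rv) && rv != NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
  return rv;                // hard demuxer error: propagate
}
if (next_holder) {
  // use the packet; at end-of-stream next_holder stays null and we fall through
}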
int64_t next_tstamp = INT64_MIN; if (aType == TrackInfo::kAudioTrack) { - RefPtr<NesteggPacketHolder> next_holder(NextPacket(aType)); + RefPtr<NesteggPacketHolder> next_holder; + rv = NextPacket(aType, next_holder); + if (NS_FAILED(rv) && rv != NS_ERROR_DOM_MEDIA_END_OF_STREAM) { + return rv; + } if (next_holder) { next_tstamp = next_holder->Timestamp(); PushAudioPacket(next_holder); @@ -593,7 +602,11 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl } mLastAudioFrameTime = Some(tstamp); } else if (aType == TrackInfo::kVideoTrack) { - RefPtr<NesteggPacketHolder> next_holder(NextPacket(aType)); + RefPtr<NesteggPacketHolder> next_holder; + rv = NextPacket(aType, next_holder); + if (NS_FAILED(rv) && rv != NS_ERROR_DOM_MEDIA_END_OF_STREAM) { + return rv; + } if (next_holder) { next_tstamp = next_holder->Timestamp(); PushVideoPacket(next_holder); @@ -610,7 +623,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl } if (mIsMediaSource && next_tstamp == INT64_MIN) { - return false; + return NS_ERROR_DOM_MEDIA_END_OF_STREAM; } int64_t discardPadding = 0; @@ -626,7 +639,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl r = nestegg_packet_data(holder->Packet(), i, &data, &length); if (r == -1) { WEBM_DEBUG("nestegg_packet_data failed r=%d", r); - return false; + return NS_ERROR_DOM_MEDIA_DEMUXER_ERR; } bool isKeyframe = false; if (aType == TrackInfo::kAudioTrack) { @@ -636,29 +649,46 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl // Packet is encrypted, can't peek, use packet info isKeyframe = nestegg_packet_has_keyframe(holder->Packet()) == NESTEGG_PACKET_HAS_KEYFRAME_TRUE; } else { - vpx_codec_stream_info_t si; - PodZero(&si); - si.sz = sizeof(si); + auto sample = MakeSpan(data, length); switch (mVideoCodec) { case NESTEGG_CODEC_VP8: - vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si); + isKeyframe = VPXDecoder::IsKeyframe(sample, VPXDecoder::Codec::VP8); break; case NESTEGG_CODEC_VP9: - vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si); + isKeyframe = VPXDecoder::IsKeyframe(sample, VPXDecoder::Codec::VP9); + break; +#ifdef MOZ_AV1 + case NESTEGG_CODEC_AV1: + isKeyframe = AOMDecoder::IsKeyframe(sample); break; +#endif + default: + NS_WARNING("Cannot detect keyframes in unknown WebM video codec"); + return NS_ERROR_FAILURE; } - isKeyframe = si.is_kf; if (isKeyframe) { - // We only look for resolution changes on keyframes for both VP8 and - // VP9. Other resolution changes are invalid. - if (mLastSeenFrameWidth.isSome() && mLastSeenFrameHeight.isSome() && - (si.w != mLastSeenFrameWidth.value() || - si.h != mLastSeenFrameHeight.value())) { - mInfo.mVideo.mDisplay = nsIntSize(si.w, si.h); + // For both VP8 and VP9, we only look for resolution changes + // on keyframes. Other resolution changes are invalid. 
+ auto dimensions = nsIntSize(0, 0); + switch (mVideoCodec) { + case NESTEGG_CODEC_VP8: + dimensions = VPXDecoder::GetFrameSize(sample, VPXDecoder::Codec::VP8); + break; + case NESTEGG_CODEC_VP9: + dimensions = VPXDecoder::GetFrameSize(sample, VPXDecoder::Codec::VP9); + break; +#ifdef MOZ_AV1 + case NESTEGG_CODEC_AV1: + dimensions = AOMDecoder::GetFrameSize(sample); + break; +#endif + } + if (mLastSeenFrameSize.isSome() + && (dimensions != mLastSeenFrameSize.value())) { + mInfo.mVideo.mDisplay = dimensions; mSharedVideoTrackInfo = new SharedTrackInfo(mInfo.mVideo, ++sStreamSourceID); } - mLastSeenFrameWidth = Some(si.w); - mLastSeenFrameHeight = Some(si.h); + mLastSeenFrameSize = Some(dimensions); } } } @@ -668,7 +698,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl RefPtr<MediaRawData> sample = new MediaRawData(data, length); if (length && !sample->Data()) { // OOM. - return false; + return NS_ERROR_OUT_OF_MEMORY; } sample->mTimecode = tstamp; sample->mTime = tstamp; @@ -721,11 +751,12 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl } aSamples->Push(sample); } - return true; + return NS_OK; } -RefPtr<NesteggPacketHolder> -WebMDemuxer::NextPacket(TrackInfo::TrackType aType) +nsresult +WebMDemuxer::NextPacket(TrackInfo::TrackType aType, + RefPtr<NesteggPacketHolder>& aPacket) { bool isVideo = aType == TrackInfo::kVideoTrack; @@ -734,56 +765,64 @@ WebMDemuxer::NextPacket(TrackInfo::TrackType aType) bool hasType = isVideo ? mHasVideo : mHasAudio; if (!hasType) { - return nullptr; + return NS_ERROR_DOM_MEDIA_DEMUXER_ERR; } // The packet queue for the type that we are interested in. WebMPacketQueue &packets = isVideo ? mVideoPackets : mAudioPackets; if (packets.GetSize() > 0) { - return packets.PopFront(); + aPacket = packets.PopFront(); + return NS_OK; } // Track we are interested in uint32_t ourTrack = isVideo ? 
mVideoTrack : mAudioTrack; do { - RefPtr<NesteggPacketHolder> holder = DemuxPacket(aType); + RefPtr<NesteggPacketHolder> holder; + nsresult rv = DemuxPacket(aType, holder); + if (NS_FAILED(rv)) { + return rv; + } if (!holder) { - return nullptr; + return NS_ERROR_DOM_MEDIA_DEMUXER_ERR; } if (ourTrack == holder->Track()) { - return holder; + aPacket = holder; + return NS_OK; } } while (true); } -RefPtr<NesteggPacketHolder> -WebMDemuxer::DemuxPacket(TrackInfo::TrackType aType) +nsresult +WebMDemuxer::DemuxPacket(TrackInfo::TrackType aType, + RefPtr<NesteggPacketHolder>& aPacket) { nestegg_packet* packet; int r = nestegg_read_packet(Context(aType), &packet); if (r == 0) { nestegg_read_reset(Context(aType)); - return nullptr; + return NS_ERROR_DOM_MEDIA_END_OF_STREAM; } else if (r < 0) { - return nullptr; + return NS_ERROR_DOM_MEDIA_DEMUXER_ERR; } unsigned int track = 0; r = nestegg_packet_track(packet, &track); if (r == -1) { - return nullptr; + return NS_ERROR_DOM_MEDIA_DEMUXER_ERR; } int64_t offset = Resource(aType).Tell(); RefPtr<NesteggPacketHolder> holder = new NesteggPacketHolder(); if (!holder->Init(packet, offset, track, false)) { - return nullptr; + return NS_ERROR_DOM_MEDIA_DEMUXER_ERR; } - return holder; + aPacket = holder; + return NS_OK; } void @@ -943,7 +982,14 @@ WebMTrackDemuxer::Seek(media::TimeUnit aTime) media::TimeUnit seekTime = aTime; mSamples.Reset(); mParent->SeekInternal(mType, aTime); - mParent->GetNextPacket(mType, &mSamples); + nsresult rv = mParent->GetNextPacket(mType, &mSamples); + if (NS_FAILED(rv)) { + if (rv == NS_ERROR_DOM_MEDIA_END_OF_STREAM) { + // Ignore the error for now, the next GetSample will be rejected with EOS. + return SeekPromise::CreateAndResolve(media::TimeUnit(), __func__); + } + return SeekPromise::CreateAndReject(rv, __func__); + } mNeedKeyframe = true; // Check what time we actually seeked to. @@ -956,15 +1002,18 @@ WebMTrackDemuxer::Seek(media::TimeUnit aTime) return SeekPromise::CreateAndResolve(seekTime, __func__); } -RefPtr<MediaRawData> -WebMTrackDemuxer::NextSample() +nsresult +WebMTrackDemuxer::NextSample(RefPtr<MediaRawData>& aData) { - while (mSamples.GetSize() < 1 && mParent->GetNextPacket(mType, &mSamples)) { + nsresult rv; + while (mSamples.GetSize() < 1 && + NS_SUCCEEDED((rv = mParent->GetNextPacket(mType, &mSamples)))) { } if (mSamples.GetSize()) { - return mSamples.PopFront(); + aData = mSamples.PopFront(); + return NS_OK; } - return nullptr; + return rv; } RefPtr<WebMTrackDemuxer::SamplesPromise> @@ -973,9 +1022,12 @@ WebMTrackDemuxer::GetSamples(int32_t aNumSamples) RefPtr<SamplesHolder> samples = new SamplesHolder; MOZ_ASSERT(aNumSamples); + nsresult rv = NS_ERROR_DOM_MEDIA_END_OF_STREAM; + while (aNumSamples) { - RefPtr<MediaRawData> sample(NextSample()); - if (!sample) { + RefPtr<MediaRawData> sample; + rv = NextSample(sample); + if (NS_FAILED(rv)) { break; } if (mNeedKeyframe && !sample->mKeyframe) { @@ -987,7 +1039,7 @@ WebMTrackDemuxer::GetSamples(int32_t aNumSamples) } if (samples->mSamples.IsEmpty()) { - return SamplesPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_END_OF_STREAM, __func__); + return SamplesPromise::CreateAndReject(rv, __func__); } else { UpdateSamples(samples->mSamples); return SamplesPromise::CreateAndResolve(samples, __func__); @@ -1022,7 +1074,8 @@ WebMTrackDemuxer::SetNextKeyFrameTime() } // Demux and buffer frames until we find a keyframe. 
RefPtr<MediaRawData> sample; - while (!foundKeyframe && (sample = NextSample())) { + nsresult rv = NS_OK; + while (!foundKeyframe && NS_SUCCEEDED((rv = NextSample(sample)))) { if (sample->mKeyframe) { frameTime = sample->mTime; foundKeyframe = true; @@ -1104,10 +1157,11 @@ WebMTrackDemuxer::SkipToNextRandomAccessPoint(media::TimeUnit aTimeThreshold) uint32_t parsed = 0; bool found = false; RefPtr<MediaRawData> sample; + nsresult rv = NS_OK; int64_t sampleTime; WEBM_DEBUG("TimeThreshold: %f", aTimeThreshold.ToSeconds()); - while (!found && (sample = NextSample())) { + while (!found && NS_SUCCEEDED((rv = NextSample(sample)))) { parsed++; sampleTime = sample->mTime; if (sample->mKeyframe && sampleTime >= aTimeThreshold.ToMicroseconds()) { @@ -1116,7 +1170,9 @@ WebMTrackDemuxer::SkipToNextRandomAccessPoint(media::TimeUnit aTimeThreshold) mSamples.PushFront(sample.forget()); } } - SetNextKeyFrameTime(); + if (NS_SUCCEEDED(rv)) { + SetNextKeyFrameTime(); + } if (found) { WEBM_DEBUG("next sample: %f (parsed: %d)", media::TimeUnit::FromMicroseconds(sampleTime).ToSeconds(), diff --git a/dom/media/webm/WebMDemuxer.h b/dom/media/webm/WebMDemuxer.h index 6fff38e7d..36d381e57 100644 --- a/dom/media/webm/WebMDemuxer.h +++ b/dom/media/webm/WebMDemuxer.h @@ -8,9 +8,13 @@ #include "nsTArray.h" #include "MediaDataDemuxer.h" +#include "MediaResource.h" #include "NesteggPacketHolder.h" #include "mozilla/Move.h" +#include <deque> +#include <stdint.h> + typedef struct nestegg nestegg; namespace mozilla { @@ -111,7 +115,8 @@ public: bool GetOffsetForTime(uint64_t aTime, int64_t* aOffset); // Demux next WebM packet and append samples to MediaRawDataQueue - bool GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSamples); + nsresult GetNextPacket(TrackInfo::TrackType aType, + MediaRawDataQueue *aSamples); nsresult Reset(TrackInfo::TrackType aType); @@ -175,11 +180,13 @@ private: // Read a packet from the nestegg file. Returns nullptr if all packets for // the particular track have been read. Pass TrackInfo::kVideoTrack or // TrackInfo::kVideoTrack to indicate the type of the packet we want to read. - RefPtr<NesteggPacketHolder> NextPacket(TrackInfo::TrackType aType); + nsresult NextPacket(TrackInfo::TrackType aType, + RefPtr<NesteggPacketHolder>& aPacket); // Internal method that demuxes the next packet from the stream. The caller // is responsible for making sure it doesn't get lost. - RefPtr<NesteggPacketHolder> DemuxPacket(TrackInfo::TrackType aType); + nsresult DemuxPacket(TrackInfo::TrackType aType, + RefPtr<NesteggPacketHolder>& aPacket); // libnestegg audio and video context for webm container. // Access on reader's thread only. @@ -237,8 +244,7 @@ private: int64_t mLastWebMBlockOffset; const bool mIsMediaSource; - Maybe<uint32_t> mLastSeenFrameWidth; - Maybe<uint32_t> mLastSeenFrameHeight; + Maybe<nsIntSize> mLastSeenFrameSize; // This will be populated only if a resolution change occurs, otherwise it // will be left as null so the original metadata is used RefPtr<SharedTrackInfo> mSharedVideoTrackInfo; @@ -276,7 +282,7 @@ private: ~WebMTrackDemuxer(); void UpdateSamples(nsTArray<RefPtr<MediaRawData>>& aSamples); void SetNextKeyFrameTime(); - RefPtr<MediaRawData> NextSample (); + nsresult NextSample(RefPtr<MediaRawData>& aData); RefPtr<WebMDemuxer> mParent; TrackInfo::TrackType mType; UniquePtr<TrackInfo> mInfo; |
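Taken together, AV1 playback is double-gated: the MOZ_AV1 build flag compiles the new code in, and the media.av1.enabled pref (default false, per the MediaPrefs.h hunk) enables it at runtime, with AOMDecoder::IsAV1 and IsAV1CodecString deciding whether a given type names an AV1 stream. A condensed sketch of that gate; CanPlayAV1 is a hypothetical wrapper, not a function added by this patch:

#ifdef MOZ_AV1
// Hypothetical helper condensing the checks this patch spreads across
// AgnosticDecoderModule::SupportsMimeType and WebMDecoder::CanHandleMediaType.
static bool CanPlayAV1(const nsACString& aMimeType)
{
  return MediaPrefs::AV1Enabled() &&      // runtime pref, default false
         AOMDecoder::IsAV1(aMimeType);    // "video/webm; codecs=av1" or "video/av1"
}
#endif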