author     wolfbeast <mcwerewolf@gmail.com>    2018-10-24 11:13:33 +0200
committer  wolfbeast <mcwerewolf@gmail.com>    2018-10-24 11:13:33 +0200
commit     579881ffb4aa0b145c793825cd1b0628e7cd6cdc (patch)
tree       65d59fc6b73f120ac1bc2214d4a8442421076b04 /dom
parent     a02c44648a3f7d6f3904eebba026ce5e6f781bef (diff)
parent     f71c04d814416ebf52dd54109f2d04f1cbd397c0 (diff)
Merge branch 'master' into Sync-weave
Diffstat (limited to 'dom')
-rw-r--r--  dom/media/MediaData.h                                      3
-rw-r--r--  dom/media/MediaPrefs.h                                     3
-rw-r--r--  dom/media/VideoUtils.cpp                                  37
-rw-r--r--  dom/media/VideoUtils.h                                     5
-rw-r--r--  dom/media/platforms/agnostic/AOMDecoder.cpp              332
-rw-r--r--  dom/media/platforms/agnostic/AOMDecoder.h                 62
-rw-r--r--  dom/media/platforms/agnostic/AgnosticDecoderModule.cpp    19
-rw-r--r--  dom/media/platforms/agnostic/VPXDecoder.cpp               53
-rw-r--r--  dom/media/platforms/agnostic/VPXDecoder.h                 12
-rw-r--r--  dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp            1
-rw-r--r--  dom/media/platforms/ffmpeg/FFmpegLibWrapper.h              3
-rw-r--r--  dom/media/platforms/moz.build                              8
-rw-r--r--  dom/media/webm/WebMDecoder.cpp                             9
-rw-r--r--  dom/media/webm/WebMDemuxer.cpp                           166
-rw-r--r--  dom/media/webm/WebMDemuxer.h                              18
15 files changed, 646 insertions, 85 deletions
diff --git a/dom/media/MediaData.h b/dom/media/MediaData.h
index a79aac6ed..905b4c1d9 100644
--- a/dom/media/MediaData.h
+++ b/dom/media/MediaData.h
@@ -14,6 +14,7 @@
#include "nsIMemoryReporter.h"
#include "SharedBuffer.h"
#include "mozilla/RefPtr.h"
+#include "mozilla/Span.h"
#include "mozilla/UniquePtr.h"
#include "mozilla/UniquePtrExtensions.h"
#include "nsTArray.h"
@@ -631,6 +632,8 @@ public:
{
return sizeof(*this) + mBuffer.ComputedSizeOfExcludingThis();
}
+ // Access the buffer as a Span.
+ operator Span<const uint8_t>() { return MakeSpan(Data(), Size()); }
const CryptoSample& mCrypto;
RefPtr<MediaByteBuffer> mExtraData;
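
Note: the conversion operator above lets a MediaRawData be handed directly to anything that takes Span<const uint8_t>; AOMDecoder.cpp below relies on it when peeking at sample data. A minimal sketch of the call pattern:

    // aSample is a MediaRawData*; dereferencing it converts implicitly to
    // Span<const uint8_t> through the operator added in this hunk.
    bool keyframe = AOMDecoder::IsKeyframe(*aSample);
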
diff --git a/dom/media/MediaPrefs.h b/dom/media/MediaPrefs.h
index b237ecd3d..e67796edd 100644
--- a/dom/media/MediaPrefs.h
+++ b/dom/media/MediaPrefs.h
@@ -120,6 +120,9 @@ private:
#ifdef MOZ_FFVPX
DECL_MEDIA_PREF("media.ffvpx.enabled", PDMFFVPXEnabled, bool, true);
#endif
+#ifdef MOZ_AV1
+ DECL_MEDIA_PREF("media.av1.enabled", AV1Enabled, bool, false);
+#endif
#ifdef XP_WIN
DECL_MEDIA_PREF("media.wmf.enabled", PDMWMFEnabled, bool, true);
DECL_MEDIA_PREF("media.wmf.skip-blacklist", PDMWMFSkipBlacklist, bool, false);
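
Note: the new pref defaults to false, so AV1 stays disabled until media.av1.enabled is switched on (e.g. via about:config). The decoder module and WebM code later in this patch gate on it; a sketch of the pattern:

    #ifdef MOZ_AV1
      if (MediaPrefs::AV1Enabled() && AOMDecoder::IsAV1(mimeType)) {
        // AV1 playback is allowed for this MIME type.
      }
    #endif
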
diff --git a/dom/media/VideoUtils.cpp b/dom/media/VideoUtils.cpp
index b1a202c03..c06ba9070 100644
--- a/dom/media/VideoUtils.cpp
+++ b/dom/media/VideoUtils.cpp
@@ -207,8 +207,20 @@ already_AddRefed<SharedThreadPool> GetMediaThreadPool(MediaThreadType aType)
name = "MediaPlayback";
break;
}
- return SharedThreadPool::
+ RefPtr<SharedThreadPool> pool = SharedThreadPool::
Get(nsDependentCString(name), MediaPrefs::MediaThreadPoolDefaultCount());
+
+ // Ensure a larger stack for platform decoder threads
+ if (aType == MediaThreadType::PLATFORM_DECODER) {
+ const uint32_t minStackSize = 512*1024;
+ uint32_t stackSize;
+ MOZ_ALWAYS_SUCCEEDS(pool->GetThreadStackSize(&stackSize));
+ if (stackSize < minStackSize) {
+ MOZ_ALWAYS_SUCCEEDS(pool->SetThreadStackSize(minStackSize));
+ }
+ }
+
+ return pool.forget();
}
bool
@@ -426,6 +438,16 @@ ParseMIMETypeString(const nsAString& aMIMEType,
return ParseCodecsString(codecsStr, aOutCodecs);
}
+template <int N>
+static bool
+StartsWith(const nsACString& string, const char (&prefix)[N])
+{
+ if (N - 1 > string.Length()) {
+ return false;
+ }
+ return memcmp(string.Data(), prefix, N - 1) == 0;
+}
+
bool
IsH264CodecString(const nsAString& aCodec)
{
@@ -458,15 +480,14 @@ IsVP9CodecString(const nsAString& aCodec)
aCodec.EqualsLiteral("vp9.0");
}
-template <int N>
-static bool
-StartsWith(const nsACString& string, const char (&prefix)[N])
+#ifdef MOZ_AV1
+bool
+IsAV1CodecString(const nsAString& aCodec)
{
- if (N - 1 > string.Length()) {
- return false;
- }
- return memcmp(string.Data(), prefix, N - 1) == 0;
+ return aCodec.EqualsLiteral("av1") ||
+ StartsWith(NS_ConvertUTF16toUTF8(aCodec), "av01");
}
+#endif
UniquePtr<TrackInfo>
CreateTrackInfoWithMIMEType(const nsACString& aCodecMIMEType)
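
Note: IsAV1CodecString() accepts both the short name and ISO-BMFF style strings through the relocated StartsWith() helper. Expected behaviour, with illustrative codec strings not taken from the patch:

    IsAV1CodecString(NS_LITERAL_STRING("av1"));            // true: exact match
    IsAV1CodecString(NS_LITERAL_STRING("av01.0.04M.08"));  // true: "av01" prefix
    IsAV1CodecString(NS_LITERAL_STRING("vp9"));            // false
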
diff --git a/dom/media/VideoUtils.h b/dom/media/VideoUtils.h
index 441b63792..aaf0e9903 100644
--- a/dom/media/VideoUtils.h
+++ b/dom/media/VideoUtils.h
@@ -345,6 +345,11 @@ IsVP8CodecString(const nsAString& aCodec);
bool
IsVP9CodecString(const nsAString& aCodec);
+#ifdef MOZ_AV1
+bool
+IsAV1CodecString(const nsAString& aCodec);
+#endif
+
// Try and create a TrackInfo with a given codec MIME type.
UniquePtr<TrackInfo>
CreateTrackInfoWithMIMEType(const nsACString& aCodecMIMEType);
diff --git a/dom/media/platforms/agnostic/AOMDecoder.cpp b/dom/media/platforms/agnostic/AOMDecoder.cpp
new file mode 100644
index 000000000..b5d21375e
--- /dev/null
+++ b/dom/media/platforms/agnostic/AOMDecoder.cpp
@@ -0,0 +1,332 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "AOMDecoder.h"
+#include "MediaResult.h"
+#include "TimeUnits.h"
+#include "aom/aomdx.h"
+#include "aom/aom_image.h"
+#include "gfx2DGlue.h"
+#include "mozilla/PodOperations.h"
+#include "mozilla/SyncRunnable.h"
+#include "nsError.h"
+#include "prsystem.h"
+
+#include <algorithm>
+
+#undef LOG
+#define LOG(arg, ...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, ("AOMDecoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+
+namespace mozilla {
+
+using namespace gfx;
+using namespace layers;
+
+AOMDecoder::AOMDecoder(const CreateDecoderParams& aParams)
+ : mImageContainer(aParams.mImageContainer)
+ , mTaskQueue(aParams.mTaskQueue)
+ , mCallback(aParams.mCallback)
+ , mIsFlushing(false)
+ , mInfo(aParams.VideoConfig())
+{
+ PodZero(&mCodec);
+}
+
+AOMDecoder::~AOMDecoder()
+{
+}
+
+void
+AOMDecoder::Shutdown()
+{
+ aom_codec_destroy(&mCodec);
+}
+
+RefPtr<MediaDataDecoder::InitPromise>
+AOMDecoder::Init()
+{
+ int decode_threads = 2;
+
+ aom_codec_iface_t* dx = aom_codec_av1_dx();
+ if (mInfo.mDisplay.width >= 2048) {
+ decode_threads = 8;
+ }
+ else if (mInfo.mDisplay.width >= 1024) {
+ decode_threads = 4;
+ }
+ decode_threads = std::min(decode_threads, PR_GetNumberOfProcessors());
+
+ aom_codec_dec_cfg_t config;
+ PodZero(&config);
+ config.threads = decode_threads;
+ config.w = config.h = 0; // set after decode
+ config.allow_lowbitdepth = true;
+
+ aom_codec_flags_t flags = 0;
+
+ if (!dx || aom_codec_dec_init(&mCodec, dx, &config, flags)) {
+ return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
+ }
+ return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
+}
+
+void
+AOMDecoder::Flush()
+{
+ MOZ_ASSERT(mCallback->OnReaderTaskQueue());
+ mIsFlushing = true;
+ nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([this] () {
+ // nothing to do for now.
+ });
+ SyncRunnable::DispatchToThread(mTaskQueue, r);
+ mIsFlushing = false;
+}
+
+// Ported from third_party/aom/tools_common.c.
+static aom_codec_err_t
+highbd_img_downshift(aom_image_t *dst, aom_image_t *src, int down_shift) {
+ int plane;
+ if (dst->d_w != src->d_w || dst->d_h != src->d_h)
+ return AOM_CODEC_INVALID_PARAM;
+ if (dst->x_chroma_shift != src->x_chroma_shift)
+ return AOM_CODEC_INVALID_PARAM;
+ if (dst->y_chroma_shift != src->y_chroma_shift)
+ return AOM_CODEC_INVALID_PARAM;
+ if (dst->fmt != (src->fmt & ~AOM_IMG_FMT_HIGHBITDEPTH))
+ return AOM_CODEC_INVALID_PARAM;
+ if (down_shift < 0)
+ return AOM_CODEC_INVALID_PARAM;
+ switch (dst->fmt) {
+ case AOM_IMG_FMT_I420:
+ case AOM_IMG_FMT_I422:
+ case AOM_IMG_FMT_I444:
+ break;
+ default:
+ return AOM_CODEC_INVALID_PARAM;
+ }
+ switch (src->fmt) {
+ case AOM_IMG_FMT_I42016:
+ case AOM_IMG_FMT_I42216:
+ case AOM_IMG_FMT_I44416:
+ break;
+ default:
+ // We don't support anything that's not 16 bit
+ return AOM_CODEC_UNSUP_BITSTREAM;
+ }
+ for (plane = 0; plane < 3; plane++) {
+ int w = src->d_w;
+ int h = src->d_h;
+ int x, y;
+ if (plane) {
+ w = (w + src->x_chroma_shift) >> src->x_chroma_shift;
+ h = (h + src->y_chroma_shift) >> src->y_chroma_shift;
+ }
+ for (y = 0; y < h; y++) {
+ uint16_t *p_src =
+ (uint16_t *)(src->planes[plane] + y * src->stride[plane]);
+ uint8_t *p_dst =
+ dst->planes[plane] + y * dst->stride[plane];
+ for (x = 0; x < w; x++) *p_dst++ = (*p_src++ >> down_shift) & 0xFF;
+ }
+ }
+ return AOM_CODEC_OK;
+}
+
+// UniquePtr dtor wrapper for aom_image_t.
+struct AomImageFree {
+ void operator()(aom_image_t* img) { aom_img_free(img); }
+};
+
+MediaResult
+AOMDecoder::DoDecode(MediaRawData* aSample)
+{
+ MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+
+#if defined(DEBUG)
+ NS_ASSERTION(IsKeyframe(*aSample) == aSample->mKeyframe,
+ "AOM Decode Keyframe error sample->mKeyframe and sample data out of sync");
+#endif
+
+ if (aom_codec_err_t r = aom_codec_decode(&mCodec, aSample->Data(), aSample->Size(), nullptr)) {
+ LOG("AOM Decode error: %s", aom_codec_err_to_string(r));
+ return MediaResult(
+ NS_ERROR_DOM_MEDIA_DECODE_ERR,
+ RESULT_DETAIL("AOM error decoding AV1 sample: %s", aom_codec_err_to_string(r)));
+ }
+
+ aom_codec_iter_t iter = nullptr;
+ aom_image_t *img;
+ UniquePtr<aom_image_t, AomImageFree> img8;
+
+ while ((img = aom_codec_get_frame(&mCodec, &iter))) {
+ // Track whether the underlying buffer is 8 or 16 bits per channel.
+ bool highbd = bool(img->fmt & AOM_IMG_FMT_HIGHBITDEPTH);
+ if (highbd) {
+ // Downsample images with more than 8 bits per channel.
+ aom_img_fmt_t fmt8 = static_cast<aom_img_fmt_t>(img->fmt ^ AOM_IMG_FMT_HIGHBITDEPTH);
+ img8.reset(aom_img_alloc(NULL, fmt8, img->d_w, img->d_h, 16));
+ if (img8 == nullptr) {
+ LOG("Couldn't allocate bitdepth reduction target!");
+ return MediaResult(
+ NS_ERROR_OUT_OF_MEMORY,
+ RESULT_DETAIL("Couldn't allocate conversion buffer for AV1 frame"));
+ }
+ if (aom_codec_err_t r = highbd_img_downshift(img8.get(), img, img->bit_depth - 8)) {
+ return MediaResult(
+ NS_ERROR_DOM_MEDIA_DECODE_ERR,
+ RESULT_DETAIL("Error converting AV1 frame to 8 bits: %s",
+ aom_codec_err_to_string(r)));
+ }
+ // img normally points to storage owned by mCodec, so it is not freed.
+ // To copy out the contents of img8 we can overwrite img with an alias.
+ // Since img is assigned at the start of the while loop and img8 is held
+ // outside that loop, the alias won't outlive the storage it points to.
+ img = img8.get();
+ highbd = false;
+ }
+
+ NS_ASSERTION(img->fmt == AOM_IMG_FMT_I420 ||
+ img->fmt == AOM_IMG_FMT_I42016 ||
+ img->fmt == AOM_IMG_FMT_I444 ||
+ img->fmt == AOM_IMG_FMT_I44416,
+ "AV1 image format not I420 or I444");
+
+ // Chroma shifts are rounded down as per the decoding examples in the SDK
+ VideoData::YCbCrBuffer b;
+ b.mPlanes[0].mData = img->planes[0];
+ b.mPlanes[0].mStride = img->stride[0];
+ b.mPlanes[0].mHeight = img->d_h;
+ b.mPlanes[0].mWidth = img->d_w;
+ b.mPlanes[0].mOffset = 0;
+ b.mPlanes[0].mSkip = highbd ? 1 : 0;
+
+ b.mPlanes[1].mData = img->planes[1];
+ b.mPlanes[1].mStride = img->stride[1];
+ b.mPlanes[1].mOffset = 0;
+ b.mPlanes[1].mSkip = highbd ? 1 : 0;
+
+ b.mPlanes[2].mData = img->planes[2];
+ b.mPlanes[2].mStride = img->stride[2];
+ b.mPlanes[2].mOffset = 0;
+ b.mPlanes[2].mSkip = highbd ? 1 : 0;
+
+ if (img->fmt == AOM_IMG_FMT_I420 ||
+ img->fmt == AOM_IMG_FMT_I42016) {
+ b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
+ b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
+
+ b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
+ b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
+ } else if (img->fmt == AOM_IMG_FMT_I444) {
+ b.mPlanes[1].mHeight = img->d_h;
+ b.mPlanes[1].mWidth = img->d_w;
+
+ b.mPlanes[2].mHeight = img->d_h;
+ b.mPlanes[2].mWidth = img->d_w;
+ } else {
+ LOG("AOM Unknown image format");
+ return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
+ RESULT_DETAIL("AOM Unknown image format"));
+ }
+
+ RefPtr<VideoData> v =
+ VideoData::CreateAndCopyData(mInfo,
+ mImageContainer,
+ aSample->mOffset,
+ aSample->mTime,
+ aSample->mDuration,
+ b,
+ aSample->mKeyframe,
+ aSample->mTimecode,
+ mInfo.ScaledImageRect(img->d_w,
+ img->d_h));
+
+ if (!v) {
+ LOG("Image allocation error source %ux%u display %ux%u picture %ux%u",
+ img->d_w, img->d_h, mInfo.mDisplay.width, mInfo.mDisplay.height,
+ mInfo.mImage.width, mInfo.mImage.height);
+ return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
+ }
+ mCallback->Output(v);
+ }
+ return NS_OK;
+}
+
+void
+AOMDecoder::ProcessDecode(MediaRawData* aSample)
+{
+ MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+ if (mIsFlushing) {
+ return;
+ }
+ MediaResult rv = DoDecode(aSample);
+ if (NS_FAILED(rv)) {
+ mCallback->Error(rv);
+ } else {
+ mCallback->InputExhausted();
+ }
+}
+
+void
+AOMDecoder::Input(MediaRawData* aSample)
+{
+ MOZ_ASSERT(mCallback->OnReaderTaskQueue());
+ mTaskQueue->Dispatch(NewRunnableMethod<RefPtr<MediaRawData>>(
+ this, &AOMDecoder::ProcessDecode, aSample));
+}
+
+void
+AOMDecoder::ProcessDrain()
+{
+ MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+ mCallback->DrainComplete();
+}
+
+void
+AOMDecoder::Drain()
+{
+ MOZ_ASSERT(mCallback->OnReaderTaskQueue());
+ mTaskQueue->Dispatch(NewRunnableMethod(this, &AOMDecoder::ProcessDrain));
+}
+
+/* static */
+bool
+AOMDecoder::IsAV1(const nsACString& aMimeType)
+{
+ return aMimeType.EqualsLiteral("video/webm; codecs=av1") ||
+ aMimeType.EqualsLiteral("video/av1");
+}
+
+/* static */
+bool
+AOMDecoder::IsKeyframe(Span<const uint8_t> aBuffer) {
+ aom_codec_stream_info_t info;
+ PodZero(&info);
+
+ aom_codec_peek_stream_info(aom_codec_av1_dx(),
+ aBuffer.Elements(),
+ aBuffer.Length(),
+ &info);
+
+ return bool(info.is_kf);
+}
+
+/* static */
+nsIntSize
+AOMDecoder::GetFrameSize(Span<const uint8_t> aBuffer) {
+ aom_codec_stream_info_t info;
+ PodZero(&info);
+
+ aom_codec_peek_stream_info(aom_codec_av1_dx(),
+ aBuffer.Elements(),
+ aBuffer.Length(),
+ &info);
+
+ return nsIntSize(info.w, info.h);
+}
+
+} // namespace mozilla
+#undef LOG
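
Note: highbd_img_downshift() reduces 16-bit planes to 8 bits with a plain per-sample right shift, and DoDecode() passes down_shift = img->bit_depth - 8. A worked example of the arithmetic with illustrative values:

    // 10-bit source, so down_shift = 10 - 8 = 2.
    uint16_t white = 1023;                 // full-scale 10-bit luma
    uint8_t  out1  = (white >> 2) & 0xFF;  // 255
    uint16_t grey  = 512;                  // 10-bit mid-grey
    uint8_t  out2  = (grey >> 2) & 0xFF;   // 128
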
diff --git a/dom/media/platforms/agnostic/AOMDecoder.h b/dom/media/platforms/agnostic/AOMDecoder.h
new file mode 100644
index 000000000..ec6b1f95a
--- /dev/null
+++ b/dom/media/platforms/agnostic/AOMDecoder.h
@@ -0,0 +1,62 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(AOMDecoder_h_)
+#define AOMDecoder_h_
+
+#include "PlatformDecoderModule.h"
+#include "mozilla/Span.h"
+
+#include <stdint.h>
+#include "aom/aom_decoder.h"
+
+namespace mozilla {
+
+class AOMDecoder : public MediaDataDecoder
+{
+public:
+ explicit AOMDecoder(const CreateDecoderParams& aParams);
+
+ RefPtr<InitPromise> Init() override;
+ void Input(MediaRawData* aSample) override;
+ void Flush() override;
+ void Drain() override;
+ void Shutdown() override;
+ const char* GetDescriptionName() const override
+ {
+ return "libaom (AV1) video decoder";
+ }
+
+ // Return true if aMimeType is one of the strings used
+ // by our demuxers to identify AV1 streams.
+ static bool IsAV1(const nsACString& aMimeType);
+
+ // Return true if a sample is a keyframe.
+ static bool IsKeyframe(Span<const uint8_t> aBuffer);
+
+ // Return the frame dimensions for a sample.
+ static nsIntSize GetFrameSize(Span<const uint8_t> aBuffer);
+
+private:
+ ~AOMDecoder();
+ void ProcessDecode(MediaRawData* aSample);
+ MediaResult DoDecode(MediaRawData* aSample);
+ void ProcessDrain();
+
+ const RefPtr<layers::ImageContainer> mImageContainer;
+ const RefPtr<TaskQueue> mTaskQueue;
+ MediaDataDecoderCallback* mCallback;
+ Atomic<bool> mIsFlushing;
+
+ // AOM decoder state
+ aom_codec_ctx_t mCodec;
+
+ const VideoInfo& mInfo;
+};
+
+} // namespace mozilla
+
+#endif // AOMDecoder_h_
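
Note: the static IsKeyframe()/GetFrameSize() helpers peek the bitstream without needing a decoder context; the WebM demuxer below calls them on raw packet payloads. A sketch, assuming data/length describe one AV1 packet:

    auto sample = MakeSpan(data, length);              // data: const uint8_t*, length in bytes
    bool isKeyframe = AOMDecoder::IsKeyframe(sample);  // backed by aom_codec_peek_stream_info()
    nsIntSize dims  = AOMDecoder::GetFrameSize(sample);
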
diff --git a/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp b/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp
index 7bd75b7fe..cf5ed3d22 100644
--- a/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp
+++ b/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp
@@ -5,6 +5,7 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "AgnosticDecoderModule.h"
+#include "MediaPrefs.h"
#include "mozilla/Logging.h"
#include "OpusDecoder.h"
#include "VorbisDecoder.h"
@@ -12,6 +13,10 @@
#include "WAVDecoder.h"
#include "TheoraDecoder.h"
+#ifdef MOZ_AV1
+#include "AOMDecoder.h"
+#endif
+
namespace mozilla {
bool
@@ -24,6 +29,11 @@ AgnosticDecoderModule::SupportsMimeType(const nsACString& aMimeType,
VorbisDataDecoder::IsVorbis(aMimeType) ||
WaveDataDecoder::IsWave(aMimeType) ||
TheoraDecoder::IsTheora(aMimeType);
+#ifdef MOZ_AV1
+ if (MediaPrefs::AV1Enabled()) {
+ supports |= AOMDecoder::IsAV1(aMimeType);
+ }
+#endif
MOZ_LOG(sPDMLog, LogLevel::Debug, ("Agnostic decoder %s requested type",
supports ? "supports" : "rejects"));
return supports;
@@ -36,7 +46,14 @@ AgnosticDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
if (VPXDecoder::IsVPX(aParams.mConfig.mMimeType)) {
m = new VPXDecoder(aParams);
- } else if (TheoraDecoder::IsTheora(aParams.mConfig.mMimeType)) {
+ }
+#ifdef MOZ_AV1
+ else if (AOMDecoder::IsAV1(aParams.mConfig.mMimeType) &&
+ MediaPrefs::AV1Enabled()) {
+ m = new AOMDecoder(aParams);
+ }
+#endif
+ else if (TheoraDecoder::IsTheora(aParams.mConfig.mMimeType)) {
m = new TheoraDecoder(aParams);
}
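
Note: with MOZ_AV1 built and the pref enabled, the agnostic module now claims the WebM AV1 MIME type. A sketch of the effect; passing nullptr for the diagnostics argument is an assumption about the full SupportsMimeType() signature:

    AgnosticDecoderModule module;
    bool canPlay = module.SupportsMimeType(
      NS_LITERAL_CSTRING("video/webm; codecs=av1"), nullptr);
    // true only when MOZ_AV1 is defined and media.av1.enabled is true
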
diff --git a/dom/media/platforms/agnostic/VPXDecoder.cpp b/dom/media/platforms/agnostic/VPXDecoder.cpp
index 77c81b51b..f2f84487f 100644
--- a/dom/media/platforms/agnostic/VPXDecoder.cpp
+++ b/dom/media/platforms/agnostic/VPXDecoder.cpp
@@ -22,7 +22,7 @@ namespace mozilla {
using namespace gfx;
using namespace layers;
-static int MimeTypeToCodec(const nsACString& aMimeType)
+static VPXDecoder::Codec MimeTypeToCodec(const nsACString& aMimeType)
{
if (aMimeType.EqualsLiteral("video/webm; codecs=vp8")) {
return VPXDecoder::Codec::VP8;
@@ -31,7 +31,7 @@ static int MimeTypeToCodec(const nsACString& aMimeType)
} else if (aMimeType.EqualsLiteral("video/vp9")) {
return VPXDecoder::Codec::VP9;
}
- return -1;
+ return VPXDecoder::Codec::Unknown;
}
VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
@@ -101,17 +101,10 @@ MediaResult
VPXDecoder::DoDecode(MediaRawData* aSample)
{
MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+
#if defined(DEBUG)
- vpx_codec_stream_info_t si;
- PodZero(&si);
- si.sz = sizeof(si);
- if (mCodec == Codec::VP8) {
- vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), aSample->Data(), aSample->Size(), &si);
- } else if (mCodec == Codec::VP9) {
- vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), aSample->Data(), aSample->Size(), &si);
- }
- NS_ASSERTION(bool(si.is_kf) == aSample->mKeyframe,
- "VPX Decode Keyframe error sample->mKeyframe and si.si_kf out of sync");
+ NS_ASSERTION(IsKeyframe(*aSample, mCodec) == aSample->mKeyframe,
+ "VPX Decode Keyframe error sample->mKeyframe and sample data out of sync");
#endif
if (vpx_codec_err_t r = vpx_codec_decode(&mVPX, aSample->Data(), aSample->Size(), nullptr, 0)) {
@@ -249,5 +242,41 @@ VPXDecoder::IsVP9(const nsACString& aMimeType)
return IsVPX(aMimeType, VPXDecoder::VP9);
}
+/* static */
+bool
+VPXDecoder::IsKeyframe(Span<const uint8_t> aBuffer, Codec aCodec)
+{
+ vpx_codec_stream_info_t si;
+ PodZero(&si);
+ si.sz = sizeof(si);
+
+ if (aCodec == Codec::VP8) {
+ vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), aBuffer.Elements(), aBuffer.Length(), &si);
+ return bool(si.is_kf);
+ } else if (aCodec == Codec::VP9) {
+ vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), aBuffer.Elements(), aBuffer.Length(), &si);
+ return bool(si.is_kf);
+ }
+
+ return false;
+}
+
+/* static */
+nsIntSize
+VPXDecoder::GetFrameSize(Span<const uint8_t> aBuffer, Codec aCodec)
+{
+ vpx_codec_stream_info_t si;
+ PodZero(&si);
+ si.sz = sizeof(si);
+
+ if (aCodec == Codec::VP8) {
+ vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), aBuffer.Elements(), aBuffer.Length(), &si);
+ } else if (aCodec == Codec::VP9) {
+ vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), aBuffer.Elements(), aBuffer.Length(), &si);
+ }
+
+ return nsIntSize(si.w, si.h);
+}
+
} // namespace mozilla
#undef LOG
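
Note: the new VPX static helpers mirror the AOM ones and replace the open-coded vpx_codec_peek_stream_info() calls that WebMDemuxer.cpp drops below. A sketch of a call site:

    auto sample = MakeSpan(data, length);
    bool kf      = VPXDecoder::IsKeyframe(sample, VPXDecoder::Codec::VP9);
    nsIntSize sz = VPXDecoder::GetFrameSize(sample, VPXDecoder::Codec::VP9);
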
diff --git a/dom/media/platforms/agnostic/VPXDecoder.h b/dom/media/platforms/agnostic/VPXDecoder.h
index d420ec069..4e8d83617 100644
--- a/dom/media/platforms/agnostic/VPXDecoder.h
+++ b/dom/media/platforms/agnostic/VPXDecoder.h
@@ -7,6 +7,7 @@
#define VPXDecoder_h_
#include "PlatformDecoderModule.h"
+#include "mozilla/Span.h"
#include <stdint.h>
#define VPX_DONT_DEFINE_STDINT_TYPES
@@ -36,7 +37,8 @@ public:
enum Codec: uint8_t {
VP8 = 1 << 0,
- VP9 = 1 << 1
+ VP9 = 1 << 1,
+ Unknown = 1 << 7,
};
// Return true if aMimeType is one of the strings used by our demuxers to
@@ -46,6 +48,12 @@ public:
static bool IsVP8(const nsACString& aMimeType);
static bool IsVP9(const nsACString& aMimeType);
+ // Return true if a sample is a keyframe for the specified codec.
+ static bool IsKeyframe(Span<const uint8_t> aBuffer, Codec aCodec);
+
+ // Return the frame dimensions for a sample for the specified codec.
+ static nsIntSize GetFrameSize(Span<const uint8_t> aBuffer, Codec aCodec);
+
private:
void ProcessDecode(MediaRawData* aSample);
MediaResult DoDecode(MediaRawData* aSample);
@@ -61,7 +69,7 @@ private:
const VideoInfo& mInfo;
- const int mCodec;
+ const Codec mCodec;
};
} // namespace mozilla
diff --git a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp
index 6302882a6..e1c326818 100644
--- a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp
+++ b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp
@@ -7,6 +7,7 @@
#include "MediaPrefs.h"
#include "mozilla/PodOperations.h"
#include "mozilla/Types.h"
+#include "PlatformDecoderModule.h"
#include "prlink.h"
#define AV_LOG_DEBUG 48
diff --git a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h
index d6944a1d8..c6c43a4ae 100644
--- a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h
+++ b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.h
@@ -5,6 +5,7 @@
#ifndef __FFmpegLibWrapper_h__
#define __FFmpegLibWrapper_h__
+#include "mozilla/Attributes.h"
#include "mozilla/Types.h"
struct AVCodec;
@@ -91,4 +92,4 @@ private:
} // namespace mozilla
-#endif // FFmpegLibWrapper
\ No newline at end of file
+#endif // FFmpegLibWrapper
diff --git a/dom/media/platforms/moz.build b/dom/media/platforms/moz.build
index 3fb0cc842..be13d31c4 100644
--- a/dom/media/platforms/moz.build
+++ b/dom/media/platforms/moz.build
@@ -55,6 +55,14 @@ if CONFIG['MOZ_FFMPEG']:
'ffmpeg',
]
+if CONFIG['MOZ_AV1']:
+ EXPORTS += [
+ 'agnostic/AOMDecoder.h',
+ ]
+ UNIFIED_SOURCES += [
+ 'agnostic/AOMDecoder.cpp',
+ ]
+
if CONFIG['MOZ_APPLEMEDIA']:
EXPORTS += [
'apple/AppleDecoderModule.h',
diff --git a/dom/media/webm/WebMDecoder.cpp b/dom/media/webm/WebMDecoder.cpp
index b41de6d40..9575d6e42 100644
--- a/dom/media/webm/WebMDecoder.cpp
+++ b/dom/media/webm/WebMDecoder.cpp
@@ -5,6 +5,10 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/Preferences.h"
+#ifdef MOZ_AV1
+#include "AOMDecoder.h"
+#endif
+#include "MediaPrefs.h"
#include "MediaDecoderStateMachine.h"
#include "WebMDemuxer.h"
#include "WebMDecoder.h"
@@ -65,6 +69,11 @@ WebMDecoder::CanHandleMediaType(const nsACString& aMIMETypeExcludingCodecs,
continue;
}
+#ifdef MOZ_AV1
+ if (MediaPrefs::AV1Enabled() && IsAV1CodecString(codec)) {
+ continue;
+ }
+#endif
// Some unsupported codec.
return false;
}
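
Note: CanHandleMediaType() now skips over AV1 codec entries instead of rejecting the whole type when the pref is on. A hypothetical call, assuming the second parameter is the codecs string as in the surrounding code:

    bool ok = WebMDecoder::CanHandleMediaType(
      NS_LITERAL_CSTRING("video/webm"),
      NS_LITERAL_STRING("av01.0.04M.08"));
    // true with MOZ_AV1 and media.av1.enabled; false otherwise
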
diff --git a/dom/media/webm/WebMDemuxer.cpp b/dom/media/webm/WebMDemuxer.cpp
index 20ed71581..013da9b2c 100644
--- a/dom/media/webm/WebMDemuxer.cpp
+++ b/dom/media/webm/WebMDemuxer.cpp
@@ -8,7 +8,11 @@
#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "MediaResource.h"
+#ifdef MOZ_AV1
+#include "AOMDecoder.h"
+#endif
#include "OpusDecoder.h"
+#include "VPXDecoder.h"
#include "WebMDemuxer.h"
#include "WebMBufferedParser.h"
#include "gfx2DGlue.h"
@@ -24,12 +28,9 @@
#include "mozilla/Sprintf.h"
#include <algorithm>
+#include <numeric>
#include <stdint.h>
-#define VPX_DONT_DEFINE_STDINT_TYPES
-#include "vpx/vp8dx.h"
-#include "vpx/vpx_decoder.h"
-
#define WEBM_DEBUG(arg, ...) MOZ_LOG(gMediaDemuxerLog, mozilla::LogLevel::Debug, ("WebMDemuxer(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
extern mozilla::LazyLogModule gMediaDemuxerLog;
@@ -322,6 +323,9 @@ WebMDemuxer::ReadMetadata()
case NESTEGG_CODEC_VP9:
mInfo.mVideo.mMimeType = "video/webm; codecs=vp9";
break;
+ case NESTEGG_CODEC_AV1:
+ mInfo.mVideo.mMimeType = "video/webm; codecs=av1";
+ break;
default:
NS_WARNING("Unknown WebM video codec");
return NS_ERROR_FAILURE;
@@ -549,7 +553,7 @@ WebMDemuxer::GetTrackCrypto(TrackInfo::TrackType aType, size_t aTrackNumber) {
return crypto;
}
-bool
+nsresult
WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSamples)
{
if (mIsMediaSource) {
@@ -557,17 +561,18 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
EnsureUpToDateIndex();
}
- RefPtr<NesteggPacketHolder> holder(NextPacket(aType));
+ RefPtr<NesteggPacketHolder> holder;
+ nsresult rv = NextPacket(aType, holder);
- if (!holder) {
- return false;
+ if (NS_FAILED(rv)) {
+ return rv;
}
int r = 0;
unsigned int count = 0;
r = nestegg_packet_count(holder->Packet(), &count);
if (r == -1) {
- return false;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
int64_t tstamp = holder->Timestamp();
int64_t duration = holder->Duration();
@@ -578,7 +583,11 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
// video frame.
int64_t next_tstamp = INT64_MIN;
if (aType == TrackInfo::kAudioTrack) {
- RefPtr<NesteggPacketHolder> next_holder(NextPacket(aType));
+ RefPtr<NesteggPacketHolder> next_holder;
+ rv = NextPacket(aType, next_holder);
+ if (NS_FAILED(rv) && rv != NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+ return rv;
+ }
if (next_holder) {
next_tstamp = next_holder->Timestamp();
PushAudioPacket(next_holder);
@@ -593,7 +602,11 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
}
mLastAudioFrameTime = Some(tstamp);
} else if (aType == TrackInfo::kVideoTrack) {
- RefPtr<NesteggPacketHolder> next_holder(NextPacket(aType));
+ RefPtr<NesteggPacketHolder> next_holder;
+ rv = NextPacket(aType, next_holder);
+ if (NS_FAILED(rv) && rv != NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+ return rv;
+ }
if (next_holder) {
next_tstamp = next_holder->Timestamp();
PushVideoPacket(next_holder);
@@ -610,7 +623,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
}
if (mIsMediaSource && next_tstamp == INT64_MIN) {
- return false;
+ return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
}
int64_t discardPadding = 0;
@@ -626,7 +639,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
r = nestegg_packet_data(holder->Packet(), i, &data, &length);
if (r == -1) {
WEBM_DEBUG("nestegg_packet_data failed r=%d", r);
- return false;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
bool isKeyframe = false;
if (aType == TrackInfo::kAudioTrack) {
@@ -636,29 +649,46 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
// Packet is encrypted, can't peek, use packet info
isKeyframe = nestegg_packet_has_keyframe(holder->Packet()) == NESTEGG_PACKET_HAS_KEYFRAME_TRUE;
} else {
- vpx_codec_stream_info_t si;
- PodZero(&si);
- si.sz = sizeof(si);
+ auto sample = MakeSpan(data, length);
switch (mVideoCodec) {
case NESTEGG_CODEC_VP8:
- vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
+ isKeyframe = VPXDecoder::IsKeyframe(sample, VPXDecoder::Codec::VP8);
break;
case NESTEGG_CODEC_VP9:
- vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
+ isKeyframe = VPXDecoder::IsKeyframe(sample, VPXDecoder::Codec::VP9);
+ break;
+#ifdef MOZ_AV1
+ case NESTEGG_CODEC_AV1:
+ isKeyframe = AOMDecoder::IsKeyframe(sample);
break;
+#endif
+ default:
+ NS_WARNING("Cannot detect keyframes in unknown WebM video codec");
+ return NS_ERROR_FAILURE;
}
- isKeyframe = si.is_kf;
if (isKeyframe) {
- // We only look for resolution changes on keyframes for both VP8 and
- // VP9. Other resolution changes are invalid.
- if (mLastSeenFrameWidth.isSome() && mLastSeenFrameHeight.isSome() &&
- (si.w != mLastSeenFrameWidth.value() ||
- si.h != mLastSeenFrameHeight.value())) {
- mInfo.mVideo.mDisplay = nsIntSize(si.w, si.h);
+ // For both VP8 and VP9, we only look for resolution changes
+ // on keyframes. Other resolution changes are invalid.
+ auto dimensions = nsIntSize(0, 0);
+ switch (mVideoCodec) {
+ case NESTEGG_CODEC_VP8:
+ dimensions = VPXDecoder::GetFrameSize(sample, VPXDecoder::Codec::VP8);
+ break;
+ case NESTEGG_CODEC_VP9:
+ dimensions = VPXDecoder::GetFrameSize(sample, VPXDecoder::Codec::VP9);
+ break;
+#ifdef MOZ_AV1
+ case NESTEGG_CODEC_AV1:
+ dimensions = AOMDecoder::GetFrameSize(sample);
+ break;
+#endif
+ }
+ if (mLastSeenFrameSize.isSome()
+ && (dimensions != mLastSeenFrameSize.value())) {
+ mInfo.mVideo.mDisplay = dimensions;
mSharedVideoTrackInfo = new SharedTrackInfo(mInfo.mVideo, ++sStreamSourceID);
}
- mLastSeenFrameWidth = Some(si.w);
- mLastSeenFrameHeight = Some(si.h);
+ mLastSeenFrameSize = Some(dimensions);
}
}
}
@@ -668,7 +698,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
RefPtr<MediaRawData> sample = new MediaRawData(data, length);
if (length && !sample->Data()) {
// OOM.
- return false;
+ return NS_ERROR_OUT_OF_MEMORY;
}
sample->mTimecode = tstamp;
sample->mTime = tstamp;
@@ -721,11 +751,12 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
}
aSamples->Push(sample);
}
- return true;
+ return NS_OK;
}
-RefPtr<NesteggPacketHolder>
-WebMDemuxer::NextPacket(TrackInfo::TrackType aType)
+nsresult
+WebMDemuxer::NextPacket(TrackInfo::TrackType aType,
+ RefPtr<NesteggPacketHolder>& aPacket)
{
bool isVideo = aType == TrackInfo::kVideoTrack;
@@ -734,56 +765,64 @@ WebMDemuxer::NextPacket(TrackInfo::TrackType aType)
bool hasType = isVideo ? mHasVideo : mHasAudio;
if (!hasType) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
// The packet queue for the type that we are interested in.
WebMPacketQueue &packets = isVideo ? mVideoPackets : mAudioPackets;
if (packets.GetSize() > 0) {
- return packets.PopFront();
+ aPacket = packets.PopFront();
+ return NS_OK;
}
// Track we are interested in
uint32_t ourTrack = isVideo ? mVideoTrack : mAudioTrack;
do {
- RefPtr<NesteggPacketHolder> holder = DemuxPacket(aType);
+ RefPtr<NesteggPacketHolder> holder;
+ nsresult rv = DemuxPacket(aType, holder);
+ if (NS_FAILED(rv)) {
+ return rv;
+ }
if (!holder) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
if (ourTrack == holder->Track()) {
- return holder;
+ aPacket = holder;
+ return NS_OK;
}
} while (true);
}
-RefPtr<NesteggPacketHolder>
-WebMDemuxer::DemuxPacket(TrackInfo::TrackType aType)
+nsresult
+WebMDemuxer::DemuxPacket(TrackInfo::TrackType aType,
+ RefPtr<NesteggPacketHolder>& aPacket)
{
nestegg_packet* packet;
int r = nestegg_read_packet(Context(aType), &packet);
if (r == 0) {
nestegg_read_reset(Context(aType));
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
} else if (r < 0) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
unsigned int track = 0;
r = nestegg_packet_track(packet, &track);
if (r == -1) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
int64_t offset = Resource(aType).Tell();
RefPtr<NesteggPacketHolder> holder = new NesteggPacketHolder();
if (!holder->Init(packet, offset, track, false)) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
- return holder;
+ aPacket = holder;
+ return NS_OK;
}
void
@@ -943,7 +982,14 @@ WebMTrackDemuxer::Seek(media::TimeUnit aTime)
media::TimeUnit seekTime = aTime;
mSamples.Reset();
mParent->SeekInternal(mType, aTime);
- mParent->GetNextPacket(mType, &mSamples);
+ nsresult rv = mParent->GetNextPacket(mType, &mSamples);
+ if (NS_FAILED(rv)) {
+ if (rv == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+ // Ignore the error for now, the next GetSample will be rejected with EOS.
+ return SeekPromise::CreateAndResolve(media::TimeUnit(), __func__);
+ }
+ return SeekPromise::CreateAndReject(rv, __func__);
+ }
mNeedKeyframe = true;
// Check what time we actually seeked to.
@@ -956,15 +1002,18 @@ WebMTrackDemuxer::Seek(media::TimeUnit aTime)
return SeekPromise::CreateAndResolve(seekTime, __func__);
}
-RefPtr<MediaRawData>
-WebMTrackDemuxer::NextSample()
+nsresult
+WebMTrackDemuxer::NextSample(RefPtr<MediaRawData>& aData)
{
- while (mSamples.GetSize() < 1 && mParent->GetNextPacket(mType, &mSamples)) {
+ nsresult rv;
+ while (mSamples.GetSize() < 1 &&
+ NS_SUCCEEDED((rv = mParent->GetNextPacket(mType, &mSamples)))) {
}
if (mSamples.GetSize()) {
- return mSamples.PopFront();
+ aData = mSamples.PopFront();
+ return NS_OK;
}
- return nullptr;
+ return rv;
}
RefPtr<WebMTrackDemuxer::SamplesPromise>
@@ -973,9 +1022,12 @@ WebMTrackDemuxer::GetSamples(int32_t aNumSamples)
RefPtr<SamplesHolder> samples = new SamplesHolder;
MOZ_ASSERT(aNumSamples);
+ nsresult rv = NS_ERROR_DOM_MEDIA_END_OF_STREAM;
+
while (aNumSamples) {
- RefPtr<MediaRawData> sample(NextSample());
- if (!sample) {
+ RefPtr<MediaRawData> sample;
+ rv = NextSample(sample);
+ if (NS_FAILED(rv)) {
break;
}
if (mNeedKeyframe && !sample->mKeyframe) {
@@ -987,7 +1039,7 @@ WebMTrackDemuxer::GetSamples(int32_t aNumSamples)
}
if (samples->mSamples.IsEmpty()) {
- return SamplesPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_END_OF_STREAM, __func__);
+ return SamplesPromise::CreateAndReject(rv, __func__);
} else {
UpdateSamples(samples->mSamples);
return SamplesPromise::CreateAndResolve(samples, __func__);
@@ -1022,7 +1074,8 @@ WebMTrackDemuxer::SetNextKeyFrameTime()
}
// Demux and buffer frames until we find a keyframe.
RefPtr<MediaRawData> sample;
- while (!foundKeyframe && (sample = NextSample())) {
+ nsresult rv = NS_OK;
+ while (!foundKeyframe && NS_SUCCEEDED((rv = NextSample(sample)))) {
if (sample->mKeyframe) {
frameTime = sample->mTime;
foundKeyframe = true;
@@ -1104,10 +1157,11 @@ WebMTrackDemuxer::SkipToNextRandomAccessPoint(media::TimeUnit aTimeThreshold)
uint32_t parsed = 0;
bool found = false;
RefPtr<MediaRawData> sample;
+ nsresult rv = NS_OK;
int64_t sampleTime;
WEBM_DEBUG("TimeThreshold: %f", aTimeThreshold.ToSeconds());
- while (!found && (sample = NextSample())) {
+ while (!found && NS_SUCCEEDED((rv = NextSample(sample)))) {
parsed++;
sampleTime = sample->mTime;
if (sample->mKeyframe && sampleTime >= aTimeThreshold.ToMicroseconds()) {
@@ -1116,7 +1170,9 @@ WebMTrackDemuxer::SkipToNextRandomAccessPoint(media::TimeUnit aTimeThreshold)
mSamples.PushFront(sample.forget());
}
}
- SetNextKeyFrameTime();
+ if (NS_SUCCEEDED(rv)) {
+ SetNextKeyFrameTime();
+ }
if (found) {
WEBM_DEBUG("next sample: %f (parsed: %d)",
media::TimeUnit::FromMicroseconds(sampleTime).ToSeconds(),
diff --git a/dom/media/webm/WebMDemuxer.h b/dom/media/webm/WebMDemuxer.h
index 6fff38e7d..36d381e57 100644
--- a/dom/media/webm/WebMDemuxer.h
+++ b/dom/media/webm/WebMDemuxer.h
@@ -8,9 +8,13 @@
#include "nsTArray.h"
#include "MediaDataDemuxer.h"
+#include "MediaResource.h"
#include "NesteggPacketHolder.h"
#include "mozilla/Move.h"
+#include <deque>
+#include <stdint.h>
+
typedef struct nestegg nestegg;
namespace mozilla {
@@ -111,7 +115,8 @@ public:
bool GetOffsetForTime(uint64_t aTime, int64_t* aOffset);
// Demux next WebM packet and append samples to MediaRawDataQueue
- bool GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSamples);
+ nsresult GetNextPacket(TrackInfo::TrackType aType,
+ MediaRawDataQueue *aSamples);
nsresult Reset(TrackInfo::TrackType aType);
@@ -175,11 +180,13 @@ private:
// Read a packet from the nestegg file. Returns nullptr if all packets for
// the particular track have been read. Pass TrackInfo::kAudioTrack or
// TrackInfo::kVideoTrack to indicate the type of the packet we want to read.
- RefPtr<NesteggPacketHolder> NextPacket(TrackInfo::TrackType aType);
+ nsresult NextPacket(TrackInfo::TrackType aType,
+ RefPtr<NesteggPacketHolder>& aPacket);
// Internal method that demuxes the next packet from the stream. The caller
// is responsible for making sure it doesn't get lost.
- RefPtr<NesteggPacketHolder> DemuxPacket(TrackInfo::TrackType aType);
+ nsresult DemuxPacket(TrackInfo::TrackType aType,
+ RefPtr<NesteggPacketHolder>& aPacket);
// libnestegg audio and video context for webm container.
// Access on reader's thread only.
@@ -237,8 +244,7 @@ private:
int64_t mLastWebMBlockOffset;
const bool mIsMediaSource;
- Maybe<uint32_t> mLastSeenFrameWidth;
- Maybe<uint32_t> mLastSeenFrameHeight;
+ Maybe<nsIntSize> mLastSeenFrameSize;
// This will be populated only if a resolution change occurs, otherwise it
// will be left as null so the original metadata is used
RefPtr<SharedTrackInfo> mSharedVideoTrackInfo;
@@ -276,7 +282,7 @@ private:
~WebMTrackDemuxer();
void UpdateSamples(nsTArray<RefPtr<MediaRawData>>& aSamples);
void SetNextKeyFrameTime();
- RefPtr<MediaRawData> NextSample ();
+ nsresult NextSample(RefPtr<MediaRawData>& aData);
RefPtr<WebMDemuxer> mParent;
TrackInfo::TrackType mType;
UniquePtr<TrackInfo> mInfo;
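
Note: the demuxer entry points now return nsresult, so callers can tell end-of-stream apart from genuine demuxing failures. A sketch of the calling pattern, following WebMTrackDemuxer::Seek() above:

    nsresult rv = mParent->GetNextPacket(mType, &mSamples);
    if (rv == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
      // Not a hard failure: the next GetSamples() call will reject with EOS.
    } else if (NS_FAILED(rv)) {
      // Genuine demuxer or OOM error: propagate rv, e.g. reject the promise.
    }
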