Diffstat (limited to 'dom/media/webm/WebMDemuxer.cpp')
-rw-r--r--  dom/media/webm/WebMDemuxer.cpp  190
1 file changed, 135 insertions(+), 55 deletions(-)
diff --git a/dom/media/webm/WebMDemuxer.cpp b/dom/media/webm/WebMDemuxer.cpp
index 20ed71581..84b4b506e 100644
--- a/dom/media/webm/WebMDemuxer.cpp
+++ b/dom/media/webm/WebMDemuxer.cpp
@@ -8,7 +8,11 @@
#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "MediaResource.h"
+#ifdef MOZ_AV1
+#include "AOMDecoder.h"
+#endif
#include "OpusDecoder.h"
+#include "VPXDecoder.h"
#include "WebMDemuxer.h"
#include "WebMBufferedParser.h"
#include "gfx2DGlue.h"
@@ -24,12 +28,9 @@
#include "mozilla/Sprintf.h"
#include <algorithm>
+#include <numeric>
#include <stdint.h>
-#define VPX_DONT_DEFINE_STDINT_TYPES
-#include "vpx/vp8dx.h"
-#include "vpx/vpx_decoder.h"
-
#define WEBM_DEBUG(arg, ...) MOZ_LOG(gMediaDemuxerLog, mozilla::LogLevel::Debug, ("WebMDemuxer(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
extern mozilla::LazyLogModule gMediaDemuxerLog;
@@ -322,6 +323,23 @@ WebMDemuxer::ReadMetadata()
case NESTEGG_CODEC_VP9:
mInfo.mVideo.mMimeType = "video/webm; codecs=vp9";
break;
+ case NESTEGG_CODEC_AV1:
+ mInfo.mVideo.mMimeType = "video/webm; codecs=av1";
+ break;
+ case NESTEGG_CODEC_AVC1: {
+ mInfo.mVideo.mMimeType = "video/webm; codecs=avc1";
+
+ unsigned char* data = 0;
+ size_t length = 0;
+ r = nestegg_track_codec_data(context, track, 0, &data, &length);
+ if (r == -1) {
+ return NS_ERROR_FAILURE;
+ }
+
+ mInfo.mVideo.mExtraData = new MediaByteBuffer(length);
+ mInfo.mVideo.mExtraData->AppendElements(data, length);
+ break;
+ }
default:
NS_WARNING("Unknown WebM video codec");
return NS_ERROR_FAILURE;
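Note: for the new AVC1 case, the codec-private data returned by nestegg_track_codec_data() is (in Matroska terms) the AVC decoder configuration record. This hunk keeps it on the track info, and a later hunk in the same patch copies it onto every demuxed sample. A minimal sketch of that flow, assembled only from lines that appear elsewhere in this diff:

    // In ReadMetadata(): keep the codec-private data on the video track info.
    mInfo.mVideo.mExtraData = new MediaByteBuffer(length);
    mInfo.mVideo.mExtraData->AppendElements(data, length);

    // In GetNextPacket(): hand the same buffer to each sample, so a downstream
    // H.264 decoder can be initialised from sample->mExtraData alone.
    if (mVideoCodec == NESTEGG_CODEC_AVC1) {
      sample->mExtraData = mInfo.mVideo.mExtraData;
    }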
@@ -404,6 +422,8 @@ WebMDemuxer::ReadMetadata()
mInfo.mAudio.mMimeType = "audio/opus";
OpusDataDecoder::AppendCodecDelay(mInfo.mAudio.mCodecSpecificConfig,
media::TimeUnit::FromNanoseconds(params.codec_delay).ToMicroseconds());
+ } else if (mAudioCodec == NESTEGG_CODEC_AAC) {
+ mInfo.mAudio.mMimeType = "audio/mp4a-latm";
}
mSeekPreroll = params.seek_preroll;
mInfo.mAudio.mRate = params.rate;
@@ -549,7 +569,7 @@ WebMDemuxer::GetTrackCrypto(TrackInfo::TrackType aType, size_t aTrackNumber) {
return crypto;
}
-bool
+nsresult
WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSamples)
{
if (mIsMediaSource) {
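Note: this is the central change of the patch. GetNextPacket() (and, below, NextPacket(), DemuxPacket() and NextSample()) now return an nsresult instead of a bare bool, so callers can tell a normal end-of-stream from a hard demuxing failure. A hedged sketch of the calling convention used throughout the rest of the diff (the variable names here are illustrative, the error codes are the ones the patch uses):

    MediaRawDataQueue samples;
    nsresult rv = GetNextPacket(TrackInfo::kVideoTrack, &samples);
    if (NS_SUCCEEDED(rv)) {
      // Demuxed data was appended to the queue.
    } else if (rv == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
      // Benign: the stream ran out; callers resolve or reject with EOS, not an error.
    } else {
      // Hard failure, e.g. NS_ERROR_DOM_MEDIA_DEMUXER_ERR or NS_ERROR_OUT_OF_MEMORY:
      // propagate rv so promises are rejected with the original code.
    }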
@@ -557,17 +577,18 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
EnsureUpToDateIndex();
}
- RefPtr<NesteggPacketHolder> holder(NextPacket(aType));
+ RefPtr<NesteggPacketHolder> holder;
+ nsresult rv = NextPacket(aType, holder);
- if (!holder) {
- return false;
+ if (NS_FAILED(rv)) {
+ return rv;
}
int r = 0;
unsigned int count = 0;
r = nestegg_packet_count(holder->Packet(), &count);
if (r == -1) {
- return false;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
int64_t tstamp = holder->Timestamp();
int64_t duration = holder->Duration();
@@ -578,7 +599,11 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
// video frame.
int64_t next_tstamp = INT64_MIN;
if (aType == TrackInfo::kAudioTrack) {
- RefPtr<NesteggPacketHolder> next_holder(NextPacket(aType));
+ RefPtr<NesteggPacketHolder> next_holder;
+ rv = NextPacket(aType, next_holder);
+ if (NS_FAILED(rv) && rv != NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+ return rv;
+ }
if (next_holder) {
next_tstamp = next_holder->Timestamp();
PushAudioPacket(next_holder);
@@ -593,7 +618,11 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
}
mLastAudioFrameTime = Some(tstamp);
} else if (aType == TrackInfo::kVideoTrack) {
- RefPtr<NesteggPacketHolder> next_holder(NextPacket(aType));
+ RefPtr<NesteggPacketHolder> next_holder;
+ rv = NextPacket(aType, next_holder);
+ if (NS_FAILED(rv) && rv != NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+ return rv;
+ }
if (next_holder) {
next_tstamp = next_holder->Timestamp();
PushVideoPacket(next_holder);
@@ -610,7 +639,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
}
if (mIsMediaSource && next_tstamp == INT64_MIN) {
- return false;
+ return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
}
int64_t discardPadding = 0;
@@ -626,7 +655,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
r = nestegg_packet_data(holder->Packet(), i, &data, &length);
if (r == -1) {
WEBM_DEBUG("nestegg_packet_data failed r=%d", r);
- return false;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
bool isKeyframe = false;
if (aType == TrackInfo::kAudioTrack) {
@@ -636,29 +665,49 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
// Packet is encrypted, can't peek, use packet info
isKeyframe = nestegg_packet_has_keyframe(holder->Packet()) == NESTEGG_PACKET_HAS_KEYFRAME_TRUE;
} else {
- vpx_codec_stream_info_t si;
- PodZero(&si);
- si.sz = sizeof(si);
+ auto sample = MakeSpan(data, length);
switch (mVideoCodec) {
case NESTEGG_CODEC_VP8:
- vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
+ isKeyframe = VPXDecoder::IsKeyframe(sample, VPXDecoder::Codec::VP8);
break;
case NESTEGG_CODEC_VP9:
- vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
+ isKeyframe = VPXDecoder::IsKeyframe(sample, VPXDecoder::Codec::VP9);
+ break;
+#ifdef MOZ_AV1
+ case NESTEGG_CODEC_AV1:
+ isKeyframe = AOMDecoder::IsKeyframe(sample);
break;
+#endif
+ case NESTEGG_CODEC_AVC1:
+ isKeyframe = nestegg_packet_has_keyframe(holder->Packet()) == NESTEGG_PACKET_HAS_KEYFRAME_TRUE;
+ break;
+ default:
+ NS_WARNING("Cannot detect keyframes in unknown WebM video codec");
+ return NS_ERROR_FAILURE;
}
- isKeyframe = si.is_kf;
if (isKeyframe) {
- // We only look for resolution changes on keyframes for both VP8 and
- // VP9. Other resolution changes are invalid.
- if (mLastSeenFrameWidth.isSome() && mLastSeenFrameHeight.isSome() &&
- (si.w != mLastSeenFrameWidth.value() ||
- si.h != mLastSeenFrameHeight.value())) {
- mInfo.mVideo.mDisplay = nsIntSize(si.w, si.h);
+ // We only look for resolution changes on keyframes (VP8, VP9 and,
+ // when enabled, AV1). Other resolution changes are invalid.
+ auto dimensions = nsIntSize(0, 0);
+ switch (mVideoCodec) {
+ case NESTEGG_CODEC_VP8:
+ dimensions = VPXDecoder::GetFrameSize(sample, VPXDecoder::Codec::VP8);
+ break;
+ case NESTEGG_CODEC_VP9:
+ dimensions = VPXDecoder::GetFrameSize(sample, VPXDecoder::Codec::VP9);
+ break;
+#ifdef MOZ_AV1
+ case NESTEGG_CODEC_AV1:
+ dimensions = AOMDecoder::GetFrameSize(sample);
+ break;
+#endif
+ }
+ if (mLastSeenFrameSize.isSome()
+ && (dimensions != mLastSeenFrameSize.value())) {
+ mInfo.mVideo.mDisplay = dimensions;
mSharedVideoTrackInfo = new SharedTrackInfo(mInfo.mVideo, ++sStreamSourceID);
}
- mLastSeenFrameWidth = Some(si.w);
- mLastSeenFrameHeight = Some(si.h);
+ mLastSeenFrameSize = Some(dimensions);
}
}
}
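Note: the keyframe check and the frame-size probe above dispatch on mVideoCodec twice for the same packet. A possible follow-up (not part of this patch) would fold the two switches into one helper for the bitstream-parsed codecs; a sketch with an assumed signature, reusing the decoder helpers introduced above:

    // Hypothetical helper, not in the patch: one codec dispatch answers both
    // "is this a keyframe?" and "what size does it report?".
    static bool PeekKeyframeAndSize(int aCodec,
                                    const Span<const unsigned char>& aSample,
                                    nsIntSize& aSizeOut)
    {
      switch (aCodec) {
        case NESTEGG_CODEC_VP8:
          aSizeOut = VPXDecoder::GetFrameSize(aSample, VPXDecoder::Codec::VP8);
          return VPXDecoder::IsKeyframe(aSample, VPXDecoder::Codec::VP8);
        case NESTEGG_CODEC_VP9:
          aSizeOut = VPXDecoder::GetFrameSize(aSample, VPXDecoder::Codec::VP9);
          return VPXDecoder::IsKeyframe(aSample, VPXDecoder::Codec::VP9);
    #ifdef MOZ_AV1
        case NESTEGG_CODEC_AV1:
          aSizeOut = AOMDecoder::GetFrameSize(aSample);
          return AOMDecoder::IsKeyframe(aSample);
    #endif
        default:
          // AVC1 relies on container signalling (nestegg_packet_has_keyframe),
          // so it would still be handled at the call site.
          return false;
      }
    }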
@@ -668,7 +717,7 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
RefPtr<MediaRawData> sample = new MediaRawData(data, length);
if (length && !sample->Data()) {
// OOM.
- return false;
+ return NS_ERROR_OUT_OF_MEMORY;
}
sample->mTimecode = tstamp;
sample->mTime = tstamp;
@@ -719,13 +768,19 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
if (aType == TrackInfo::kVideoTrack) {
sample->mTrackInfo = mSharedVideoTrackInfo;
}
+
+ if (mVideoCodec == NESTEGG_CODEC_AVC1) {
+ sample->mExtraData = mInfo.mVideo.mExtraData;
+ }
+
aSamples->Push(sample);
}
- return true;
+ return NS_OK;
}
-RefPtr<NesteggPacketHolder>
-WebMDemuxer::NextPacket(TrackInfo::TrackType aType)
+nsresult
+WebMDemuxer::NextPacket(TrackInfo::TrackType aType,
+ RefPtr<NesteggPacketHolder>& aPacket)
{
bool isVideo = aType == TrackInfo::kVideoTrack;
@@ -734,56 +789,64 @@ WebMDemuxer::NextPacket(TrackInfo::TrackType aType)
bool hasType = isVideo ? mHasVideo : mHasAudio;
if (!hasType) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
// The packet queue for the type that we are interested in.
WebMPacketQueue &packets = isVideo ? mVideoPackets : mAudioPackets;
if (packets.GetSize() > 0) {
- return packets.PopFront();
+ aPacket = packets.PopFront();
+ return NS_OK;
}
// Track we are interested in
uint32_t ourTrack = isVideo ? mVideoTrack : mAudioTrack;
do {
- RefPtr<NesteggPacketHolder> holder = DemuxPacket(aType);
+ RefPtr<NesteggPacketHolder> holder;
+ nsresult rv = DemuxPacket(aType, holder);
+ if (NS_FAILED(rv)) {
+ return rv;
+ }
if (!holder) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
if (ourTrack == holder->Track()) {
- return holder;
+ aPacket = holder;
+ return NS_OK;
}
} while (true);
}
-RefPtr<NesteggPacketHolder>
-WebMDemuxer::DemuxPacket(TrackInfo::TrackType aType)
+nsresult
+WebMDemuxer::DemuxPacket(TrackInfo::TrackType aType,
+ RefPtr<NesteggPacketHolder>& aPacket)
{
nestegg_packet* packet;
int r = nestegg_read_packet(Context(aType), &packet);
if (r == 0) {
nestegg_read_reset(Context(aType));
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
} else if (r < 0) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
unsigned int track = 0;
r = nestegg_packet_track(packet, &track);
if (r == -1) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
int64_t offset = Resource(aType).Tell();
RefPtr<NesteggPacketHolder> holder = new NesteggPacketHolder();
if (!holder->Init(packet, offset, track, false)) {
- return nullptr;
+ return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
- return holder;
+ aPacket = holder;
+ return NS_OK;
}
void
@@ -943,7 +1006,14 @@ WebMTrackDemuxer::Seek(media::TimeUnit aTime)
media::TimeUnit seekTime = aTime;
mSamples.Reset();
mParent->SeekInternal(mType, aTime);
- mParent->GetNextPacket(mType, &mSamples);
+ nsresult rv = mParent->GetNextPacket(mType, &mSamples);
+ if (NS_FAILED(rv)) {
+ if (rv == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+ // Ignore the error for now, the next GetSample will be rejected with EOS.
+ return SeekPromise::CreateAndResolve(media::TimeUnit(), __func__);
+ }
+ return SeekPromise::CreateAndReject(rv, __func__);
+ }
mNeedKeyframe = true;
// Check what time we actually seeked to.
@@ -956,15 +1026,18 @@ WebMTrackDemuxer::Seek(media::TimeUnit aTime)
return SeekPromise::CreateAndResolve(seekTime, __func__);
}
-RefPtr<MediaRawData>
-WebMTrackDemuxer::NextSample()
+nsresult
+WebMTrackDemuxer::NextSample(RefPtr<MediaRawData>& aData)
{
- while (mSamples.GetSize() < 1 && mParent->GetNextPacket(mType, &mSamples)) {
+ nsresult rv;
+ while (mSamples.GetSize() < 1 &&
+ NS_SUCCEEDED((rv = mParent->GetNextPacket(mType, &mSamples)))) {
}
if (mSamples.GetSize()) {
- return mSamples.PopFront();
+ aData = mSamples.PopFront();
+ return NS_OK;
}
- return nullptr;
+ return rv;
}
RefPtr<WebMTrackDemuxer::SamplesPromise>
@@ -973,9 +1046,12 @@ WebMTrackDemuxer::GetSamples(int32_t aNumSamples)
RefPtr<SamplesHolder> samples = new SamplesHolder;
MOZ_ASSERT(aNumSamples);
+ nsresult rv = NS_ERROR_DOM_MEDIA_END_OF_STREAM;
+
while (aNumSamples) {
- RefPtr<MediaRawData> sample(NextSample());
- if (!sample) {
+ RefPtr<MediaRawData> sample;
+ rv = NextSample(sample);
+ if (NS_FAILED(rv)) {
break;
}
if (mNeedKeyframe && !sample->mKeyframe) {
@@ -987,7 +1063,7 @@ WebMTrackDemuxer::GetSamples(int32_t aNumSamples)
}
if (samples->mSamples.IsEmpty()) {
- return SamplesPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_END_OF_STREAM, __func__);
+ return SamplesPromise::CreateAndReject(rv, __func__);
} else {
UpdateSamples(samples->mSamples);
return SamplesPromise::CreateAndResolve(samples, __func__);
@@ -1022,7 +1098,8 @@ WebMTrackDemuxer::SetNextKeyFrameTime()
}
// Demux and buffer frames until we find a keyframe.
RefPtr<MediaRawData> sample;
- while (!foundKeyframe && (sample = NextSample())) {
+ nsresult rv = NS_OK;
+ while (!foundKeyframe && NS_SUCCEEDED((rv = NextSample(sample)))) {
if (sample->mKeyframe) {
frameTime = sample->mTime;
foundKeyframe = true;
@@ -1104,10 +1181,11 @@ WebMTrackDemuxer::SkipToNextRandomAccessPoint(media::TimeUnit aTimeThreshold)
uint32_t parsed = 0;
bool found = false;
RefPtr<MediaRawData> sample;
+ nsresult rv = NS_OK;
int64_t sampleTime;
WEBM_DEBUG("TimeThreshold: %f", aTimeThreshold.ToSeconds());
- while (!found && (sample = NextSample())) {
+ while (!found && NS_SUCCEEDED((rv = NextSample(sample)))) {
parsed++;
sampleTime = sample->mTime;
if (sample->mKeyframe && sampleTime >= aTimeThreshold.ToMicroseconds()) {
@@ -1116,7 +1194,9 @@ WebMTrackDemuxer::SkipToNextRandomAccessPoint(media::TimeUnit aTimeThreshold)
mSamples.PushFront(sample.forget());
}
}
- SetNextKeyFrameTime();
+ if (NS_SUCCEEDED(rv)) {
+ SetNextKeyFrameTime();
+ }
if (found) {
WEBM_DEBUG("next sample: %f (parsed: %d)",
media::TimeUnit::FromMicroseconds(sampleTime).ToSeconds(),