diff options
Diffstat (limited to 'dom/media/platforms/wrappers')
-rw-r--r-- | dom/media/platforms/wrappers/FuzzingWrapper.cpp | 328 | ||||
-rw-r--r-- | dom/media/platforms/wrappers/FuzzingWrapper.h | 124 | ||||
-rw-r--r-- | dom/media/platforms/wrappers/H264Converter.cpp | 311 | ||||
-rw-r--r-- | dom/media/platforms/wrappers/H264Converter.h | 74 |
4 files changed, 837 insertions, 0 deletions
diff --git a/dom/media/platforms/wrappers/FuzzingWrapper.cpp b/dom/media/platforms/wrappers/FuzzingWrapper.cpp new file mode 100644 index 000000000..7df020f46 --- /dev/null +++ b/dom/media/platforms/wrappers/FuzzingWrapper.cpp @@ -0,0 +1,328 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "FuzzingWrapper.h" + +mozilla::LogModule* GetFuzzingWrapperLog() { + static mozilla::LazyLogModule log("MediaFuzzingWrapper"); + return log; +} +#define DFW_LOGD(arg, ...) MOZ_LOG(GetFuzzingWrapperLog(), mozilla::LogLevel::Debug, ("DecoderFuzzingWrapper(%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) +#define DFW_LOGV(arg, ...) MOZ_LOG(GetFuzzingWrapperLog(), mozilla::LogLevel::Verbose, ("DecoderFuzzingWrapper(%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) +#define CFW_LOGD(arg, ...) MOZ_LOG(GetFuzzingWrapperLog(), mozilla::LogLevel::Debug, ("DecoderCallbackFuzzingWrapper(%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) +#define CFW_LOGV(arg, ...) 
MOZ_LOG(GetFuzzingWrapperLog(), mozilla::LogLevel::Verbose, ("DecoderCallbackFuzzingWrapper(%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) + +namespace mozilla { + +DecoderFuzzingWrapper::DecoderFuzzingWrapper( + already_AddRefed<MediaDataDecoder> aDecoder, + already_AddRefed<DecoderCallbackFuzzingWrapper> aCallbackWrapper) + : mDecoder(aDecoder) + , mCallbackWrapper(aCallbackWrapper) +{ + DFW_LOGV("aDecoder=%p aCallbackWrapper=%p", mDecoder.get(), mCallbackWrapper.get()); +} + +DecoderFuzzingWrapper::~DecoderFuzzingWrapper() +{ + DFW_LOGV(""); +} + +RefPtr<MediaDataDecoder::InitPromise> +DecoderFuzzingWrapper::Init() +{ + DFW_LOGV(""); + MOZ_ASSERT(mDecoder); + return mDecoder->Init(); +} + +void +DecoderFuzzingWrapper::Input(MediaRawData* aData) +{ + DFW_LOGV("aData.mTime=%lld", aData->mTime); + MOZ_ASSERT(mDecoder); + mDecoder->Input(aData); +} + +void +DecoderFuzzingWrapper::Flush() +{ + DFW_LOGV("Calling mDecoder[%p]->Flush()", mDecoder.get()); + MOZ_ASSERT(mDecoder); + // Flush may output some frames (though unlikely). + // Flush may block a bit, it's ok if we output some frames in the meantime. + mDecoder->Flush(); + DFW_LOGV("mDecoder[%p]->Flush()", mDecoder.get()); + // Clear any delayed output we may have. + mCallbackWrapper->ClearDelayedOutput(); +} + +void +DecoderFuzzingWrapper::Drain() +{ + DFW_LOGV(""); + MOZ_ASSERT(mDecoder); + // Note: The decoder should callback DrainComplete(), we'll drain the + // delayed output (if any) then. + mDecoder->Drain(); +} + +void +DecoderFuzzingWrapper::Shutdown() +{ + DFW_LOGV(""); + MOZ_ASSERT(mDecoder); + // Both shutdowns below may block a bit. 
+ mDecoder->Shutdown(); + mCallbackWrapper->Shutdown(); +} + +bool +DecoderFuzzingWrapper::IsHardwareAccelerated(nsACString& aFailureReason) const +{ + DFW_LOGV(""); + MOZ_ASSERT(mDecoder); + return mDecoder->IsHardwareAccelerated(aFailureReason); +} + +DecoderCallbackFuzzingWrapper::DecoderCallbackFuzzingWrapper(MediaDataDecoderCallback* aCallback) + : mCallback(aCallback) + , mDontDelayInputExhausted(false) + , mDraining(false) + , mTaskQueue(new TaskQueue(SharedThreadPool::Get(NS_LITERAL_CSTRING("MediaFuzzingWrapper"), 1))) +{ + CFW_LOGV("aCallback=%p", aCallback); +} + +DecoderCallbackFuzzingWrapper::~DecoderCallbackFuzzingWrapper() +{ + CFW_LOGV(""); +} + +void +DecoderCallbackFuzzingWrapper::SetVideoOutputMinimumInterval( + TimeDuration aFrameOutputMinimumInterval) +{ + CFW_LOGD("aFrameOutputMinimumInterval=%fms", + aFrameOutputMinimumInterval.ToMilliseconds()); + mFrameOutputMinimumInterval = aFrameOutputMinimumInterval; +} + +void +DecoderCallbackFuzzingWrapper::SetDontDelayInputExhausted( + bool aDontDelayInputExhausted) +{ + CFW_LOGD("aDontDelayInputExhausted=%d", + aDontDelayInputExhausted); + mDontDelayInputExhausted = aDontDelayInputExhausted; +} + +void +DecoderCallbackFuzzingWrapper::Output(MediaData* aData) +{ + if (!mTaskQueue->IsCurrentThreadIn()) { + nsCOMPtr<nsIRunnable> task = + NewRunnableMethod<StorensRefPtrPassByPtr<MediaData>>( + this, &DecoderCallbackFuzzingWrapper::Output, aData); + mTaskQueue->Dispatch(task.forget()); + return; + } + CFW_LOGV("aData.mTime=%lld", aData->mTime); + MOZ_ASSERT(mCallback); + if (mFrameOutputMinimumInterval) { + if (!mPreviousOutput.IsNull()) { + if (!mDelayedOutput.empty()) { + // We already have some delayed frames, just add this one to the queue. 
+ mDelayedOutput.push_back(MakePair<RefPtr<MediaData>, bool>(aData, false)); + CFW_LOGD("delaying output of sample@%lld, total queued:%d", + aData->mTime, int(mDelayedOutput.size())); + return; + } + if (TimeStamp::Now() < mPreviousOutput + mFrameOutputMinimumInterval) { + // Frame arriving too soon after the previous one, start queuing. + mDelayedOutput.push_back(MakePair<RefPtr<MediaData>, bool>(aData, false)); + CFW_LOGD("delaying output of sample@%lld, first queued", aData->mTime); + if (!mDelayedOutputTimer) { + mDelayedOutputTimer = new MediaTimer(); + } + ScheduleOutputDelayedFrame(); + return; + } + } + // If we're here, we're going to actually output a frame -> Record time. + mPreviousOutput = TimeStamp::Now(); + } + + // Passing the data straight through, no need to dispatch to another queue, + // callback should deal with that. + mCallback->Output(aData); +} + +void +DecoderCallbackFuzzingWrapper::Error(const MediaResult& aError) +{ + if (!mTaskQueue->IsCurrentThreadIn()) { + mTaskQueue->Dispatch(NewRunnableMethod<MediaResult>( + this, &DecoderCallbackFuzzingWrapper::Error, aError)); + return; + } + CFW_LOGV(""); + MOZ_ASSERT(mCallback); + ClearDelayedOutput(); + mCallback->Error(aError); +} + +void +DecoderCallbackFuzzingWrapper::InputExhausted() +{ + if (!mTaskQueue->IsCurrentThreadIn()) { + mTaskQueue->Dispatch(NewRunnableMethod(this, &DecoderCallbackFuzzingWrapper::InputExhausted)); + return; + } + if (!mDontDelayInputExhausted && !mDelayedOutput.empty()) { + MediaDataAndInputExhausted& last = mDelayedOutput.back(); + CFW_LOGD("InputExhausted delayed until after output of sample@%lld", + last.first()->mTime); + last.second() = true; + return; + } + CFW_LOGV(""); + MOZ_ASSERT(mCallback); + mCallback->InputExhausted(); +} + +void +DecoderCallbackFuzzingWrapper::DrainComplete() +{ + if (!mTaskQueue->IsCurrentThreadIn()) { + mTaskQueue->Dispatch(NewRunnableMethod(this, &DecoderCallbackFuzzingWrapper::DrainComplete)); + return; + } + 
MOZ_ASSERT(mCallback); + if (mDelayedOutput.empty()) { + // No queued output -> Draining is complete now. + CFW_LOGV("No delayed output -> DrainComplete now"); + mCallback->DrainComplete(); + } else { + // Queued output waiting -> Make sure we call DrainComplete when it's empty. + CFW_LOGD("Delayed output -> DrainComplete later"); + mDraining = true; + } +} + +void +DecoderCallbackFuzzingWrapper::ReleaseMediaResources() +{ + if (!mTaskQueue->IsCurrentThreadIn()) { + mTaskQueue->Dispatch(NewRunnableMethod(this, &DecoderCallbackFuzzingWrapper::ReleaseMediaResources)); + return; + } + CFW_LOGV(""); + MOZ_ASSERT(mCallback); + mCallback->ReleaseMediaResources(); +} + +bool +DecoderCallbackFuzzingWrapper::OnReaderTaskQueue() +{ + CFW_LOGV(""); + MOZ_ASSERT(mCallback); + return mCallback->OnReaderTaskQueue(); +} + +void +DecoderCallbackFuzzingWrapper::ScheduleOutputDelayedFrame() +{ + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + if (mDelayedOutputRequest.Exists()) { + // A delayed output is already scheduled, no need for more than one timer. + return; + } + RefPtr<DecoderCallbackFuzzingWrapper> self = this; + mDelayedOutputRequest.Begin( + mDelayedOutputTimer->WaitUntil( + mPreviousOutput + mFrameOutputMinimumInterval, + __func__) + ->Then(mTaskQueue, __func__, + [self] () -> void { + if (self->mDelayedOutputRequest.Exists()) { + self->mDelayedOutputRequest.Complete(); + self->OutputDelayedFrame(); + } + }, + [self] () -> void { + if (self->mDelayedOutputRequest.Exists()) { + self->mDelayedOutputRequest.Complete(); + self->ClearDelayedOutput(); + } + })); +} + +void +DecoderCallbackFuzzingWrapper::OutputDelayedFrame() +{ + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + if (mDelayedOutput.empty()) { + if (mDraining) { + // No more output, and we were draining -> Send DrainComplete. 
+ mDraining = false; + mCallback->DrainComplete(); + } + return; + } + MediaDataAndInputExhausted& data = mDelayedOutput.front(); + CFW_LOGD("Outputting delayed sample@%lld, remaining:%d", + data.first()->mTime, int(mDelayedOutput.size() - 1)); + mPreviousOutput = TimeStamp::Now(); + mCallback->Output(data.first()); + if (data.second()) { + CFW_LOGD("InputExhausted after delayed sample@%lld", data.first()->mTime); + mCallback->InputExhausted(); + } + mDelayedOutput.pop_front(); + if (!mDelayedOutput.empty()) { + // More output -> Send it later. + ScheduleOutputDelayedFrame(); + } else if (mDraining) { + // No more output, and we were draining -> Send DrainComplete. + CFW_LOGD("DrainComplete"); + mDraining = false; + mCallback->DrainComplete(); + } +} + +void +DecoderCallbackFuzzingWrapper::ClearDelayedOutput() +{ + if (!mTaskQueue->IsCurrentThreadIn()) { + DFW_LOGV("(dispatching self)"); + mTaskQueue->Dispatch(NewRunnableMethod(this, &DecoderCallbackFuzzingWrapper::ClearDelayedOutput)); + return; + } + DFW_LOGV(""); + // In case a timer hasn't lapsed yet, before destroying the timer and its + // attached waitUntil() promise, the 'Then' request must be disconnected. + mDelayedOutputRequest.DisconnectIfExists(); + mDelayedOutputTimer = nullptr; + mDelayedOutput.clear(); +} + +void +DecoderCallbackFuzzingWrapper::Shutdown() +{ + CFW_LOGV("Clear delayed output (if any) before shutting down mTaskQueue"); + ClearDelayedOutput(); + // Await idle here, so that 'ClearDelayedOutput' runs to completion before + // the task queue is shutdown (and tasks can't be queued anymore). 
+ mTaskQueue->AwaitIdle(); + + CFW_LOGV("Shutting down mTaskQueue"); + mTaskQueue->BeginShutdown(); + mTaskQueue->AwaitIdle(); + CFW_LOGV("mTaskQueue shut down"); +} + +} // namespace mozilla diff --git a/dom/media/platforms/wrappers/FuzzingWrapper.h b/dom/media/platforms/wrappers/FuzzingWrapper.h new file mode 100644 index 000000000..c2b737520 --- /dev/null +++ b/dom/media/platforms/wrappers/FuzzingWrapper.h @@ -0,0 +1,124 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(FuzzingWrapper_h_) +#define FuzzingWrapper_h_ + +#include "mozilla/Pair.h" +#include "PlatformDecoderModule.h" + +#include <deque> + +namespace mozilla { + +// Fuzzing wrapper for media decoders. +// +// DecoderFuzzingWrapper owns the DecoderCallbackFuzzingWrapper, and inserts +// itself between the reader and the decoder. +// DecoderCallbackFuzzingWrapper inserts itself between a decoder and its +// callback. +// Together they are used to introduce some fuzzing, (e.g. delay output). +// +// Normally: +// ====================================> +// reader decoder +// <------------------------------------ +// +// With fuzzing: +// ======> DecoderFuzzingWrapper ======> +// reader v decoder +// <-- DecoderCallbackFuzzingWrapper <-- +// +// Creation order should be: +// 1. Create DecoderCallbackFuzzingWrapper, give the expected callback target. +// 2. Create actual decoder, give DecoderCallbackFuzzingWrapper as callback. +// 3. Create DecoderFuzzingWrapper, give decoder and DecoderCallbackFuzzingWrapper. +// DecoderFuzzingWrapper is what the reader sees as decoder, it owns the +// real decoder and the DecoderCallbackFuzzingWrapper. 
+ +class DecoderCallbackFuzzingWrapper : public MediaDataDecoderCallback +{ +public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DecoderCallbackFuzzingWrapper) + + explicit DecoderCallbackFuzzingWrapper(MediaDataDecoderCallback* aCallback); + + // Enforce a minimum interval between output frames (i.e., limit frame rate). + // Of course, if the decoder is even slower, this won't have any effect. + void SetVideoOutputMinimumInterval(TimeDuration aFrameOutputMinimumInterval); + // If false (default), if frames are delayed, any InputExhausted is delayed to + // be later sent after the corresponding delayed frame. + // If true, InputExhausted are passed through immediately; This could result + // in lots of frames being decoded and queued for delayed output! + void SetDontDelayInputExhausted(bool aDontDelayInputExhausted); + +private: + virtual ~DecoderCallbackFuzzingWrapper(); + + // MediaDataDecoderCallback implementation. + void Output(MediaData* aData) override; + void Error(const MediaResult& aError) override; + void InputExhausted() override; + void DrainComplete() override; + void ReleaseMediaResources() override; + bool OnReaderTaskQueue() override; + + MediaDataDecoderCallback* mCallback; + + // Settings for minimum frame output interval & InputExhausted, + // should be set during init and then only read on mTaskQueue. + TimeDuration mFrameOutputMinimumInterval; + bool mDontDelayInputExhausted; + // Members for minimum frame output interval & InputExhausted, + // should only be accessed on mTaskQueue. + TimeStamp mPreviousOutput; + // First member is the frame to be delayed. + // Second member is true if an 'InputExhausted' arrived after that frame; in + // which case an InputExhausted will be sent after finally outputting the frame. 
+ typedef Pair<RefPtr<MediaData>, bool> MediaDataAndInputExhausted; + std::deque<MediaDataAndInputExhausted> mDelayedOutput; + RefPtr<MediaTimer> mDelayedOutputTimer; + MozPromiseRequestHolder<MediaTimerPromise> mDelayedOutputRequest; + // If draining, a 'DrainComplete' will be sent after all delayed frames have + // been output. + bool mDraining; + // All callbacks are redirected through this task queue, both to avoid locking + // and to have a consistent sequencing of callbacks. + RefPtr<TaskQueue> mTaskQueue; + void ScheduleOutputDelayedFrame(); + void OutputDelayedFrame(); +public: // public for the benefit of DecoderFuzzingWrapper. + void ClearDelayedOutput(); + void Shutdown(); +}; + +class DecoderFuzzingWrapper : public MediaDataDecoder +{ +public: + DecoderFuzzingWrapper(already_AddRefed<MediaDataDecoder> aDecoder, + already_AddRefed<DecoderCallbackFuzzingWrapper> aCallbackWrapper); + + // MediaDataDecoder implementation. + RefPtr<InitPromise> Init() override; + void Input(MediaRawData* aSample) override; + void Flush() override; + void Drain() override; + void Shutdown() override; + bool IsHardwareAccelerated(nsACString& aFailureReason) const override; + const char* GetDescriptionName() const override + { + return mDecoder->GetDescriptionName(); + } + +private: + virtual ~DecoderFuzzingWrapper(); + RefPtr<MediaDataDecoder> mDecoder; + RefPtr<DecoderCallbackFuzzingWrapper> mCallbackWrapper; +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/platforms/wrappers/H264Converter.cpp b/dom/media/platforms/wrappers/H264Converter.cpp new file mode 100644 index 000000000..0edbfc10c --- /dev/null +++ b/dom/media/platforms/wrappers/H264Converter.cpp @@ -0,0 +1,311 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "mozilla/TaskQueue.h" + +#include "H264Converter.h" +#include "ImageContainer.h" +#include "MediaInfo.h" +#include "mp4_demuxer/AnnexB.h" +#include "mp4_demuxer/H264.h" + +namespace mozilla +{ + +H264Converter::H264Converter(PlatformDecoderModule* aPDM, + const CreateDecoderParams& aParams) + : mPDM(aPDM) + , mCurrentConfig(aParams.VideoConfig()) + , mKnowsCompositor(aParams.mKnowsCompositor) + , mImageContainer(aParams.mImageContainer) + , mTaskQueue(aParams.mTaskQueue) + , mCallback(aParams.mCallback) + , mDecoder(nullptr) + , mGMPCrashHelper(aParams.mCrashHelper) + , mNeedAVCC(aPDM->DecoderNeedsConversion(aParams.mConfig) + == PlatformDecoderModule::ConversionRequired::kNeedAVCC) + , mLastError(NS_OK) +{ + CreateDecoder(aParams.mDiagnostics); +} + +H264Converter::~H264Converter() +{ +} + +RefPtr<MediaDataDecoder::InitPromise> +H264Converter::Init() +{ + if (mDecoder) { + return mDecoder->Init(); + } + + // We haven't been able to initialize a decoder due to a missing SPS/PPS. + return MediaDataDecoder::InitPromise::CreateAndResolve( + TrackType::kVideoTrack, __func__); +} + +void +H264Converter::Input(MediaRawData* aSample) +{ + if (!mp4_demuxer::AnnexB::ConvertSampleToAVCC(aSample)) { + // We need AVCC content to be able to later parse the SPS. + // This is a no-op if the data is already AVCC. + mCallback->Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, + RESULT_DETAIL("ConvertSampleToAVCC"))); + return; + } + + if (mInitPromiseRequest.Exists()) { + if (mNeedKeyframe) { + if (!aSample->mKeyframe) { + // Frames dropped, we need a new one. + mCallback->InputExhausted(); + return; + } + mNeedKeyframe = false; + } + mMediaRawSamples.AppendElement(aSample); + return; + } + + nsresult rv; + if (!mDecoder) { + // It is not possible to create an AVCC H264 decoder without SPS. 
+ // As such, creation will fail if the extra_data just extracted doesn't + // contain a SPS. + rv = CreateDecoderAndInit(aSample); + if (rv == NS_ERROR_NOT_INITIALIZED) { + // We are missing the required SPS to create the decoder. + // Ignore for the time being, the MediaRawData will be dropped. + mCallback->InputExhausted(); + return; + } + } else { + rv = CheckForSPSChange(aSample); + if (rv == NS_ERROR_NOT_INITIALIZED) { + // The decoder is pending initialization. + mCallback->InputExhausted(); + return; + } + } + if (NS_FAILED(rv)) { + mCallback->Error( + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Unable to create H264 decoder"))); + return; + } + + if (mNeedKeyframe && !aSample->mKeyframe) { + mCallback->InputExhausted(); + return; + } + + if (!mNeedAVCC && + !mp4_demuxer::AnnexB::ConvertSampleToAnnexB(aSample, mNeedKeyframe)) { + mCallback->Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, + RESULT_DETAIL("ConvertSampleToAnnexB"))); + return; + } + + mNeedKeyframe = false; + + aSample->mExtraData = mCurrentConfig.mExtraData; + + mDecoder->Input(aSample); +} + +void +H264Converter::Flush() +{ + mNeedKeyframe = true; + if (mDecoder) { + mDecoder->Flush(); + } +} + +void +H264Converter::Drain() +{ + mNeedKeyframe = true; + if (mDecoder) { + mDecoder->Drain(); + return; + } + mCallback->DrainComplete(); +} + +void +H264Converter::Shutdown() +{ + if (mDecoder) { + mDecoder->Shutdown(); + mInitPromiseRequest.DisconnectIfExists(); + mDecoder = nullptr; + } +} + +bool +H264Converter::IsHardwareAccelerated(nsACString& aFailureReason) const +{ + if (mDecoder) { + return mDecoder->IsHardwareAccelerated(aFailureReason); + } + return MediaDataDecoder::IsHardwareAccelerated(aFailureReason); +} + +void +H264Converter::SetSeekThreshold(const media::TimeUnit& aTime) +{ + if (mDecoder) { + mDecoder->SetSeekThreshold(aTime); + } else { + MediaDataDecoder::SetSeekThreshold(aTime); + } +} + +nsresult +H264Converter::CreateDecoder(DecoderDoctorDiagnostics* aDiagnostics) 
+{ + if (!mp4_demuxer::AnnexB::HasSPS(mCurrentConfig.mExtraData)) { + // nothing found yet, will try again later + return NS_ERROR_NOT_INITIALIZED; + } + UpdateConfigFromExtraData(mCurrentConfig.mExtraData); + + mp4_demuxer::SPSData spsdata; + if (mp4_demuxer::H264::DecodeSPSFromExtraData(mCurrentConfig.mExtraData, spsdata)) { + // Do some format check here. + // WMF H.264 Video Decoder and Apple ATDecoder do not support YUV444 format. + if (spsdata.profile_idc == 244 /* Hi444PP */ || + spsdata.chroma_format_idc == PDMFactory::kYUV444) { + mLastError = NS_ERROR_FAILURE; + if (aDiagnostics) { + aDiagnostics->SetVideoNotSupported(); + } + return NS_ERROR_FAILURE; + } + } else { + // SPS was invalid. + mLastError = NS_ERROR_FAILURE; + return NS_ERROR_FAILURE; + } + + mDecoder = mPDM->CreateVideoDecoder({ + mCurrentConfig, + mTaskQueue, + mCallback, + aDiagnostics, + mImageContainer, + mKnowsCompositor, + mGMPCrashHelper + }); + + if (!mDecoder) { + mLastError = NS_ERROR_FAILURE; + return NS_ERROR_FAILURE; + } + + mNeedKeyframe = true; + + return NS_OK; +} + +nsresult +H264Converter::CreateDecoderAndInit(MediaRawData* aSample) +{ + RefPtr<MediaByteBuffer> extra_data = + mp4_demuxer::AnnexB::ExtractExtraData(aSample); + if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) { + return NS_ERROR_NOT_INITIALIZED; + } + UpdateConfigFromExtraData(extra_data); + + nsresult rv = CreateDecoder(/* DecoderDoctorDiagnostics* */ nullptr); + + if (NS_SUCCEEDED(rv)) { + // Queue the incoming sample. 
+    mMediaRawSamples.AppendElement(aSample);
+
+    mInitPromiseRequest.Begin(mDecoder->Init()
+      ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__, this,
+             &H264Converter::OnDecoderInitDone,
+             &H264Converter::OnDecoderInitFailed));
+    return NS_ERROR_NOT_INITIALIZED;
+  }
+  return rv;
+}
+
+void
+H264Converter::OnDecoderInitDone(const TrackType aTrackType)
+{
+  mInitPromiseRequest.Complete();
+  // Feed the decoder all samples queued while initialization was pending,
+  // tracking whether any sample was actually submitted so that
+  // InputExhausted() is only signalled when the decoder received no data.
+  bool gotInput = false;
+  for (uint32_t i = 0 ; i < mMediaRawSamples.Length(); i++) {
+    const RefPtr<MediaRawData>& sample = mMediaRawSamples[i];
+    if (mNeedKeyframe) {
+      if (!sample->mKeyframe) {
+        continue;
+      }
+      mNeedKeyframe = false;
+    }
+    if (!mNeedAVCC &&
+        !mp4_demuxer::AnnexB::ConvertSampleToAnnexB(sample, mNeedKeyframe)) {
+      mCallback->Error(MediaResult(NS_ERROR_OUT_OF_MEMORY,
+                                   RESULT_DETAIL("ConvertSampleToAnnexB")));
+      mMediaRawSamples.Clear();
+      return;
+    }
+    // A sample is being submitted: record it so we don't immediately request
+    // more input below. (gotInput was previously never set, making the
+    // !gotInput test constant-true.)
+    gotInput = true;
+    mDecoder->Input(sample);
+  }
+  if (!gotInput) {
+    mCallback->InputExhausted();
+  }
+  mMediaRawSamples.Clear();
+}
+
+void
+H264Converter::OnDecoderInitFailed(MediaResult aError)
+{
+  mInitPromiseRequest.Complete();
+  mCallback->Error(
+    MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+                RESULT_DETAIL("Unable to initialize H264 decoder")));
+}
+
+nsresult
+H264Converter::CheckForSPSChange(MediaRawData* aSample)
+{
+  RefPtr<MediaByteBuffer> extra_data =
+    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
+  if (!mp4_demuxer::AnnexB::HasSPS(extra_data) ||
+      mp4_demuxer::AnnexB::CompareExtraData(extra_data,
+                                            mCurrentConfig.mExtraData)) {
+    return NS_OK;
+  }
+  // The SPS has changed, signal to flush the current decoder and create a
+  // new one.
+ mDecoder->Flush(); + Shutdown(); + return CreateDecoderAndInit(aSample); +} + +void +H264Converter::UpdateConfigFromExtraData(MediaByteBuffer* aExtraData) +{ + mp4_demuxer::SPSData spsdata; + if (mp4_demuxer::H264::DecodeSPSFromExtraData(aExtraData, spsdata) && + spsdata.pic_width > 0 && spsdata.pic_height > 0) { + mp4_demuxer::H264::EnsureSPSIsSane(spsdata); + mCurrentConfig.mImage.width = spsdata.pic_width; + mCurrentConfig.mImage.height = spsdata.pic_height; + mCurrentConfig.mDisplay.width = spsdata.display_width; + mCurrentConfig.mDisplay.height = spsdata.display_height; + } + mCurrentConfig.mExtraData = aExtraData; +} + +} // namespace mozilla diff --git a/dom/media/platforms/wrappers/H264Converter.h b/dom/media/platforms/wrappers/H264Converter.h new file mode 100644 index 000000000..6905b1c74 --- /dev/null +++ b/dom/media/platforms/wrappers/H264Converter.h @@ -0,0 +1,74 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef mozilla_H264Converter_h +#define mozilla_H264Converter_h + +#include "PlatformDecoderModule.h" + +namespace mozilla { + +// H264Converter is a MediaDataDecoder wrapper used to ensure that +// only AVCC or AnnexB is fed to the underlying MediaDataDecoder. +// The H264Converter allows playback of content where the SPS NAL may not be +// provided in the init segment (e.g. AVC3 or Annex B) +// H264Converter will monitor the input data, and will delay creation of the +// MediaDataDecoder until a SPS and PPS NALs have been extracted. 
+ +class H264Converter : public MediaDataDecoder { +public: + + H264Converter(PlatformDecoderModule* aPDM, + const CreateDecoderParams& aParams); + virtual ~H264Converter(); + + RefPtr<InitPromise> Init() override; + void Input(MediaRawData* aSample) override; + void Flush() override; + void Drain() override; + void Shutdown() override; + bool IsHardwareAccelerated(nsACString& aFailureReason) const override; + const char* GetDescriptionName() const override + { + if (mDecoder) { + return mDecoder->GetDescriptionName(); + } + return "H264Converter decoder (pending)"; + } + void SetSeekThreshold(const media::TimeUnit& aTime) override; + + nsresult GetLastError() const { return mLastError; } + +private: + // Will create the required MediaDataDecoder if need AVCC and we have a SPS NAL. + // Returns NS_ERROR_FAILURE if error is permanent and can't be recovered and + // will set mError accordingly. + nsresult CreateDecoder(DecoderDoctorDiagnostics* aDiagnostics); + nsresult CreateDecoderAndInit(MediaRawData* aSample); + nsresult CheckForSPSChange(MediaRawData* aSample); + void UpdateConfigFromExtraData(MediaByteBuffer* aExtraData); + + void OnDecoderInitDone(const TrackType aTrackType); + void OnDecoderInitFailed(MediaResult aError); + + RefPtr<PlatformDecoderModule> mPDM; + VideoInfo mCurrentConfig; + RefPtr<layers::KnowsCompositor> mKnowsCompositor; + RefPtr<layers::ImageContainer> mImageContainer; + const RefPtr<TaskQueue> mTaskQueue; + nsTArray<RefPtr<MediaRawData>> mMediaRawSamples; + MediaDataDecoderCallback* mCallback; + RefPtr<MediaDataDecoder> mDecoder; + MozPromiseRequestHolder<InitPromise> mInitPromiseRequest; + RefPtr<GMPCrashHelper> mGMPCrashHelper; + bool mNeedAVCC; + nsresult mLastError; + bool mNeedKeyframe = true; +}; + +} // namespace mozilla + +#endif // mozilla_H264Converter_h |