summaryrefslogtreecommitdiffstats
path: root/dom/media/android
diff options
context:
space:
mode:
Diffstat (limited to 'dom/media/android')
-rw-r--r--dom/media/android/AndroidMediaDecoder.cpp25
-rw-r--r--dom/media/android/AndroidMediaDecoder.h28
-rw-r--r--dom/media/android/AndroidMediaPluginHost.cpp311
-rw-r--r--dom/media/android/AndroidMediaPluginHost.h41
-rw-r--r--dom/media/android/AndroidMediaReader.cpp449
-rw-r--r--dom/media/android/AndroidMediaReader.h75
-rw-r--r--dom/media/android/AndroidMediaResourceServer.cpp503
-rw-r--r--dom/media/android/AndroidMediaResourceServer.h96
-rw-r--r--dom/media/android/MPAPI.h165
-rw-r--r--dom/media/android/moz.build27
10 files changed, 1720 insertions, 0 deletions
diff --git a/dom/media/android/AndroidMediaDecoder.cpp b/dom/media/android/AndroidMediaDecoder.cpp
new file mode 100644
index 000000000..41ef3fcb0
--- /dev/null
+++ b/dom/media/android/AndroidMediaDecoder.cpp
@@ -0,0 +1,25 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaDecoderStateMachine.h"
+#include "AndroidMediaDecoder.h"
+#include "AndroidMediaReader.h"
+
+namespace mozilla {
+
+AndroidMediaDecoder::AndroidMediaDecoder(MediaDecoderOwner* aOwner,
+ const nsACString& aType)
+ : MediaDecoder(aOwner), mType(aType)
+{
+}
+
+MediaDecoderStateMachine* AndroidMediaDecoder::CreateStateMachine()
+{
+ return new MediaDecoderStateMachine(this, new AndroidMediaReader(this, mType));
+}
+
+} // namespace mozilla
+
diff --git a/dom/media/android/AndroidMediaDecoder.h b/dom/media/android/AndroidMediaDecoder.h
new file mode 100644
index 000000000..88b5a243f
--- /dev/null
+++ b/dom/media/android/AndroidMediaDecoder.h
@@ -0,0 +1,28 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(AndroidMediaDecoder_h_)
+#define AndroidMediaDecoder_h_
+
+#include "MediaDecoder.h"
+#include "AndroidMediaDecoder.h"
+
+namespace mozilla {
+
+class AndroidMediaDecoder : public MediaDecoder
+{
+ nsCString mType;
+public:
+ AndroidMediaDecoder(MediaDecoderOwner* aOwner, const nsACString& aType);
+
+ MediaDecoder* Clone(MediaDecoderOwner* aOwner) override {
+ return new AndroidMediaDecoder(aOwner, mType);
+ }
+ MediaDecoderStateMachine* CreateStateMachine() override;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/android/AndroidMediaPluginHost.cpp b/dom/media/android/AndroidMediaPluginHost.cpp
new file mode 100644
index 000000000..d3dce2b93
--- /dev/null
+++ b/dom/media/android/AndroidMediaPluginHost.cpp
@@ -0,0 +1,311 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#include "mozilla/Preferences.h"
+#include "MediaResource.h"
+#include "mozilla/dom/HTMLMediaElement.h"
+#include "mozilla/Services.h"
+#include "AndroidMediaPluginHost.h"
+#include "nsAutoPtr.h"
+#include "nsXPCOMStrings.h"
+#include "nsISeekableStream.h"
+#include "nsIGfxInfo.h"
+#include "gfxCrashReporterUtils.h"
+#include "prmem.h"
+#include "prlink.h"
+#include "AndroidMediaResourceServer.h"
+#include "nsServiceManagerUtils.h"
+
+#include "MPAPI.h"
+
+#include "nsIPropertyBag2.h"
+
+#if defined(ANDROID) || defined(MOZ_WIDGET_GONK)
+#include "android/log.h"
+#define ALOG(args...) __android_log_print(ANDROID_LOG_INFO, "AndroidMediaPluginHost" , ## args)
+#else
+#define ALOG(args...) /* do nothing */
+#endif
+
+using namespace MPAPI;
+
+Decoder::Decoder() :
+ mResource(nullptr), mPrivate(nullptr)
+{
+}
+
+namespace mozilla {
+
+static char* GetResource(Decoder *aDecoder)
+{
+ return static_cast<char*>(aDecoder->mResource);
+}
+
+class GetIntPrefEvent : public Runnable {
+public:
+ GetIntPrefEvent(const char* aPref, int32_t* aResult)
+ : mPref(aPref), mResult(aResult) {}
+ NS_IMETHOD Run() override {
+ return Preferences::GetInt(mPref, mResult);
+ }
+private:
+ const char* mPref;
+ int32_t* mResult;
+};
+
+static bool GetIntPref(const char* aPref, int32_t* aResult)
+{
+ // GetIntPref() is called on the decoder thread, but the Preferences API
+ // can only be called on the main thread. Post a runnable and wait.
+ NS_ENSURE_TRUE(aPref, false);
+ NS_ENSURE_TRUE(aResult, false);
+ nsCOMPtr<nsIRunnable> event = new GetIntPrefEvent(aPref, aResult);
+ return NS_SUCCEEDED(NS_DispatchToMainThread(event, NS_DISPATCH_SYNC));
+}
+
+static bool
+GetSystemInfoString(const char *aKey, char *aResult, size_t aResultLength)
+{
+ NS_ENSURE_TRUE(aKey, false);
+ NS_ENSURE_TRUE(aResult, false);
+
+ nsCOMPtr<nsIPropertyBag2> infoService = do_GetService("@mozilla.org/system-info;1");
+ NS_ASSERTION(infoService, "Could not find a system info service");
+
+ nsAutoCString key(aKey);
+ nsAutoCString info;
+ nsresult rv = infoService->GetPropertyAsACString(NS_ConvertUTF8toUTF16(key),
+ info);
+
+ NS_ENSURE_SUCCESS(rv, false);
+
+ strncpy(aResult, info.get(), aResultLength);
+
+ return true;
+}
+
+static PluginHost sPluginHost = {
+ nullptr,
+ nullptr,
+ nullptr,
+ nullptr,
+ GetIntPref,
+ GetSystemInfoString
+};
+
+// Return true if Omx decoding is supported on the device. This checks the
+// built in whitelist/blacklist and preferences to see if that is overridden.
+static bool IsOmxSupported()
+{
+ bool forceEnabled =
+ Preferences::GetBool("stagefright.force-enabled", false);
+ bool disabled =
+ Preferences::GetBool("stagefright.disabled", false);
+
+ if (disabled) {
+ NS_WARNING("XXX stagefright disabled\n");
+ return false;
+ }
+
+ ScopedGfxFeatureReporter reporter("Stagefright", forceEnabled);
+
+ if (!forceEnabled) {
+ nsCOMPtr<nsIGfxInfo> gfxInfo = services::GetGfxInfo();
+ if (gfxInfo) {
+ int32_t status;
+ nsCString discardFailure;
+ if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(nsIGfxInfo::FEATURE_STAGEFRIGHT, discardFailure, &status))) {
+ if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
+ NS_WARNING("XXX stagefright blacklisted\n");
+ return false;
+ }
+ }
+ }
+ }
+
+ reporter.SetSuccessful();
+ return true;
+}
+
+// Return the name of the shared library that implements Omx based decoding. This varies
+// depending on libstagefright version installed on the device and whether it is B2G vs Android.
+// nullptr is returned if Omx decoding is not supported on the device.
+static const char* GetOmxLibraryName()
+{
+#if defined(ANDROID) && !defined(MOZ_WIDGET_GONK)
+ nsCOMPtr<nsIPropertyBag2> infoService = do_GetService("@mozilla.org/system-info;1");
+ NS_ASSERTION(infoService, "Could not find a system info service");
+
+ int32_t version;
+ nsresult rv = infoService->GetPropertyAsInt32(NS_LITERAL_STRING("version"), &version);
+ if (NS_SUCCEEDED(rv)) {
+ ALOG("Android Version is: %d", version);
+ }
+
+ nsAutoString release_version;
+ rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("release_version"), release_version);
+ if (NS_SUCCEEDED(rv)) {
+ ALOG("Android Release Version is: %s", NS_LossyConvertUTF16toASCII(release_version).get());
+ }
+
+ nsAutoString device;
+ rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("device"), device);
+ if (NS_SUCCEEDED(rv)) {
+ ALOG("Android Device is: %s", NS_LossyConvertUTF16toASCII(device).get());
+ }
+
+ nsAutoString manufacturer;
+ rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("manufacturer"), manufacturer);
+ if (NS_SUCCEEDED(rv)) {
+ ALOG("Android Manufacturer is: %s", NS_LossyConvertUTF16toASCII(manufacturer).get());
+ }
+
+ nsAutoString hardware;
+ rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("hardware"), hardware);
+ if (NS_SUCCEEDED(rv)) {
+ ALOG("Android Hardware is: %s", NS_LossyConvertUTF16toASCII(hardware).get());
+ }
+#endif
+
+ if (!IsOmxSupported())
+ return nullptr;
+
+#if defined(ANDROID) && !defined(MOZ_WIDGET_GONK)
+ if (version >= 17) {
+ return "libomxpluginkk.so";
+ }
+
+ // Ice Cream Sandwich and Jellybean
+ return "libomxplugin.so";
+
+#elif defined(ANDROID) && defined(MOZ_WIDGET_GONK)
+ return "libomxplugin.so";
+#else
+ return nullptr;
+#endif
+}
+
+AndroidMediaPluginHost::AndroidMediaPluginHost() {
+ MOZ_COUNT_CTOR(AndroidMediaPluginHost);
+ MOZ_ASSERT(NS_IsMainThread());
+
+ mResourceServer = AndroidMediaResourceServer::Start();
+
+ const char* name = GetOmxLibraryName();
+ ALOG("Loading OMX Plugin: %s", name ? name : "nullptr");
+ if (name) {
+ char *path = PR_GetLibraryFilePathname("libxul.so", (PRFuncPtr) GetOmxLibraryName);
+ PRLibrary *lib = nullptr;
+ if (path) {
+ nsAutoCString libpath(path);
+ PR_Free(path);
+ int32_t slash = libpath.RFindChar('/');
+ if (slash != kNotFound) {
+ libpath.Truncate(slash + 1);
+ libpath.Append(name);
+ lib = PR_LoadLibrary(libpath.get());
+ }
+ }
+ if (!lib)
+ lib = PR_LoadLibrary(name);
+
+ if (lib) {
+ Manifest *manifest = static_cast<Manifest *>(PR_FindSymbol(lib, "MPAPI_MANIFEST"));
+ if (manifest) {
+ mPlugins.AppendElement(manifest);
+ ALOG("OMX plugin successfully loaded");
+ }
+ }
+ }
+}
+
+AndroidMediaPluginHost::~AndroidMediaPluginHost() {
+ mResourceServer->Stop();
+ MOZ_COUNT_DTOR(AndroidMediaPluginHost);
+}
+
+bool AndroidMediaPluginHost::FindDecoder(const nsACString& aMimeType, const char* const** aCodecs)
+{
+ const char *chars;
+ size_t len = NS_CStringGetData(aMimeType, &chars, nullptr);
+ for (size_t n = 0; n < mPlugins.Length(); ++n) {
+ Manifest *plugin = mPlugins[n];
+ const char* const *codecs;
+ if (plugin->CanDecode(chars, len, &codecs)) {
+ if (aCodecs)
+ *aCodecs = codecs;
+ return true;
+ }
+ }
+ return false;
+}
+
+MPAPI::Decoder *AndroidMediaPluginHost::CreateDecoder(MediaResource *aResource, const nsACString& aMimeType)
+{
+ NS_ENSURE_TRUE(aResource, nullptr);
+
+ nsAutoPtr<Decoder> decoder(new Decoder());
+ if (!decoder) {
+ return nullptr;
+ }
+
+ const char *chars;
+ size_t len = NS_CStringGetData(aMimeType, &chars, nullptr);
+ for (size_t n = 0; n < mPlugins.Length(); ++n) {
+ Manifest *plugin = mPlugins[n];
+ const char* const *codecs;
+ if (!plugin->CanDecode(chars, len, &codecs)) {
+ continue;
+ }
+
+ nsCString url;
+ nsresult rv = mResourceServer->AddResource(aResource, url);
+ if (NS_FAILED (rv)) continue;
+
+ decoder->mResource = strdup(url.get());
+ if (plugin->CreateDecoder(&sPluginHost, decoder, chars, len)) {
+ return decoder.forget();
+ }
+ }
+
+ return nullptr;
+}
+
+void AndroidMediaPluginHost::DestroyDecoder(Decoder *aDecoder)
+{
+ aDecoder->DestroyDecoder(aDecoder);
+ char* resource = GetResource(aDecoder);
+ if (resource) {
+ // resource *shouldn't* be null, but check anyway just in case the plugin
+ // decoder does something stupid.
+ mResourceServer->RemoveResource(nsCString(resource));
+ free(resource);
+ }
+ delete aDecoder;
+}
+
+AndroidMediaPluginHost *sAndroidMediaPluginHost = nullptr;
+AndroidMediaPluginHost *EnsureAndroidMediaPluginHost()
+{
+ MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
+ if (!sAndroidMediaPluginHost) {
+ sAndroidMediaPluginHost = new AndroidMediaPluginHost();
+ }
+ return sAndroidMediaPluginHost;
+}
+
+AndroidMediaPluginHost *GetAndroidMediaPluginHost()
+{
+ MOZ_ASSERT(sAndroidMediaPluginHost);
+ return sAndroidMediaPluginHost;
+}
+
+void AndroidMediaPluginHost::Shutdown()
+{
+ delete sAndroidMediaPluginHost;
+ sAndroidMediaPluginHost = nullptr;
+}
+
+} // namespace mozilla
diff --git a/dom/media/android/AndroidMediaPluginHost.h b/dom/media/android/AndroidMediaPluginHost.h
new file mode 100644
index 000000000..854b7f21e
--- /dev/null
+++ b/dom/media/android/AndroidMediaPluginHost.h
@@ -0,0 +1,41 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(AndroidMediaPluginHost_h_)
+#define AndroidMediaPluginHost_h_
+
+#include "nsTArray.h"
+#include "MediaResource.h"
+#include "MPAPI.h"
+#include "AndroidMediaResourceServer.h"
+
+namespace mozilla {
+
+class AndroidMediaPluginHost {
+ RefPtr<AndroidMediaResourceServer> mResourceServer;
+ nsTArray<MPAPI::Manifest *> mPlugins;
+
+ MPAPI::Manifest *FindPlugin(const nsACString& aMimeType);
+public:
+ AndroidMediaPluginHost();
+ ~AndroidMediaPluginHost();
+
+ static void Shutdown();
+
+ bool FindDecoder(const nsACString& aMimeType, const char* const** aCodecs);
+ MPAPI::Decoder *CreateDecoder(mozilla::MediaResource *aResource, const nsACString& aMimeType);
+ void DestroyDecoder(MPAPI::Decoder *aDecoder);
+};
+
+// Must be called on the main thread. Creates the plugin host if it doesn't
+// already exist.
+AndroidMediaPluginHost *EnsureAndroidMediaPluginHost();
+
+// May be called on any thread after EnsureAndroidMediaPluginHost has been called.
+AndroidMediaPluginHost *GetAndroidMediaPluginHost();
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/android/AndroidMediaReader.cpp b/dom/media/android/AndroidMediaReader.cpp
new file mode 100644
index 000000000..12afacbc9
--- /dev/null
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -0,0 +1,449 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#include "AndroidMediaReader.h"
+#include "mozilla/TimeStamp.h"
+#include "mozilla/gfx/Point.h"
+#include "MediaResource.h"
+#include "VideoUtils.h"
+#include "AndroidMediaDecoder.h"
+#include "AndroidMediaPluginHost.h"
+#include "MediaDecoderStateMachine.h"
+#include "ImageContainer.h"
+#include "AbstractMediaDecoder.h"
+#include "gfx2DGlue.h"
+#include "VideoFrameContainer.h"
+#include "mozilla/CheckedInt.h"
+
+namespace mozilla {
+
+using namespace mozilla::gfx;
+using namespace mozilla::media;
+
+typedef mozilla::layers::Image Image;
+typedef mozilla::layers::PlanarYCbCrImage PlanarYCbCrImage;
+
+AndroidMediaReader::AndroidMediaReader(AbstractMediaDecoder *aDecoder,
+ const nsACString& aContentType) :
+ MediaDecoderReader(aDecoder),
+ mType(aContentType),
+ mPlugin(nullptr),
+ mHasAudio(false),
+ mHasVideo(false),
+ mVideoSeekTimeUs(-1),
+ mAudioSeekTimeUs(-1)
+{
+}
+
+nsresult AndroidMediaReader::ReadMetadata(MediaInfo* aInfo,
+ MetadataTags** aTags)
+{
+ MOZ_ASSERT(OnTaskQueue());
+
+ if (!mPlugin) {
+ mPlugin = GetAndroidMediaPluginHost()->CreateDecoder(mDecoder->GetResource(), mType);
+ if (!mPlugin) {
+ return NS_ERROR_FAILURE;
+ }
+ }
+
+ // Set the total duration (the max of the audio and video track).
+ int64_t durationUs;
+ mPlugin->GetDuration(mPlugin, &durationUs);
+ if (durationUs) {
+ mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(durationUs));
+ }
+
+ if (mPlugin->HasVideo(mPlugin)) {
+ int32_t width, height;
+ mPlugin->GetVideoParameters(mPlugin, &width, &height);
+ nsIntRect pictureRect(0, 0, width, height);
+
+ // Validate the container-reported frame and pictureRect sizes. This ensures
+ // that our video frame creation code doesn't overflow.
+ nsIntSize displaySize(width, height);
+ nsIntSize frameSize(width, height);
+ if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
+ return NS_ERROR_FAILURE;
+ }
+
+ // Video track's frame sizes will not overflow. Activate the video track.
+ mHasVideo = true;
+ mInfo.mVideo.mDisplay = displaySize;
+ mPicture = pictureRect;
+ mInitialFrame = frameSize;
+ VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
+ if (container) {
+ container->ClearCurrentFrame(IntSize(displaySize.width, displaySize.height));
+ }
+ }
+
+ if (mPlugin->HasAudio(mPlugin)) {
+ int32_t numChannels, sampleRate;
+ mPlugin->GetAudioParameters(mPlugin, &numChannels, &sampleRate);
+ mHasAudio = true;
+ mInfo.mAudio.mChannels = numChannels;
+ mInfo.mAudio.mRate = sampleRate;
+ }
+
+ *aInfo = mInfo;
+ *aTags = nullptr;
+ return NS_OK;
+}
+
+RefPtr<ShutdownPromise>
+AndroidMediaReader::Shutdown()
+{
+ ResetDecode();
+ if (mPlugin) {
+ GetAndroidMediaPluginHost()->DestroyDecoder(mPlugin);
+ mPlugin = nullptr;
+ }
+
+ return MediaDecoderReader::Shutdown();
+}
+
+// Resets all state related to decoding, emptying all buffers etc.
+nsresult AndroidMediaReader::ResetDecode(TrackSet aTracks)
+{
+ if (mLastVideoFrame) {
+ mLastVideoFrame = nullptr;
+ }
+ mSeekRequest.DisconnectIfExists();
+ mSeekPromise.RejectIfExists(NS_OK, __func__);
+ return MediaDecoderReader::ResetDecode(aTracks);
+}
+
+bool AndroidMediaReader::DecodeVideoFrame(bool &aKeyframeSkip,
+ int64_t aTimeThreshold)
+{
+ // Record number of frames decoded and parsed. Automatically update the
+ // stats counters using the AutoNotifyDecoded stack-based class.
+ AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
+
+ // Throw away the currently buffered frame if we are seeking.
+ if (mLastVideoFrame && mVideoSeekTimeUs != -1) {
+ mLastVideoFrame = nullptr;
+ }
+
+ ImageBufferCallback bufferCallback(mDecoder->GetImageContainer());
+ RefPtr<Image> currentImage;
+
+ // Read next frame
+ while (true) {
+ MPAPI::VideoFrame frame;
+ if (!mPlugin->ReadVideo(mPlugin, &frame, mVideoSeekTimeUs, &bufferCallback)) {
+ // We reached the end of the video stream. If we have a buffered
+ // video frame, push it the video queue using the total duration
+ // of the video as the end time.
+ if (mLastVideoFrame) {
+ int64_t durationUs;
+ mPlugin->GetDuration(mPlugin, &durationUs);
+ durationUs = std::max<int64_t>(durationUs - mLastVideoFrame->mTime, 0);
+ RefPtr<VideoData> data = VideoData::ShallowCopyUpdateDuration(mLastVideoFrame,
+ durationUs);
+ mVideoQueue.Push(data);
+ mLastVideoFrame = nullptr;
+ }
+ return false;
+ }
+ mVideoSeekTimeUs = -1;
+
+ if (aKeyframeSkip) {
+ // Disable keyframe skipping for now as
+ // stagefright doesn't seem to be telling us
+ // when a frame is a keyframe.
+#if 0
+ if (!frame.mKeyFrame) {
+ ++a.mStats.mParsedFrames;
+ ++a.mStats.mDroppedFrames;
+ continue;
+ }
+#endif
+ aKeyframeSkip = false;
+ }
+
+ if (frame.mSize == 0)
+ return true;
+
+ currentImage = bufferCallback.GetImage();
+ int64_t pos = mDecoder->GetResource()->Tell();
+ IntRect picture = mPicture;
+
+ RefPtr<VideoData> v;
+ if (currentImage) {
+ gfx::IntSize frameSize = currentImage->GetSize();
+ if (frameSize.width != mInitialFrame.width ||
+ frameSize.height != mInitialFrame.height) {
+ // Frame size is different from what the container reports. This is legal,
+ // and we will preserve the ratio of the crop rectangle as it
+ // was reported relative to the picture size reported by the container.
+ picture.x = (mPicture.x * frameSize.width) / mInitialFrame.width;
+ picture.y = (mPicture.y * frameSize.height) / mInitialFrame.height;
+ picture.width = (frameSize.width * mPicture.width) / mInitialFrame.width;
+ picture.height = (frameSize.height * mPicture.height) / mInitialFrame.height;
+ }
+
+ v = VideoData::CreateFromImage(mInfo.mVideo,
+ pos,
+ frame.mTimeUs,
+ 1, // We don't know the duration yet.
+ currentImage,
+ frame.mKeyFrame,
+ -1,
+ picture);
+ } else {
+ // Assume YUV
+ VideoData::YCbCrBuffer b;
+ b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
+ b.mPlanes[0].mStride = frame.Y.mStride;
+ b.mPlanes[0].mHeight = frame.Y.mHeight;
+ b.mPlanes[0].mWidth = frame.Y.mWidth;
+ b.mPlanes[0].mOffset = frame.Y.mOffset;
+ b.mPlanes[0].mSkip = frame.Y.mSkip;
+
+ b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
+ b.mPlanes[1].mStride = frame.Cb.mStride;
+ b.mPlanes[1].mHeight = frame.Cb.mHeight;
+ b.mPlanes[1].mWidth = frame.Cb.mWidth;
+ b.mPlanes[1].mOffset = frame.Cb.mOffset;
+ b.mPlanes[1].mSkip = frame.Cb.mSkip;
+
+ b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
+ b.mPlanes[2].mStride = frame.Cr.mStride;
+ b.mPlanes[2].mHeight = frame.Cr.mHeight;
+ b.mPlanes[2].mWidth = frame.Cr.mWidth;
+ b.mPlanes[2].mOffset = frame.Cr.mOffset;
+ b.mPlanes[2].mSkip = frame.Cr.mSkip;
+
+ if (frame.Y.mWidth != mInitialFrame.width ||
+ frame.Y.mHeight != mInitialFrame.height) {
+
+ // Frame size is different from what the container reports. This is legal,
+ // and we will preserve the ratio of the crop rectangle as it
+ // was reported relative to the picture size reported by the container.
+ picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
+ picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
+ picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
+ picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
+ }
+
+ // This is the approximate byte position in the stream.
+ v = VideoData::CreateAndCopyData(mInfo.mVideo,
+ mDecoder->GetImageContainer(),
+ pos,
+ frame.mTimeUs,
+ 1, // We don't know the duration yet.
+ b,
+ frame.mKeyFrame,
+ -1,
+ picture);
+ }
+
+ if (!v) {
+ return false;
+ }
+ a.mStats.mParsedFrames++;
+ a.mStats.mDecodedFrames++;
+ NS_ASSERTION(a.mStats.mDecodedFrames <= a.mStats.mParsedFrames, "Expect to decode fewer frames than parsed in AndroidMedia...");
+
+ // Since MPAPI doesn't give us the end time of frames, we keep one frame
+ // buffered in AndroidMediaReader and push it into the queue as soon
+ // we read the following frame so we can use that frame's start time as
+ // the end time of the buffered frame.
+ if (!mLastVideoFrame) {
+ mLastVideoFrame = v;
+ continue;
+ }
+
+ // Calculate the duration as the timestamp of the current frame minus the
+ // timestamp of the previous frame. We can then return the previously
+ // decoded frame, and it will have a valid timestamp.
+ int64_t duration = v->mTime - mLastVideoFrame->mTime;
+ mLastVideoFrame = VideoData::ShallowCopyUpdateDuration(mLastVideoFrame, duration);
+
+ // We have the start time of the next frame, so we can push the previous
+ // frame into the queue, except if the end time is below the threshold,
+ // in which case it wouldn't be displayed anyway.
+ if (mLastVideoFrame->GetEndTime() < aTimeThreshold) {
+ mLastVideoFrame = nullptr;
+ continue;
+ }
+
+ // Buffer the current frame we just decoded.
+ mVideoQueue.Push(mLastVideoFrame);
+ mLastVideoFrame = v;
+
+ break;
+ }
+
+ return true;
+}
+
+bool AndroidMediaReader::DecodeAudioData()
+{
+ MOZ_ASSERT(OnTaskQueue());
+
+ // This is the approximate byte position in the stream.
+ int64_t pos = mDecoder->GetResource()->Tell();
+
+ // Read next frame
+ MPAPI::AudioFrame source;
+ if (!mPlugin->ReadAudio(mPlugin, &source, mAudioSeekTimeUs)) {
+ return false;
+ }
+ mAudioSeekTimeUs = -1;
+
+ // Ignore empty buffers which stagefright media read will sporadically return
+ if (source.mSize == 0)
+ return true;
+
+ uint32_t frames = source.mSize / (source.mAudioChannels *
+ sizeof(AudioDataValue));
+
+ typedef AudioCompactor::NativeCopy MPCopy;
+ return mAudioCompactor.Push(pos,
+ source.mTimeUs,
+ source.mAudioSampleRate,
+ frames,
+ source.mAudioChannels,
+ MPCopy(static_cast<uint8_t *>(source.mData),
+ source.mSize,
+ source.mAudioChannels));
+}
+
+RefPtr<MediaDecoderReader::SeekPromise>
+AndroidMediaReader::Seek(SeekTarget aTarget, int64_t aEndTime)
+{
+ MOZ_ASSERT(OnTaskQueue());
+
+ RefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);
+ if (mHasAudio && mHasVideo) {
+ // The decoder seeks/demuxes audio and video streams separately. So if
+ // we seek both audio and video to aTarget, the audio stream can typically
+ // seek closer to the seek target, since typically every audio block is
+ // a sync point, whereas for video there are only keyframes once every few
+ // seconds. So if we have both audio and video, we must seek the video
+    // stream to the preceding keyframe first, get the stream time, and then
+ // seek the audio stream to match the video stream's time. Otherwise, the
+ // audio and video streams won't be in sync after the seek.
+ mVideoSeekTimeUs = aTarget.GetTime().ToMicroseconds();
+
+ RefPtr<AndroidMediaReader> self = this;
+ mSeekRequest.Begin(DecodeToFirstVideoData()->Then(OwnerThread(), __func__, [self] (MediaData* v) {
+ self->mSeekRequest.Complete();
+ self->mAudioSeekTimeUs = v->mTime;
+ self->mSeekPromise.Resolve(media::TimeUnit::FromMicroseconds(self->mAudioSeekTimeUs), __func__);
+ }, [self, aTarget] () {
+ self->mSeekRequest.Complete();
+ self->mAudioSeekTimeUs = aTarget.GetTime().ToMicroseconds();
+ self->mSeekPromise.Resolve(aTarget.GetTime(), __func__);
+ }));
+ } else {
+ mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget.GetTime().ToMicroseconds();
+ mSeekPromise.Resolve(aTarget.GetTime(), __func__);
+ }
+
+ return p;
+}
+
+AndroidMediaReader::ImageBufferCallback::ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer) :
+ mImageContainer(aImageContainer)
+{
+}
+
+void *
+AndroidMediaReader::ImageBufferCallback::operator()(size_t aWidth, size_t aHeight,
+ MPAPI::ColorFormat aColorFormat)
+{
+ if (!mImageContainer) {
+ NS_WARNING("No image container to construct an image");
+ return nullptr;
+ }
+
+ RefPtr<Image> image;
+ switch(aColorFormat) {
+ case MPAPI::RGB565:
+ image = mozilla::layers::CreateSharedRGBImage(mImageContainer,
+ nsIntSize(aWidth, aHeight),
+ SurfaceFormat::R5G6B5_UINT16);
+ if (!image) {
+ NS_WARNING("Could not create rgb image");
+ return nullptr;
+ }
+
+ mImage = image;
+ return image->GetBuffer();
+ case MPAPI::I420:
+ return CreateI420Image(aWidth, aHeight);
+ default:
+ NS_NOTREACHED("Color format not supported");
+ return nullptr;
+ }
+}
+
+uint8_t *
+AndroidMediaReader::ImageBufferCallback::CreateI420Image(size_t aWidth,
+ size_t aHeight)
+{
+ RefPtr<PlanarYCbCrImage> yuvImage = mImageContainer->CreatePlanarYCbCrImage();
+ mImage = yuvImage;
+
+ if (!yuvImage) {
+ NS_WARNING("Could not create I420 image");
+ return nullptr;
+ }
+
+ // Use uint32_t throughout to match AllocateAndGetNewBuffer's param
+ const auto checkedFrameSize =
+ CheckedInt<uint32_t>(aWidth) * aHeight;
+
+ // Allocate enough for one full resolution Y plane
+ // and two quarter resolution Cb/Cr planes.
+ const auto checkedBufferSize =
+ checkedFrameSize + checkedFrameSize / 2;
+
+ if (!checkedBufferSize.isValid()) { // checks checkedFrameSize too
+ NS_WARNING("Could not create I420 image");
+ return nullptr;
+ }
+
+ const auto frameSize = checkedFrameSize.value();
+
+ uint8_t *buffer =
+ yuvImage->AllocateAndGetNewBuffer(checkedBufferSize.value());
+
+ mozilla::layers::PlanarYCbCrData frameDesc;
+
+ frameDesc.mYChannel = buffer;
+ frameDesc.mCbChannel = buffer + frameSize;
+ frameDesc.mCrChannel = frameDesc.mCbChannel + frameSize / 4;
+
+ frameDesc.mYSize = IntSize(aWidth, aHeight);
+ frameDesc.mCbCrSize = IntSize(aWidth / 2, aHeight / 2);
+
+ frameDesc.mYStride = aWidth;
+ frameDesc.mCbCrStride = aWidth / 2;
+
+ frameDesc.mYSkip = 0;
+ frameDesc.mCbSkip = 0;
+ frameDesc.mCrSkip = 0;
+
+ frameDesc.mPicX = 0;
+ frameDesc.mPicY = 0;
+ frameDesc.mPicSize = IntSize(aWidth, aHeight);
+
+ yuvImage->AdoptData(frameDesc);
+
+ return buffer;
+}
+
+already_AddRefed<Image>
+AndroidMediaReader::ImageBufferCallback::GetImage()
+{
+ return mImage.forget();
+}
+
+} // namespace mozilla
diff --git a/dom/media/android/AndroidMediaReader.h b/dom/media/android/AndroidMediaReader.h
new file mode 100644
index 000000000..def85a343
--- /dev/null
+++ b/dom/media/android/AndroidMediaReader.h
@@ -0,0 +1,75 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(AndroidMediaReader_h_)
+#define AndroidMediaReader_h_
+
+#include "mozilla/Attributes.h"
+#include "MediaResource.h"
+#include "MediaDecoderReader.h"
+#include "ImageContainer.h"
+#include "mozilla/layers/SharedRGBImage.h"
+
+#include "MPAPI.h"
+
+class nsACString;
+
+namespace mozilla {
+
+class AbstractMediaDecoder;
+
+namespace layers {
+class ImageContainer;
+}
+
+class AndroidMediaReader : public MediaDecoderReader
+{
+ nsCString mType;
+ MPAPI::Decoder *mPlugin;
+ bool mHasAudio;
+ bool mHasVideo;
+ nsIntRect mPicture;
+ nsIntSize mInitialFrame;
+ int64_t mVideoSeekTimeUs;
+ int64_t mAudioSeekTimeUs;
+ RefPtr<VideoData> mLastVideoFrame;
+ MozPromiseHolder<MediaDecoderReader::SeekPromise> mSeekPromise;
+ MozPromiseRequestHolder<MediaDecoderReader::MediaDataPromise> mSeekRequest;
+public:
+ AndroidMediaReader(AbstractMediaDecoder* aDecoder,
+ const nsACString& aContentType);
+
+ nsresult ResetDecode(TrackSet aTracks = TrackSet(TrackInfo::kAudioTrack,
+ TrackInfo::kVideoTrack)) override;
+
+ bool DecodeAudioData() override;
+ bool DecodeVideoFrame(bool &aKeyframeSkip, int64_t aTimeThreshold) override;
+
+ nsresult ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) override;
+ RefPtr<SeekPromise> Seek(SeekTarget aTarget, int64_t aEndTime) override;
+
+ RefPtr<ShutdownPromise> Shutdown() override;
+
+ class ImageBufferCallback : public MPAPI::BufferCallback {
+ typedef mozilla::layers::Image Image;
+
+ public:
+ ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer);
+ void *operator()(size_t aWidth, size_t aHeight,
+ MPAPI::ColorFormat aColorFormat) override;
+ already_AddRefed<Image> GetImage();
+
+ private:
+ uint8_t *CreateI420Image(size_t aWidth, size_t aHeight);
+
+ mozilla::layers::ImageContainer *mImageContainer;
+ RefPtr<Image> mImage;
+ };
+
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/android/AndroidMediaResourceServer.cpp b/dom/media/android/AndroidMediaResourceServer.cpp
new file mode 100644
index 000000000..bd76a8c68
--- /dev/null
+++ b/dom/media/android/AndroidMediaResourceServer.cpp
@@ -0,0 +1,503 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#include "mozilla/Assertions.h"
+#include "mozilla/Base64.h"
+#include "mozilla/IntegerPrintfMacros.h"
+#include "mozilla/UniquePtr.h"
+#include "nsThreadUtils.h"
+#include "nsIServiceManager.h"
+#include "nsISocketTransport.h"
+#include "nsIOutputStream.h"
+#include "nsIInputStream.h"
+#include "nsIRandomGenerator.h"
+#include "nsReadLine.h"
+#include "nsNetCID.h"
+#include "VideoUtils.h"
+#include "MediaResource.h"
+#include "AndroidMediaResourceServer.h"
+
+#if defined(_MSC_VER)
+#define strtoll _strtoi64
+#endif
+
+using namespace mozilla;
+
+/*
+ ReadCRLF is a variant of NS_ReadLine from nsReadLine.h that deals
+ with the carriage return/line feed requirements of HTTP requests.
+*/
+template<typename CharT, class StreamType, class StringType>
+nsresult
+ReadCRLF (StreamType* aStream, nsLineBuffer<CharT> * aBuffer,
+ StringType & aLine, bool *aMore)
+{
+ // eollast is true if the last character in the buffer is a '\r',
+ // signaling a potential '\r\n' sequence split between reads.
+ bool eollast = false;
+
+ aLine.Truncate();
+
+ while (1) { // will be returning out of this loop on eol or eof
+ if (aBuffer->start == aBuffer->end) { // buffer is empty. Read into it.
+ uint32_t bytesRead;
+ nsresult rv = aStream->Read(aBuffer->buf, kLineBufferSize, &bytesRead);
+ if (NS_FAILED(rv) || bytesRead == 0) {
+ *aMore = false;
+ return rv;
+ }
+ aBuffer->start = aBuffer->buf;
+ aBuffer->end = aBuffer->buf + bytesRead;
+ *(aBuffer->end) = '\0';
+ }
+
+ /*
+ * Walk the buffer looking for an end-of-line.
+ * There are 4 cases to consider:
+ * 1. the CR char is the last char in the buffer
+ * 2. the CRLF sequence are the last characters in the buffer
+ * 3. the CRLF sequence + one or more chars at the end of the buffer
+ * we need at least one char after the first CRLF sequence to
+ * set |aMore| correctly.
+ * 4. The LF character is the first char in the buffer when eollast is
+ * true.
+ */
+ CharT* current = aBuffer->start;
+ if (eollast) { // Case 4
+ if (*current == '\n') {
+ aBuffer->start = ++current;
+ *aMore = true;
+ return NS_OK;
+ }
+ else {
+ eollast = false;
+ aLine.Append('\r');
+ }
+ }
+ // Cases 2 and 3
+ for ( ; current < aBuffer->end-1; ++current) {
+ if (*current == '\r' && *(current+1) == '\n') {
+ *current++ = '\0';
+ *current++ = '\0';
+ aLine.Append(aBuffer->start);
+ aBuffer->start = current;
+ *aMore = true;
+ return NS_OK;
+ }
+ }
+ // Case 1
+ if (*current == '\r') {
+ eollast = true;
+ *current++ = '\0';
+ }
+
+ aLine.Append(aBuffer->start);
+ aBuffer->start = aBuffer->end; // mark the buffer empty
+ }
+}
+
+// Each client HTTP request results in a thread being spawned to process it.
+// That thread has a single event dispatched to it which handles the HTTP
+// protocol. It parses the headers and forwards data from the MediaResource
+// associated with the URL back to client. When the request is complete it will
+// shutdown the thread.
+class ServeResourceEvent : public Runnable {
+private:
+ // Reading from this reads the data sent from the client.
+ nsCOMPtr<nsIInputStream> mInput;
+
+ // Writing to this sends data to the client.
+ nsCOMPtr<nsIOutputStream> mOutput;
+
+ // The AndroidMediaResourceServer that owns the MediaResource instances
+ // served. This is used to lookup the MediaResource from the URL.
+ RefPtr<AndroidMediaResourceServer> mServer;
+
+ // Write 'aBufferLength' bytes from 'aBuffer' to 'mOutput'. This
+ // method ensures all the data is written by checking the number
+ // of bytes returned from the output streams 'Write' method and
+ // looping until done.
+ nsresult WriteAll(char const* aBuffer, int32_t aBufferLength);
+
+public:
+ ServeResourceEvent(nsIInputStream* aInput, nsIOutputStream* aOutput,
+ AndroidMediaResourceServer* aServer)
+ : mInput(aInput), mOutput(aOutput), mServer(aServer) {}
+
+ // This method runs on the thread and exits when it has completed the
+ // HTTP request.
+ NS_IMETHOD Run();
+
+ // Given the first line of an HTTP request, parse the URL requested and
+ // return the MediaResource for that URL.
+ already_AddRefed<MediaResource> GetMediaResource(nsCString const& aHTTPRequest);
+
+ // Gracefully shutdown the thread and cleanup resources
+ void Shutdown();
+};
+
+nsresult
+ServeResourceEvent::WriteAll(char const* aBuffer, int32_t aBufferLength)
+{
+ while (aBufferLength > 0) {
+ uint32_t written = 0;
+ nsresult rv = mOutput->Write(aBuffer, aBufferLength, &written);
+ if (NS_FAILED (rv)) return rv;
+
+ aBufferLength -= written;
+ aBuffer += written;
+ }
+
+ return NS_OK;
+}
+
+already_AddRefed<MediaResource>
+ServeResourceEvent::GetMediaResource(nsCString const& aHTTPRequest)
+{
+ // Check that the HTTP method is GET
+ const char* HTTP_METHOD = "GET ";
+ if (strncmp(aHTTPRequest.get(), HTTP_METHOD, strlen(HTTP_METHOD)) != 0) {
+ return nullptr;
+ }
+
+ const char* url_start = strchr(aHTTPRequest.get(), ' ');
+ if (!url_start) {
+ return nullptr;
+ }
+
+ const char* url_end = strrchr(++url_start, ' ');
+ if (!url_end) {
+ return nullptr;
+ }
+
+  // The path extracted from the HTTP request is used as a key in a hash
+ // table. It is not related to retrieving data from the filesystem so
+ // we don't need to do any sanity checking on ".." paths and similar
+ // exploits.
+ nsCString relative(url_start, url_end - url_start);
+ RefPtr<MediaResource> resource =
+ mServer->GetResource(mServer->GetURLPrefix() + relative);
+ return resource.forget();
+}
+
+NS_IMETHODIMP
+ServeResourceEvent::Run() {
+ bool more = false; // Are there HTTP headers to read after the first line
+ nsCString line; // Contains the current line read from input stream
+ nsLineBuffer<char>* buffer = new nsLineBuffer<char>();
+ nsresult rv = ReadCRLF(mInput.get(), buffer, line, &more);
+ if (NS_FAILED(rv)) { Shutdown(); return rv; }
+
+ // First line contains the HTTP GET request. Extract the URL and obtain
+ // the MediaResource for it.
+ RefPtr<MediaResource> resource = GetMediaResource(line);
+ if (!resource) {
+ const char* response_404 = "HTTP/1.1 404 Not Found\r\n"
+ "Content-Length: 0\r\n\r\n";
+ rv = WriteAll(response_404, strlen(response_404));
+ Shutdown();
+ return rv;
+ }
+
+ // Offset in bytes to start reading from resource.
+ // This is zero by default but can be set to another starting value if
+ // this HTTP request includes a byte range request header.
+ int64_t start = 0;
+
+ // Keep reading lines until we get a zero length line, which is the HTTP
+ // protocol's way of signifying the end of headers and start of body, or
+ // until we have no more data to read.
+ while (more && line.Length() > 0) {
+ rv = ReadCRLF(mInput.get(), buffer, line, &more);
+ if (NS_FAILED(rv)) { Shutdown(); return rv; }
+
+ // Look for a byte range request header. If there is one, set the
+ // media resource offset to start from to that requested. Here we
+ // only check for the range request format used by Android rather
+ // than implementing all possibilities in the HTTP specification.
+ // That is, the range request is of the form:
+ // Range: bytes=nnnn-
+      // Where 'nnnn' is an integer number.
+ // The end of the range is not checked, instead we return up to
+ // the end of the resource and the client is informed of this via
+ // the content-range header.
+ NS_NAMED_LITERAL_CSTRING(byteRange, "Range: bytes=");
+ const char* s = strstr(line.get(), byteRange.get());
+ if (s) {
+ start = strtoll(s+byteRange.Length(), nullptr, 10);
+
+ // Clamp 'start' to be between 0 and the resource length.
+ start = std::max(int64_t(0), std::min(resource->GetLength(), start));
+ }
+ }
+
+ // HTTP response to use if this is a non byte range request
+ const char* response_normal = "HTTP/1.1 200 OK\r\n";
+
+ // HTTP response to use if this is a byte range request
+ const char* response_range = "HTTP/1.1 206 Partial Content\r\n";
+
+  // End of HTTP response headers is indicated by an empty line.
+ const char* response_end = "\r\n";
+
+ // If the request was a byte range request, we need to read from the
+ // requested offset. If the resource is non-seekable, or the seek
+ // fails, then the start offset is set back to zero. This results in all
+ // HTTP response data being as if the byte range request was not made.
+ if (start > 0 && !resource->IsTransportSeekable()) {
+ start = 0;
+ }
+
+ const char* response_line = start > 0 ?
+ response_range :
+ response_normal;
+ rv = WriteAll(response_line, strlen(response_line));
+ if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
+
+ // Buffer used for reading from the input stream and writing to
+ // the output stream. The buffer size should be big enough for the
+ // HTTP response headers sent below. A static_assert ensures
+ // this where the buffer is used.
+ const int buffer_size = 32768;
+ auto b = MakeUnique<char[]>(buffer_size);
+
+ // If we know the length of the resource, send a Content-Length header.
+ int64_t contentlength = resource->GetLength() - start;
+ if (contentlength > 0) {
+ static_assert (buffer_size > 1024,
+ "buffer_size must be large enough "
+ "to hold response headers");
+ snprintf(b.get(), buffer_size, "Content-Length: %" PRId64 "\r\n", contentlength);
+ rv = WriteAll(b.get(), strlen(b.get()));
+ if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
+ }
+
+ // If the request was a byte range request, respond with a Content-Range
+ // header which details the extent of the data returned.
+ if (start > 0) {
+ static_assert (buffer_size > 1024,
+ "buffer_size must be large enough "
+ "to hold response headers");
+ snprintf(b.get(), buffer_size, "Content-Range: "
+ "bytes %" PRId64 "-%" PRId64 "/%" PRId64 "\r\n",
+ start, resource->GetLength() - 1, resource->GetLength());
+ rv = WriteAll(b.get(), strlen(b.get()));
+ if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
+ }
+
+ rv = WriteAll(response_end, strlen(response_end));
+ if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
+
+ rv = mOutput->Flush();
+ if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
+
+ // Read data from media resource
+ uint32_t bytesRead = 0; // Number of bytes read/written to streams
+ rv = resource->ReadAt(start, b.get(), buffer_size, &bytesRead);
+ while (NS_SUCCEEDED(rv) && bytesRead != 0) {
+ // Keep track of what we think the starting position for the next read
+ // is. This is used in subsequent ReadAt calls to ensure we are reading
+ // from the correct offset in the case where another thread is reading
+    // from the same MediaResource.
+ start += bytesRead;
+
+ // Write data obtained from media resource to output stream
+ rv = WriteAll(b.get(), bytesRead);
+ if (NS_FAILED (rv)) break;
+
+ rv = resource->ReadAt(start, b.get(), 32768, &bytesRead);
+ }
+
+ Shutdown();
+ return NS_OK;
+}
+
+void
+ServeResourceEvent::Shutdown()
+{
+ // Cleanup resources and exit.
+ mInput->Close();
+ mOutput->Close();
+
+ // To shutdown the current thread we need to first exit this event.
+ // The Shutdown event below is posted to the main thread to do this.
+ nsCOMPtr<nsIRunnable> event = new ShutdownThreadEvent(NS_GetCurrentThread());
+ NS_DispatchToMainThread(event);
+}
+
+/*
+ This is the listener attached to the server socket. When an HTTP
+ request is made by the client the OnSocketAccepted method is
+ called. This method will spawn a thread to process the request.
+ The thread receives a single event which does the parsing of
+ the HTTP request and forwarding the data from the MediaResource
+ to the output stream of the request.
+
+ The MediaResource used for providing the request data is obtained
+ from the AndroidMediaResourceServer that created this listener, using the
+ URL the client requested.
+*/
+class ResourceSocketListener : public nsIServerSocketListener
+{
+public:
+ // The AndroidMediaResourceServer used to look up the MediaResource
+ // on requests.
+ RefPtr<AndroidMediaResourceServer> mServer;
+
+ NS_DECL_THREADSAFE_ISUPPORTS
+ NS_DECL_NSISERVERSOCKETLISTENER
+
+ ResourceSocketListener(AndroidMediaResourceServer* aServer) :
+ mServer(aServer)
+ {
+ }
+
+private:
+ virtual ~ResourceSocketListener() { }
+};
+
+NS_IMPL_ISUPPORTS(ResourceSocketListener, nsIServerSocketListener)
+
+NS_IMETHODIMP
+ResourceSocketListener::OnSocketAccepted(nsIServerSocket* aServ,
+ nsISocketTransport* aTrans)
+{
+ nsCOMPtr<nsIInputStream> input;
+ nsCOMPtr<nsIOutputStream> output;
+ nsresult rv;
+
+ rv = aTrans->OpenInputStream(nsITransport::OPEN_BLOCKING, 0, 0, getter_AddRefs(input));
+ if (NS_FAILED(rv)) return rv;
+
+ rv = aTrans->OpenOutputStream(nsITransport::OPEN_BLOCKING, 0, 0, getter_AddRefs(output));
+ if (NS_FAILED(rv)) return rv;
+
+ nsCOMPtr<nsIThread> thread;
+ rv = NS_NewThread(getter_AddRefs(thread));
+ if (NS_FAILED(rv)) return rv;
+
+ nsCOMPtr<nsIRunnable> event = new ServeResourceEvent(input.get(), output.get(), mServer);
+ return thread->Dispatch(event, NS_DISPATCH_NORMAL);
+}
+
+NS_IMETHODIMP
+ResourceSocketListener::OnStopListening(nsIServerSocket* aServ, nsresult aStatus)
+{
+ return NS_OK;
+}
+
+AndroidMediaResourceServer::AndroidMediaResourceServer() :
+ mMutex("AndroidMediaResourceServer")
+{
+}
+
+NS_IMETHODIMP
+AndroidMediaResourceServer::Run()
+{
+ MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
+ MutexAutoLock lock(mMutex);
+
+ nsresult rv;
+ mSocket = do_CreateInstance(NS_SERVERSOCKET_CONTRACTID, &rv);
+ if (NS_FAILED(rv)) return rv;
+
+ rv = mSocket->InitSpecialConnection(-1,
+ nsIServerSocket::LoopbackOnly
+ | nsIServerSocket::KeepWhenOffline,
+ -1);
+ if (NS_FAILED(rv)) return rv;
+
+ rv = mSocket->AsyncListen(new ResourceSocketListener(this));
+ if (NS_FAILED(rv)) return rv;
+
+ return NS_OK;
+}
+
+/* static */
+already_AddRefed<AndroidMediaResourceServer>
+AndroidMediaResourceServer::Start()
+{
+ MOZ_ASSERT(NS_IsMainThread());
+ RefPtr<AndroidMediaResourceServer> server = new AndroidMediaResourceServer();
+ server->Run();
+ return server.forget();
+}
+
+void
+AndroidMediaResourceServer::Stop()
+{
+ MutexAutoLock lock(mMutex);
+ mSocket->Close();
+ mSocket = nullptr;
+}
+
+nsresult
+AndroidMediaResourceServer::AppendRandomPath(nsCString& aUrl)
+{
+ // Use a cryptographic quality PRNG to generate raw random bytes
+ // and convert that to a base64 string for use as an URL path. This
+ // is based on code from nsExternalAppHandler::SetUpTempFile.
+ nsresult rv;
+ nsAutoCString salt;
+ rv = GenerateRandomPathName(salt, 16);
+ if (NS_FAILED(rv)) return rv;
+ aUrl += "/";
+ aUrl += salt;
+ return NS_OK;
+}
+
+nsresult
+AndroidMediaResourceServer::AddResource(mozilla::MediaResource* aResource, nsCString& aUrl)
+{
+ nsCString url = GetURLPrefix();
+ nsresult rv = AppendRandomPath(url);
+ if (NS_FAILED (rv)) return rv;
+
+ {
+ MutexAutoLock lock(mMutex);
+
+ // Adding a resource URL that already exists is considered an error.
+ if (mResources.find(url) != mResources.end()) return NS_ERROR_FAILURE;
+ mResources[url] = aResource;
+ }
+
+ aUrl = url;
+
+ return NS_OK;
+}
+
+void
+AndroidMediaResourceServer::RemoveResource(nsCString const& aUrl)
+{
+ MutexAutoLock lock(mMutex);
+ mResources.erase(aUrl);
+}
+
+nsCString
+AndroidMediaResourceServer::GetURLPrefix()
+{
+ MutexAutoLock lock(mMutex);
+
+ int32_t port = 0;
+ nsresult rv = mSocket->GetPort(&port);
+ if (NS_FAILED (rv) || port < 0) {
+ return nsCString("");
+ }
+
+ char buffer[256];
+ snprintf(buffer, sizeof(buffer), "http://127.0.0.1:%d", port >= 0 ? port : 0);
+ return nsCString(buffer);
+}
+
+already_AddRefed<MediaResource>
+AndroidMediaResourceServer::GetResource(nsCString const& aUrl)
+{
+ MutexAutoLock lock(mMutex);
+ ResourceMap::const_iterator it = mResources.find(aUrl);
+ if (it == mResources.end()) return nullptr;
+
+ RefPtr<MediaResource> resource = it->second;
+ return resource.forget();
+}
diff --git a/dom/media/android/AndroidMediaResourceServer.h b/dom/media/android/AndroidMediaResourceServer.h
new file mode 100644
index 000000000..68200f9c0
--- /dev/null
+++ b/dom/media/android/AndroidMediaResourceServer.h
@@ -0,0 +1,96 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(AndroidMediaResourceServer_h_)
+#define AndroidMediaResourceServer_h_
+
+#include <map>
+#include "nsIServerSocket.h"
+#include "MediaResource.h"
+
+namespace mozilla {
+
+class MediaResource;
+
+/*
+ AndroidMediaResourceServer instantiates a socket server that understands
+ HTTP requests for MediaResource instances. The server runs on an
+ automatically selected port and MediaResource instances are registered.
+  The registration returns a string URL that can be used to fetch the
+ resource. That URL contains a randomly generated path to make it
+ difficult for other local applications on the device to guess it.
+
+ The HTTP protocol is limited in that it supports only what the
+ Android DataSource implementation uses to fetch media. It
+ understands HTTP GET and byte range requests.
+
+ The intent of this class is to be used in Media backends that
+ have a system component that does its own network requests. These
+ requests are made against this server which then uses standard
+ Gecko network requests and media cache usage.
+
+ The AndroidMediaResourceServer can be instantiated on any thread and
+ its methods are threadsafe - they can be called on any thread.
+ The server socket itself is always run on the main thread and
+ this is done by the Start() static method by synchronously
+ dispatching to the main thread.
+*/
+class AndroidMediaResourceServer : public Runnable
+{
+private:
+ // Mutex protecting private members of AndroidMediaResourceServer.
+ // All member variables below this point in the class definition
+ // must acquire the mutex before access.
+ mozilla::Mutex mMutex;
+
+ // Server socket used to listen for incoming connections
+ nsCOMPtr<nsIServerSocket> mSocket;
+
+ // Mapping between MediaResource URL's to the MediaResource
+ // object served at that URL.
+ typedef std::map<nsCString,
+ RefPtr<mozilla::MediaResource> > ResourceMap;
+ ResourceMap mResources;
+
+ // Create a AndroidMediaResourceServer that will listen on an automatically
+ // selected port when started. This is private as it should only be
+ // called internally from the public 'Start' method.
+ AndroidMediaResourceServer();
+ NS_IMETHOD Run();
+
+ // Append a random URL path to a string. This is used for creating a
+  // unique URL for a resource which helps prevent malicious software
+ // running on the same machine as the server from guessing the URL
+ // and accessing video data.
+ nsresult AppendRandomPath(nsCString& aURL);
+
+public:
+ // Create a AndroidMediaResourceServer and start it listening. This call will
+ // perform a synchronous request on the main thread.
+ static already_AddRefed<AndroidMediaResourceServer> Start();
+
+ // Stops the server from listening and accepting further connections.
+ void Stop();
+
+ // Add a MediaResource to be served by this server. Stores the
+ // absolute URL that can be used to access the resource in 'aUrl'.
+ nsresult AddResource(mozilla::MediaResource* aResource, nsCString& aUrl);
+
+ // Remove a MediaResource so it is no longer served by this server.
+ // The URL provided must match exactly that provided by a previous
+ // call to "AddResource".
+ void RemoveResource(nsCString const& aUrl);
+
+ // Returns the prefix for HTTP requests to the server. This plus
+ // the result of AddResource results in an Absolute URL.
+ nsCString GetURLPrefix();
+
+  // Returns the resource associated with a given URL
+ already_AddRefed<mozilla::MediaResource> GetResource(nsCString const& aUrl);
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/android/MPAPI.h b/dom/media/android/MPAPI.h
new file mode 100644
index 000000000..9b289ca09
--- /dev/null
+++ b/dom/media/android/MPAPI.h
@@ -0,0 +1,165 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(MPAPI_h_)
+#define MPAPI_h_
+
+#include <stdint.h>
+
+namespace MPAPI {
+
+enum ColorFormat {
+ I420,
+ RGB565
+};
+
+/*
+ * A callback for the plugin to use to request a buffer owned by gecko. This can
+ * save us a copy or two down the line.
+ */
+class BufferCallback {
+public:
+ virtual void *operator()(size_t aWidth, size_t aHeight,
+ ColorFormat aColorFormat) = 0;
+};
+
+struct VideoPlane {
+ VideoPlane() :
+ mData(0),
+ mStride(0),
+ mWidth(0),
+ mHeight(0),
+ mOffset(0),
+ mSkip(0)
+ {}
+
+ void *mData;
+ int32_t mStride;
+ int32_t mWidth;
+ int32_t mHeight;
+ int32_t mOffset;
+ int32_t mSkip;
+};
+
+struct VideoFrame {
+ int64_t mTimeUs;
+ bool mKeyFrame;
+ void *mData;
+ size_t mSize;
+ int32_t mStride;
+ int32_t mSliceHeight;
+ int32_t mRotation;
+ VideoPlane Y;
+ VideoPlane Cb;
+ VideoPlane Cr;
+
+ VideoFrame() :
+ mTimeUs(0),
+ mKeyFrame(false),
+ mData(0),
+ mSize(0),
+ mStride(0),
+ mSliceHeight(0),
+ mRotation(0)
+ {}
+
+ void Set(int64_t aTimeUs, bool aKeyFrame,
+ void *aData, size_t aSize, int32_t aStride, int32_t aSliceHeight, int32_t aRotation,
+ void *aYData, int32_t aYStride, int32_t aYWidth, int32_t aYHeight, int32_t aYOffset, int32_t aYSkip,
+ void *aCbData, int32_t aCbStride, int32_t aCbWidth, int32_t aCbHeight, int32_t aCbOffset, int32_t aCbSkip,
+ void *aCrData, int32_t aCrStride, int32_t aCrWidth, int32_t aCrHeight, int32_t aCrOffset, int32_t aCrSkip)
+ {
+ mTimeUs = aTimeUs;
+ mKeyFrame = aKeyFrame;
+ mData = aData;
+ mSize = aSize;
+ mStride = aStride;
+ mSliceHeight = aSliceHeight;
+ mRotation = aRotation;
+ Y.mData = aYData;
+ Y.mStride = aYStride;
+ Y.mWidth = aYWidth;
+ Y.mHeight = aYHeight;
+ Y.mOffset = aYOffset;
+ Y.mSkip = aYSkip;
+ Cb.mData = aCbData;
+ Cb.mStride = aCbStride;
+ Cb.mWidth = aCbWidth;
+ Cb.mHeight = aCbHeight;
+ Cb.mOffset = aCbOffset;
+ Cb.mSkip = aCbSkip;
+ Cr.mData = aCrData;
+ Cr.mStride = aCrStride;
+ Cr.mWidth = aCrWidth;
+ Cr.mHeight = aCrHeight;
+ Cr.mOffset = aCrOffset;
+ Cr.mSkip = aCrSkip;
+ }
+};
+
+struct AudioFrame {
+ int64_t mTimeUs;
+ void *mData; // 16PCM interleaved
+ size_t mSize; // Size of mData in bytes
+ int32_t mAudioChannels;
+ int32_t mAudioSampleRate;
+
+ AudioFrame() :
+ mTimeUs(0),
+ mData(0),
+ mSize(0),
+ mAudioChannels(0),
+ mAudioSampleRate(0)
+ {
+ }
+
+ void Set(int64_t aTimeUs,
+ void *aData, size_t aSize,
+ int32_t aAudioChannels, int32_t aAudioSampleRate)
+ {
+ mTimeUs = aTimeUs;
+ mData = aData;
+ mSize = aSize;
+ mAudioChannels = aAudioChannels;
+ mAudioSampleRate = aAudioSampleRate;
+ }
+};
+
+struct Decoder;
+
+struct PluginHost {
+ bool (*Read)(Decoder *aDecoder, char *aBuffer, int64_t aOffset, uint32_t aCount, uint32_t* aBytes);
+ uint64_t (*GetLength)(Decoder *aDecoder);
+ void (*SetMetaDataReadMode)(Decoder *aDecoder);
+ void (*SetPlaybackReadMode)(Decoder *aDecoder);
+ bool (*GetIntPref)(const char *aPref, int32_t *aResult);
+ bool (*GetSystemInfoString)(const char *aKey, char *aResult, size_t aResultLen);
+};
+
+struct Decoder {
+ void *mResource;
+ void *mPrivate;
+
+ Decoder();
+
+ void (*GetDuration)(Decoder *aDecoder, int64_t *durationUs);
+ void (*GetVideoParameters)(Decoder *aDecoder, int32_t *aWidth, int32_t *aHeight);
+ void (*GetAudioParameters)(Decoder *aDecoder, int32_t *aNumChannels, int32_t *aSampleRate);
+ bool (*HasVideo)(Decoder *aDecoder);
+ bool (*HasAudio)(Decoder *aDecoder);
+ bool (*ReadVideo)(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback);
+ bool (*ReadAudio)(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs);
+ void (*DestroyDecoder)(Decoder *);
+};
+
+struct Manifest {
+ bool (*CanDecode)(const char *aMimeChars, size_t aMimeLen, const char* const**aCodecs);
+ bool (*CreateDecoder)(PluginHost *aPluginHost, Decoder *aDecoder,
+ const char *aMimeChars, size_t aMimeLen);
+};
+
+}
+
+#endif
diff --git a/dom/media/android/moz.build b/dom/media/android/moz.build
new file mode 100644
index 000000000..3ad43cd50
--- /dev/null
+++ b/dom/media/android/moz.build
@@ -0,0 +1,27 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+EXPORTS += [
+ 'AndroidMediaDecoder.h',
+ 'AndroidMediaPluginHost.h',
+ 'AndroidMediaReader.h',
+ 'AndroidMediaResourceServer.h',
+ 'MPAPI.h',
+]
+
+UNIFIED_SOURCES += [
+ 'AndroidMediaDecoder.cpp',
+ 'AndroidMediaPluginHost.cpp',
+ 'AndroidMediaReader.cpp',
+ 'AndroidMediaResourceServer.cpp',
+]
+
+LOCAL_INCLUDES += [
+ '/dom/base',
+ '/dom/html',
+]
+
+FINAL_LIBRARY = 'xul'