Diffstat (limited to 'dom')
-rw-r--r--  dom/base/Element.cpp  1
-rw-r--r--  dom/base/Location.cpp  31
-rw-r--r--  dom/base/Navigator.cpp  78
-rw-r--r--  dom/base/Navigator.h  7
-rw-r--r--  dom/base/nsCCUncollectableMarker.cpp  40
-rw-r--r--  dom/base/nsFrameLoader.cpp  3
-rw-r--r--  dom/base/nsGlobalWindow.cpp  66
-rw-r--r--  dom/base/nsGlobalWindow.h  22
-rw-r--r--  dom/base/nsScriptLoader.cpp  12
-rw-r--r--  dom/canvas/CanvasRenderingContext2D.cpp  13
-rw-r--r--  dom/canvas/WebGLContext.cpp  79
-rw-r--r--  dom/canvas/WebGLContext.h  3
-rw-r--r--  dom/crypto/WebCryptoTask.cpp  6
-rw-r--r--  dom/gamepad/GamepadManager.cpp  18
-rw-r--r--  dom/gamepad/GamepadManager.h  6
-rw-r--r--  dom/gamepad/ipc/GamepadServiceType.h  4
-rw-r--r--  dom/html/HTMLCanvasElement.cpp  41
-rw-r--r--  dom/html/HTMLCanvasElement.h  4
-rw-r--r--  dom/html/HTMLOptionsCollection.cpp  51
-rw-r--r--  dom/html/HTMLOptionsCollection.h  9
-rw-r--r--  dom/html/HTMLSelectElement.cpp  5
-rw-r--r--  dom/html/HTMLSelectElement.h  2
-rw-r--r--  dom/html/TextTrackManager.cpp  9
-rw-r--r--  dom/html/TextTrackManager.h  8
-rw-r--r--  dom/ipc/ContentChild.cpp  9
-rw-r--r--  dom/ipc/ContentChild.h  6
-rw-r--r--  dom/ipc/ContentParent.cpp  6
-rw-r--r--  dom/ipc/PContent.ipdl  3
-rw-r--r--  dom/ipc/TabChild.cpp  2
-rw-r--r--  dom/media/DecoderTraits.cpp  89
-rw-r--r--  dom/media/MP3FrameParser.cpp  591
-rw-r--r--  dom/media/MP3FrameParser.h  219
-rw-r--r--  dom/media/MediaDecoder.cpp  14
-rw-r--r--  dom/media/MediaDecoder.h  4
-rw-r--r--  dom/media/MediaPrefs.h  3
-rw-r--r--  dom/media/ThreadPoolCOMListener.h  4
-rw-r--r--  dom/media/WebVTTListener.h  1
-rw-r--r--  dom/media/android/AndroidMediaDecoder.cpp  25
-rw-r--r--  dom/media/android/AndroidMediaDecoder.h  28
-rw-r--r--  dom/media/android/AndroidMediaPluginHost.cpp  305
-rw-r--r--  dom/media/android/AndroidMediaPluginHost.h  41
-rw-r--r--  dom/media/android/AndroidMediaReader.cpp  449
-rw-r--r--  dom/media/android/AndroidMediaReader.h  75
-rw-r--r--  dom/media/android/AndroidMediaResourceServer.cpp  503
-rw-r--r--  dom/media/android/AndroidMediaResourceServer.h  96
-rw-r--r--  dom/media/android/MPAPI.h  165
-rw-r--r--  dom/media/android/moz.build  27
-rw-r--r--  dom/media/directshow/AudioSinkFilter.cpp  285
-rw-r--r--  dom/media/directshow/AudioSinkFilter.h  95
-rw-r--r--  dom/media/directshow/AudioSinkInputPin.cpp  195
-rw-r--r--  dom/media/directshow/AudioSinkInputPin.h  76
-rw-r--r--  dom/media/directshow/DirectShowDecoder.cpp  65
-rw-r--r--  dom/media/directshow/DirectShowDecoder.h  45
-rw-r--r--  dom/media/directshow/DirectShowReader.cpp  360
-rw-r--r--  dom/media/directshow/DirectShowReader.h  110
-rw-r--r--  dom/media/directshow/DirectShowUtils.cpp  369
-rw-r--r--  dom/media/directshow/DirectShowUtils.h  125
-rw-r--r--  dom/media/directshow/SampleSink.cpp  159
-rw-r--r--  dom/media/directshow/SampleSink.h  67
-rw-r--r--  dom/media/directshow/SourceFilter.cpp  683
-rw-r--r--  dom/media/directshow/SourceFilter.h  75
-rw-r--r--  dom/media/directshow/moz.build  41
-rw-r--r--  dom/media/fmp4/MP4Decoder.cpp  12
-rw-r--r--  dom/media/fmp4/MP4Stream.cpp  3
-rw-r--r--  dom/media/fmp4/moz.build  3
-rw-r--r--  dom/media/gmp/GMPParent.cpp  3
-rw-r--r--  dom/media/gtest/Cargo.toml  7
-rw-r--r--  dom/media/gtest/TestMP3Demuxer.cpp  1
-rw-r--r--  dom/media/gtest/TestMP4Reader.cpp  217
-rw-r--r--  dom/media/gtest/TestRust.cpp  9
-rw-r--r--  dom/media/gtest/hello.rs  6
-rw-r--r--  dom/media/gtest/moz.build  1
-rw-r--r--  dom/media/mediasource/MediaSource.cpp  2
-rw-r--r--  dom/media/mediasource/moz.build  3
-rw-r--r--  dom/media/moz.build  18
-rw-r--r--  dom/media/mp3/MP3Decoder.cpp (renamed from dom/media/MP3Decoder.cpp)  2
-rw-r--r--  dom/media/mp3/MP3Decoder.h (renamed from dom/media/MP3Decoder.h)  0
-rw-r--r--  dom/media/mp3/MP3Demuxer.cpp (renamed from dom/media/MP3Demuxer.cpp)  2
-rw-r--r--  dom/media/mp3/MP3Demuxer.h (renamed from dom/media/MP3Demuxer.h)  2
-rw-r--r--  dom/media/mp3/moz.build  17
-rw-r--r--  dom/media/platforms/MediaTelemetryConstants.h  22
-rw-r--r--  dom/media/platforms/PDMFactory.cpp  9
-rw-r--r--  dom/media/platforms/moz.build  1
-rw-r--r--  dom/media/platforms/omx/OmxPlatformLayer.cpp  23
-rw-r--r--  dom/media/platforms/wmf/DXVA2Manager.cpp  1
-rw-r--r--  dom/media/platforms/wmf/WMFDecoderModule.h  6
-rw-r--r--  dom/media/platforms/wmf/WMFMediaDataDecoder.h  2
-rw-r--r--  dom/media/platforms/wmf/WMFVideoMFTManager.cpp  1
-rw-r--r--  dom/media/test/manifest.js  4
-rw-r--r--  dom/media/test/test_can_play_type_mpeg.html  3
-rw-r--r--  dom/moz.build  1
-rw-r--r--  dom/vr/VRDisplay.cpp  802
-rw-r--r--  dom/vr/VRDisplay.h  362
-rw-r--r--  dom/vr/VREventObserver.cpp  79
-rw-r--r--  dom/vr/VREventObserver.h  33
-rw-r--r--  dom/vr/moz.build  22
-rw-r--r--  dom/webidl/Navigator.webidl  8
-rw-r--r--  dom/webidl/VRDisplay.webidl  286
-rw-r--r--  dom/webidl/moz.build  1
99 files changed, 145 insertions, 7762 deletions
diff --git a/dom/base/Element.cpp b/dom/base/Element.cpp
index 79b36a314..3760dc43f 100644
--- a/dom/base/Element.cpp
+++ b/dom/base/Element.cpp
@@ -145,7 +145,6 @@
#include "mozilla/dom/KeyframeEffectBinding.h"
#include "mozilla/dom/WindowBinding.h"
#include "mozilla/dom/ElementBinding.h"
-#include "mozilla/dom/VRDisplay.h"
#include "mozilla/IntegerPrintfMacros.h"
#include "mozilla/Preferences.h"
#include "nsComputedDOMStyle.h"
diff --git a/dom/base/Location.cpp b/dom/base/Location.cpp
index 7b3722f09..e312cffe0 100644
--- a/dom/base/Location.cpp
+++ b/dom/base/Location.cpp
@@ -33,6 +33,7 @@
#include "nsCycleCollectionParticipant.h"
#include "nsNullPrincipal.h"
#include "ScriptSettings.h"
+#include "mozilla/Unused.h"
#include "mozilla/dom/LocationBinding.h"
namespace mozilla {
@@ -716,9 +717,15 @@ Location::SetProtocol(const nsAString& aProtocol)
return rv;
}
- rv = uri->SetScheme(NS_ConvertUTF16toUTF8(aProtocol));
+ nsAString::const_iterator start, end;
+ aProtocol.BeginReading(start);
+ aProtocol.EndReading(end);
+ nsAString::const_iterator iter(start);
+ Unused << FindCharInReadable(':', iter, end);
+
+ rv = uri->SetScheme(NS_ConvertUTF16toUTF8(Substring(start, iter)));
if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
+ return NS_ERROR_DOM_SYNTAX_ERR;
}
nsAutoCString newSpec;
rv = uri->GetSpec(newSpec);
@@ -728,8 +735,28 @@ Location::SetProtocol(const nsAString& aProtocol)
// We may want a new URI class for the new URI, so recreate it:
rv = NS_NewURI(getter_AddRefs(uri), newSpec);
if (NS_FAILED(rv)) {
+ if (rv == NS_ERROR_MALFORMED_URI) {
+ rv = NS_ERROR_DOM_SYNTAX_ERR;
+ }
return rv;
}
+
+ bool isHttp;
+ rv = uri->SchemeIs("http", &isHttp);
+ if (NS_WARN_IF(NS_FAILED(rv))) {
+ return rv;
+ }
+
+ bool isHttps;
+ rv = uri->SchemeIs("https", &isHttps);
+ if (NS_WARN_IF(NS_FAILED(rv))) {
+ return rv;
+ }
+
+ if (!isHttp && !isHttps) {
+ // No-op, per spec.
+ return NS_OK;
+ }
return SetURI(uri);
}
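
Editorial note (not part of the patch): the Location.cpp hunk above changes Location::SetProtocol so that only the text before the first ':' is used as the new scheme, malformed results map to NS_ERROR_DOM_SYNTAX_ERR, and a re-parsed URI that is not http/https is silently ignored. The standalone sketch below shows the intended observable behaviour with plain C++; TruncateAtColon and IsHttpOrHttps are illustrative helpers, not functions from the patch or from Gecko.

#include <iostream>
#include <string>

// Keep only the part before the first ':' (mirrors the FindCharInReadable call above).
static std::string TruncateAtColon(const std::string& aProtocol) {
  return aProtocol.substr(0, aProtocol.find(':'));
}

// The patch only applies the new scheme when the re-parsed URI is http or https.
static bool IsHttpOrHttps(const std::string& aScheme) {
  return aScheme == "http" || aScheme == "https";
}

int main() {
  for (const std::string input : {"https:", "https:junk", "ftp"}) {
    const std::string scheme = TruncateAtColon(input);
    std::cout << input << " -> " << scheme
              << (IsHttpOrHttps(scheme) ? " (applied)" : " (ignored, no-op per spec)") << '\n';
  }
}
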
diff --git a/dom/base/Navigator.cpp b/dom/base/Navigator.cpp
index 1bc4f82f4..286cd0e79 100644
--- a/dom/base/Navigator.cpp
+++ b/dom/base/Navigator.cpp
@@ -44,7 +44,6 @@
#include "mozilla/dom/ServiceWorkerContainer.h"
#include "mozilla/dom/StorageManager.h"
#include "mozilla/dom/TCPSocket.h"
-#include "mozilla/dom/VRDisplay.h"
#include "mozilla/dom/workers/RuntimeService.h"
#include "mozilla/Hal.h"
#include "nsISiteSpecificUserAgent.h"
@@ -1471,83 +1470,6 @@ Navigator::RequestGamepadServiceTest()
}
#endif
-already_AddRefed<Promise>
-Navigator::GetVRDisplays(ErrorResult& aRv)
-{
- if (!mWindow || !mWindow->GetDocShell()) {
- aRv.Throw(NS_ERROR_UNEXPECTED);
- return nullptr;
- }
-
- nsGlobalWindow* win = nsGlobalWindow::Cast(mWindow);
- win->NotifyVREventListenerAdded();
-
- nsCOMPtr<nsIGlobalObject> go = do_QueryInterface(mWindow);
- RefPtr<Promise> p = Promise::Create(go, aRv);
- if (aRv.Failed()) {
- return nullptr;
- }
-
- // We pass mWindow's id to RefreshVRDisplays, so NotifyVRDisplaysUpdated will
- // be called asynchronously, resolving the promises in mVRGetDisplaysPromises.
- if (!VRDisplay::RefreshVRDisplays(win->WindowID())) {
- p->MaybeReject(NS_ERROR_FAILURE);
- return p.forget();
- }
-
- mVRGetDisplaysPromises.AppendElement(p);
- return p.forget();
-}
-
-void
-Navigator::GetActiveVRDisplays(nsTArray<RefPtr<VRDisplay>>& aDisplays) const
-{
- /**
- * Get only the active VR displays.
- * Callers do not wish to VRDisplay::RefreshVRDisplays, as the enumeration may
- * activate hardware that is not yet intended to be used.
- */
- if (!mWindow || !mWindow->GetDocShell()) {
- return;
- }
- nsGlobalWindow* win = nsGlobalWindow::Cast(mWindow);
- win->NotifyVREventListenerAdded();
- nsTArray<RefPtr<VRDisplay>> displays;
- if (win->UpdateVRDisplays(displays)) {
- for (auto display : displays) {
- if (display->IsPresenting()) {
- aDisplays.AppendElement(display);
- }
- }
- }
-}
-
-void
-Navigator::NotifyVRDisplaysUpdated()
-{
- // Synchronize the VR devices and resolve the promises in
- // mVRGetDisplaysPromises
- nsGlobalWindow* win = nsGlobalWindow::Cast(mWindow);
-
- nsTArray<RefPtr<VRDisplay>> vrDisplays;
- if (win->UpdateVRDisplays(vrDisplays)) {
- for (auto p : mVRGetDisplaysPromises) {
- p->MaybeResolve(vrDisplays);
- }
- } else {
- for (auto p : mVRGetDisplaysPromises) {
- p->MaybeReject(NS_ERROR_FAILURE);
- }
- }
- mVRGetDisplaysPromises.Clear();
-}
-
-void
-Navigator::NotifyActiveVRDisplaysChanged()
-{
- NavigatorBinding::ClearCachedActiveVRDisplaysValue(this);
-}
-
//*****************************************************************************
// Navigator::nsIMozNavigatorNetwork
//*****************************************************************************
diff --git a/dom/base/Navigator.h b/dom/base/Navigator.h
index d47a80bc1..91b7fc15c 100644
--- a/dom/base/Navigator.h
+++ b/dom/base/Navigator.h
@@ -76,7 +76,6 @@ class Connection;
class PowerManager;
class Presentation;
class LegacyMozTCPSocket;
-class VRDisplay;
class StorageManager;
namespace time {
@@ -204,8 +203,6 @@ public:
void GetGamepads(nsTArray<RefPtr<Gamepad> >& aGamepads, ErrorResult& aRv);
GamepadServiceTest* RequestGamepadServiceTest();
#endif // MOZ_GAMEPAD
- already_AddRefed<Promise> GetVRDisplays(ErrorResult& aRv);
- void GetActiveVRDisplays(nsTArray<RefPtr<VRDisplay>>& aDisplays) const;
#ifdef MOZ_TIME_MANAGER
time::TimeManager* GetMozTime(ErrorResult& aRv);
#endif // MOZ_TIME_MANAGER
@@ -269,10 +266,6 @@ private:
RefPtr<MediaKeySystemAccessManager> mMediaKeySystemAccessManager;
#endif
-public:
- void NotifyVRDisplaysUpdated();
- void NotifyActiveVRDisplaysChanged();
-
private:
virtual ~Navigator();
diff --git a/dom/base/nsCCUncollectableMarker.cpp b/dom/base/nsCCUncollectableMarker.cpp
index 861cda521..db4d0d351 100644
--- a/dom/base/nsCCUncollectableMarker.cpp
+++ b/dom/base/nsCCUncollectableMarker.cpp
@@ -188,23 +188,20 @@ MarkMessageManagers()
}
void
-MarkContentViewer(nsIContentViewer* aViewer, bool aCleanupJS,
- bool aPrepareForCC)
+MarkDocument(nsIDocument* aDoc, bool aCleanupJS, bool aPrepareForCC)
{
- if (!aViewer) {
+ if (!aDoc) {
return;
}
- nsIDocument *doc = aViewer->GetDocument();
- if (doc &&
- doc->GetMarkedCCGeneration() != nsCCUncollectableMarker::sGeneration) {
- doc->MarkUncollectableForCCGeneration(nsCCUncollectableMarker::sGeneration);
+ if (aDoc->GetMarkedCCGeneration() != nsCCUncollectableMarker::sGeneration) {
+ aDoc->MarkUncollectableForCCGeneration(nsCCUncollectableMarker::sGeneration);
if (aCleanupJS) {
- EventListenerManager* elm = doc->GetExistingListenerManager();
+ EventListenerManager* elm = aDoc->GetExistingListenerManager();
if (elm) {
elm->MarkForCC();
}
- nsCOMPtr<EventTarget> win = do_QueryInterface(doc->GetInnerWindow());
+ nsCOMPtr<EventTarget> win = do_QueryInterface(aDoc->GetInnerWindow());
if (win) {
elm = win->GetExistingListenerManager();
if (elm) {
@@ -215,18 +212,27 @@ MarkContentViewer(nsIContentViewer* aViewer, bool aCleanupJS,
} else if (aPrepareForCC) {
// Unfortunately we need to still mark user data just before running CC so
// that it has the right generation.
- doc->PropertyTable(DOM_USER_DATA)->
+ aDoc->PropertyTable(DOM_USER_DATA)->
EnumerateAll(MarkUserData, &nsCCUncollectableMarker::sGeneration);
}
}
- if (doc) {
- if (nsPIDOMWindowInner* inner = doc->GetInnerWindow()) {
- inner->MarkUncollectableForCCGeneration(nsCCUncollectableMarker::sGeneration);
- }
- if (nsPIDOMWindowOuter* outer = doc->GetWindow()) {
- outer->MarkUncollectableForCCGeneration(nsCCUncollectableMarker::sGeneration);
- }
+ if (nsPIDOMWindowInner* inner = aDoc->GetInnerWindow()) {
+ inner->MarkUncollectableForCCGeneration(nsCCUncollectableMarker::sGeneration);
+ }
+ if (nsPIDOMWindowOuter* outer = aDoc->GetWindow()) {
+ outer->MarkUncollectableForCCGeneration(nsCCUncollectableMarker::sGeneration);
+ }
+}
+
+void
+MarkContentViewer(nsIContentViewer* aViewer, bool aCleanupJS,
+ bool aPrepareForCC)
+{
+ if (!aViewer) {
+ return;
}
+
+ MarkDocument(aViewer->GetDocument(), aCleanupJS, aPrepareForCC);
}
void MarkDocShell(nsIDocShellTreeItem* aNode, bool aCleanupJS,
diff --git a/dom/base/nsFrameLoader.cpp b/dom/base/nsFrameLoader.cpp
index 23067becd..2804f2d4c 100644
--- a/dom/base/nsFrameLoader.cpp
+++ b/dom/base/nsFrameLoader.cpp
@@ -588,6 +588,9 @@ nsFrameLoader::ReallyStartLoadingInternal()
flags = nsIWebNavigation::LOAD_FLAGS_ALLOW_THIRD_PARTY_FIXUP |
nsIWebNavigation::LOAD_FLAGS_DISALLOW_INHERIT_PRINCIPAL;
}
+
+ // Notify that this load resulted from attribute changes.
+ loadInfo->SetIsFromProcessingFrameAttributes(true);
// Kick off the load...
bool tmpState = mNeedsAsyncDestroy;
diff --git a/dom/base/nsGlobalWindow.cpp b/dom/base/nsGlobalWindow.cpp
index 86160c77c..677e1a0ea 100644
--- a/dom/base/nsGlobalWindow.cpp
+++ b/dom/base/nsGlobalWindow.cpp
@@ -202,9 +202,6 @@
#include "mozilla/dom/GamepadManager.h"
#endif
-#include "mozilla/dom/VRDisplay.h"
-#include "mozilla/dom/VREventObserver.h"
-
#include "nsRefreshDriver.h"
#include "Layers.h"
@@ -1532,7 +1529,6 @@ nsGlobalWindow::nsGlobalWindow(nsGlobalWindow *aOuterWindow)
mShowFocusRingForContent(false),
mFocusByKeyOccurred(false),
mHasGamepad(false),
- mHasVREvents(false),
#ifdef MOZ_GAMEPAD
mHasSeenGamepadInput(false),
#endif
@@ -1967,12 +1963,9 @@ nsGlobalWindow::CleanUp()
if (IsInnerWindow()) {
DisableGamepadUpdates();
mHasGamepad = false;
- DisableVRUpdates();
- mHasVREvents = false;
DisableIdleCallbackRequests();
} else {
MOZ_ASSERT(!mHasGamepad);
- MOZ_ASSERT(!mHasVREvents);
}
if (mCleanMessageManager) {
@@ -2118,9 +2111,6 @@ nsGlobalWindow::FreeInnerObjects(bool aForDocumentOpen)
mHasGamepad = false;
mGamepads.Clear();
#endif
- DisableVRUpdates();
- mHasVREvents = false;
- mVRDisplays.Clear();
}
//*****************************************************************************
@@ -2275,7 +2265,6 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INTERNAL(nsGlobalWindow)
#endif
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mCacheStorage)
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVRDisplays)
// Traverse stuff from nsPIDOMWindow
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mChromeEventHandler)
@@ -2352,7 +2341,6 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsGlobalWindow)
#endif
NS_IMPL_CYCLE_COLLECTION_UNLINK(mCacheStorage)
- NS_IMPL_CYCLE_COLLECTION_UNLINK(mVRDisplays)
// Unlink stuff from nsPIDOMWindow
NS_IMPL_CYCLE_COLLECTION_UNLINK(mChromeEventHandler)
@@ -10508,24 +10496,6 @@ nsGlobalWindow::DisableGamepadUpdates()
}
void
-nsGlobalWindow::EnableVRUpdates()
-{
- MOZ_ASSERT(IsInnerWindow());
-
- if (mHasVREvents && !mVREventObserver) {
- mVREventObserver = new VREventObserver(this);
- }
-}
-
-void
-nsGlobalWindow::DisableVRUpdates()
-{
- MOZ_ASSERT(IsInnerWindow());
-
- mVREventObserver = nullptr;
-}
-
-void
nsGlobalWindow::SetChromeEventHandler(EventTarget* aChromeEventHandler)
{
MOZ_ASSERT(IsOuterWindow());
@@ -12210,7 +12180,6 @@ nsGlobalWindow::Suspend()
ac->RemoveWindowListener(mEnabledSensors[i], this);
}
DisableGamepadUpdates();
- DisableVRUpdates();
mozilla::dom::workers::SuspendWorkersForWindow(AsInner());
@@ -12274,7 +12243,6 @@ nsGlobalWindow::Resume()
ac->AddWindowListener(mEnabledSensors[i], this);
}
EnableGamepadUpdates();
- EnableVRUpdates();
// Resume all of the AudioContexts for this window
for (uint32_t i = 0; i < mAudioContexts.Length(); ++i) {
@@ -14030,19 +13998,6 @@ nsGlobalWindow::SetHasGamepadEventListener(bool aHasGamepad/* = true*/)
void
nsGlobalWindow::EventListenerAdded(nsIAtom* aType)
{
- if (aType == nsGkAtoms::onvrdisplayconnect ||
- aType == nsGkAtoms::onvrdisplaydisconnect ||
- aType == nsGkAtoms::onvrdisplaypresentchange) {
- NotifyVREventListenerAdded();
- }
-}
-
-void
-nsGlobalWindow::NotifyVREventListenerAdded()
-{
- MOZ_ASSERT(IsInnerWindow());
- mHasVREvents = true;
- EnableVRUpdates();
}
void
@@ -14187,27 +14142,6 @@ nsGlobalWindow::SyncGamepadState()
}
#endif // MOZ_GAMEPAD
-bool
-nsGlobalWindow::UpdateVRDisplays(nsTArray<RefPtr<mozilla::dom::VRDisplay>>& aDevices)
-{
- FORWARD_TO_INNER(UpdateVRDisplays, (aDevices), false);
-
- VRDisplay::UpdateVRDisplays(mVRDisplays, AsInner());
- aDevices = mVRDisplays;
- return true;
-}
-
-void
-nsGlobalWindow::NotifyActiveVRDisplaysChanged()
-{
- MOZ_ASSERT(IsInnerWindow());
-
- if (mNavigator) {
- mNavigator->NotifyActiveVRDisplaysChanged();
- }
-}
-
-
// nsGlobalChromeWindow implementation
NS_IMPL_CYCLE_COLLECTION_CLASS(nsGlobalChromeWindow)
diff --git a/dom/base/nsGlobalWindow.h b/dom/base/nsGlobalWindow.h
index 80bf33b80..1cb825a77 100644
--- a/dom/base/nsGlobalWindow.h
+++ b/dom/base/nsGlobalWindow.h
@@ -135,8 +135,6 @@ class SpeechSynthesis;
class TabGroup;
class Timeout;
class U2F;
-class VRDisplay;
-class VREventObserver;
class WakeLock;
#if defined(MOZ_WIDGET_ANDROID)
class WindowOrientationObserver;
@@ -745,18 +743,6 @@ public:
void EnableGamepadUpdates();
void DisableGamepadUpdates();
- // Inner windows only.
- // Enable/disable updates for VR
- void EnableVRUpdates();
- void DisableVRUpdates();
-
- // Update the VR displays for this window
- bool UpdateVRDisplays(nsTArray<RefPtr<mozilla::dom::VRDisplay>>& aDisplays);
-
- // Inner windows only.
- // Called to inform that the set of active VR displays has changed.
- void NotifyActiveVRDisplaysChanged();
-
#define EVENT(name_, id_, type_, struct_) \
mozilla::dom::EventHandlerNonNull* GetOn##name_() \
{ \
@@ -1832,9 +1818,6 @@ protected:
// Indicates whether this window wants gamepad input events
bool mHasGamepad : 1;
- // Inner windows only.
- // Indicates whether this window wants VR events
- bool mHasVREvents : 1;
#ifdef MOZ_GAMEPAD
nsCheapSet<nsUint32HashKey> mGamepadIndexSet;
nsRefPtrHashtable<nsUint32HashKey, mozilla::dom::Gamepad> mGamepads;
@@ -1989,11 +1972,6 @@ protected:
// This is the CC generation the last time we called CanSkip.
uint32_t mCanSkipCCGeneration;
- // The VR Displays for this window
- nsTArray<RefPtr<mozilla::dom::VRDisplay>> mVRDisplays;
-
- nsAutoPtr<mozilla::dom::VREventObserver> mVREventObserver;
-
friend class nsDOMScriptableHelper;
friend class nsDOMWindowUtils;
friend class mozilla::dom::PostMessageEvent;
diff --git a/dom/base/nsScriptLoader.cpp b/dom/base/nsScriptLoader.cpp
index a6d20e363..0eb5bbf31 100644
--- a/dom/base/nsScriptLoader.cpp
+++ b/dom/base/nsScriptLoader.cpp
@@ -81,11 +81,19 @@ ImplCycleCollectionTraverse(nsCycleCollectionTraversalCallback& aCallback,
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsScriptLoadRequest)
NS_INTERFACE_MAP_END
-NS_IMPL_CYCLE_COLLECTION_0(nsScriptLoadRequest)
-
NS_IMPL_CYCLE_COLLECTING_ADDREF(nsScriptLoadRequest)
NS_IMPL_CYCLE_COLLECTING_RELEASE(nsScriptLoadRequest)
+NS_IMPL_CYCLE_COLLECTION_CLASS(nsScriptLoadRequest)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(nsScriptLoadRequest)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(mElement)
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsScriptLoadRequest)
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mElement)
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
nsScriptLoadRequest::~nsScriptLoadRequest()
{
js_free(mScriptTextBuf);
diff --git a/dom/canvas/CanvasRenderingContext2D.cpp b/dom/canvas/CanvasRenderingContext2D.cpp
index 18af28e9f..e3406fc02 100644
--- a/dom/canvas/CanvasRenderingContext2D.cpp
+++ b/dom/canvas/CanvasRenderingContext2D.cpp
@@ -783,6 +783,15 @@ public:
: mCanvas(aCanvas)
{}
+ void OnShutdown() {
+ if(!mCanvas) {
+ return;
+ }
+
+ mCanvas = nullptr;
+ nsContentUtils::UnregisterShutdownObserver(this);
+ }
+
NS_DECL_ISUPPORTS
NS_DECL_NSIOBSERVER
private:
@@ -800,7 +809,7 @@ CanvasShutdownObserver::Observe(nsISupports* aSubject,
{
if (mCanvas && strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
mCanvas->OnShutdown();
- nsContentUtils::UnregisterShutdownObserver(this);
+ OnShutdown();
}
return NS_OK;
@@ -1218,7 +1227,7 @@ void
CanvasRenderingContext2D::RemoveShutdownObserver()
{
if (mShutdownObserver) {
- nsContentUtils::UnregisterShutdownObserver(mShutdownObserver);
+ mShutdownObserver->OnShutdown();
mShutdownObserver = nullptr;
}
}
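
Editorial note (not part of the patch): the CanvasRenderingContext2D.cpp hunk above makes shutdown-observer teardown idempotent. OnShutdown() clears the back-pointer and unregisters exactly once, whether it is reached from the XPCOM shutdown notification or from the context's own RemoveShutdownObserver(). A minimal stand-alone sketch of that pattern follows; Registry and Observer are hypothetical stand-ins, not Gecko APIs.

#include <algorithm>
#include <iostream>
#include <vector>

struct Observer;
struct Registry {                             // stand-in for the shutdown-observer list
  std::vector<Observer*> observers;
  void Unregister(Observer* aObserver) {
    observers.erase(std::remove(observers.begin(), observers.end(), aObserver),
                    observers.end());
  }
};

struct Observer {
  Registry* registry;
  void* canvas;                               // weak back-pointer, cleared on shutdown
  void OnShutdown() {
    if (!canvas) {
      return;                                 // already torn down: safe to call again
    }
    canvas = nullptr;
    registry->Unregister(this);
  }
};

int main() {
  Registry reg;
  int canvas = 0;
  Observer obs{&reg, &canvas};
  reg.observers.push_back(&obs);
  obs.OnShutdown();                           // e.g. the canvas-destruction path
  obs.OnShutdown();                           // e.g. the XPCOM-shutdown path: no double unregister
  std::cout << "observers left: " << reg.observers.size() << '\n';   // prints 0
}
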
diff --git a/dom/canvas/WebGLContext.cpp b/dom/canvas/WebGLContext.cpp
index 14bc7e3e3..e2e05e5fd 100644
--- a/dom/canvas/WebGLContext.cpp
+++ b/dom/canvas/WebGLContext.cpp
@@ -47,7 +47,6 @@
#include "nsSVGEffects.h"
#include "prenv.h"
#include "ScopedGLHelpers.h"
-#include "VRManagerChild.h"
#include "mozilla/layers/TextureClientSharedSurface.h"
// Local
@@ -2239,84 +2238,6 @@ WebGLContext::GetUnpackSize(bool isFunc3D, uint32_t width, uint32_t height,
return totalBytes;
}
-already_AddRefed<layers::SharedSurfaceTextureClient>
-WebGLContext::GetVRFrame()
-{
- if (!mLayerIsMirror) {
- /**
- * Do not allow VR frame submission until a mirroring canvas layer has
- * been returned by GetCanvasLayer
- */
- return nullptr;
- }
-
- VRManagerChild* vrmc = VRManagerChild::Get();
- if (!vrmc) {
- return nullptr;
- }
-
- /**
- * Swap buffers as though composition has occurred.
- * We will then share the resulting front buffer to be submitted to the VR
- * compositor.
- */
- BeginComposition();
- EndComposition();
-
- gl::GLScreenBuffer* screen = gl->Screen();
- if (!screen) {
- return nullptr;
- }
-
- RefPtr<SharedSurfaceTextureClient> sharedSurface = screen->Front();
- if (!sharedSurface) {
- return nullptr;
- }
-
- if (sharedSurface && sharedSurface->GetAllocator() != vrmc) {
- RefPtr<SharedSurfaceTextureClient> dest =
- screen->Factory()->NewTexClient(sharedSurface->GetSize());
- if (!dest) {
- return nullptr;
- }
- gl::SharedSurface* destSurf = dest->Surf();
- destSurf->ProducerAcquire();
- SharedSurface::ProdCopy(sharedSurface->Surf(), dest->Surf(),
- screen->Factory());
- destSurf->ProducerRelease();
-
- return dest.forget();
- }
-
- return sharedSurface.forget();
-}
-
-bool
-WebGLContext::StartVRPresentation()
-{
- VRManagerChild* vrmc = VRManagerChild::Get();
- if (!vrmc) {
- return false;
- }
- gl::GLScreenBuffer* screen = gl->Screen();
- if (!screen) {
- return false;
- }
- gl::SurfaceCaps caps = screen->mCaps;
-
- UniquePtr<gl::SurfaceFactory> factory =
- gl::GLScreenBuffer::CreateFactory(gl,
- caps,
- vrmc,
- vrmc->GetBackendType(),
- TextureFlags::ORIGIN_BOTTOM_LEFT);
-
- if (factory) {
- screen->Morph(Move(factory));
- }
- return true;
-}
-
////////////////////////////////////////////////////////////////////////////////
static inline size_t
diff --git a/dom/canvas/WebGLContext.h b/dom/canvas/WebGLContext.h
index b4d416a33..3ec307b00 100644
--- a/dom/canvas/WebGLContext.h
+++ b/dom/canvas/WebGLContext.h
@@ -656,9 +656,6 @@ public:
void PixelStorei(GLenum pname, GLint param);
void PolygonOffset(GLfloat factor, GLfloat units);
- already_AddRefed<layers::SharedSurfaceTextureClient> GetVRFrame();
- bool StartVRPresentation();
-
////
webgl::PackingInfo
diff --git a/dom/crypto/WebCryptoTask.cpp b/dom/crypto/WebCryptoTask.cpp
index c4cc7080d..e5f5882e9 100644
--- a/dom/crypto/WebCryptoTask.cpp
+++ b/dom/crypto/WebCryptoTask.cpp
@@ -584,7 +584,7 @@ public:
mMechanism = CKM_AES_CBC_PAD;
telemetryAlg = TA_AES_CBC;
- AesCbcParams params;
+ RootedDictionary<AesCbcParams> params(aCx);
nsresult rv = Coerce(aCx, params, aAlgorithm);
if (NS_FAILED(rv)) {
mEarlyRv = NS_ERROR_DOM_INVALID_ACCESS_ERR;
@@ -601,7 +601,7 @@ public:
mMechanism = CKM_AES_CTR;
telemetryAlg = TA_AES_CTR;
- AesCtrParams params;
+ RootedDictionary<AesCtrParams> params(aCx);
nsresult rv = Coerce(aCx, params, aAlgorithm);
if (NS_FAILED(rv)) {
mEarlyRv = NS_ERROR_DOM_SYNTAX_ERR;
@@ -620,7 +620,7 @@ public:
mMechanism = CKM_AES_GCM;
telemetryAlg = TA_AES_GCM;
- AesGcmParams params;
+ RootedDictionary<AesGcmParams> params(aCx);
nsresult rv = Coerce(aCx, params, aAlgorithm);
if (NS_FAILED(rv)) {
mEarlyRv = NS_ERROR_DOM_SYNTAX_ERR;
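
Editorial note (not part of the patch): the WebCryptoTask.cpp hunk above replaces plain stack dictionaries with RootedDictionary<...>(aCx), because these dictionaries carry JS-managed members (such as the iv/counter buffer sources) that must stay registered as GC roots while Coerce() and the rest of the task setup run. The sketch below models only the rooting idea with a hypothetical context and wrapper; it is not the SpiderMonkey or Gecko API.

#include <cstddef>
#include <iostream>
#include <vector>

struct FakeContext {                      // stand-in for a context's root list
  std::vector<const void*> roots;
};

template <typename T>
class Rooted {                            // stand-in for RootedDictionary<T>
public:
  explicit Rooted(FakeContext& aCx) : mCx(aCx) {
    mCx.roots.push_back(&mValue);         // registered with the context on construction
  }
  ~Rooted() { mCx.roots.pop_back(); }     // unregistered (LIFO) when the scope ends
  T& get() { return mValue; }
private:
  FakeContext& mCx;
  T mValue{};
};

struct AesParams { std::size_t length = 0; };   // illustrative dictionary type

int main() {
  FakeContext cx;
  {
    Rooted<AesParams> params(cx);         // rooted for as long as native code uses it
    params.get().length = 128;
    std::cout << "live roots: " << cx.roots.size() << '\n';   // 1
  }
  std::cout << "live roots: " << cx.roots.size() << '\n';     // 0
}
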
diff --git a/dom/gamepad/GamepadManager.cpp b/dom/gamepad/GamepadManager.cpp
index dde71dd0a..e17829652 100644
--- a/dom/gamepad/GamepadManager.cpp
+++ b/dom/gamepad/GamepadManager.cpp
@@ -28,7 +28,6 @@
#include "nsIObserverService.h"
#include "nsIServiceManager.h"
#include "nsThreadUtils.h"
-#include "VRManagerChild.h"
#include "mozilla/Services.h"
#include "mozilla/Unused.h"
@@ -110,11 +109,6 @@ GamepadManager::StopMonitoring()
}
mChannelChildren.Clear();
mGamepads.Clear();
-
-#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX)
- mVRChannelChild = gfx::VRManagerChild::Get();
- mVRChannelChild->SendControllerListenerRemoved();
-#endif
}
void
@@ -211,11 +205,6 @@ uint32_t GamepadManager::GetGamepadIndexWithServiceType(uint32_t aIndex,
newIndex = aIndex;
break;
}
- case GamepadServiceType::VR:
- {
- newIndex = aIndex + VR_GAMEPAD_IDX_OFFSET;
- break;
- }
default:
MOZ_ASSERT(false);
break;
@@ -679,13 +668,6 @@ GamepadManager::ActorCreated(PBackgroundChild *aActor)
MOZ_ASSERT(initedChild == child);
child->SendGamepadListenerAdded();
mChannelChildren.AppendElement(child);
-
-#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX)
- // Construct VRManagerChannel and ask adding the connected
- // VR controllers to GamepadManager
- mVRChannelChild = gfx::VRManagerChild::Get();
- mVRChannelChild->SendControllerListenerAdded();
-#endif
}
//Override nsIIPCBackgroundChildCreateCallback
diff --git a/dom/gamepad/GamepadManager.h b/dom/gamepad/GamepadManager.h
index 1bb437d8f..a772221ca 100644
--- a/dom/gamepad/GamepadManager.h
+++ b/dom/gamepad/GamepadManager.h
@@ -16,9 +16,6 @@
class nsGlobalWindow;
namespace mozilla {
-namespace gfx {
-class VRManagerChild;
-} // namespace gfx
namespace dom {
class EventTarget;
@@ -123,7 +120,6 @@ class GamepadManager final : public nsIObserver,
// will be destroyed during the IPDL shutdown chain, so we
// don't need to refcount it here.
nsTArray<GamepadEventChannelChild *> mChannelChildren;
- gfx::VRManagerChild* mVRChannelChild;
private:
@@ -138,8 +134,6 @@ class GamepadManager final : public nsIObserver,
// Indicate that a window has received data from a gamepad.
void SetWindowHasSeenGamepad(nsGlobalWindow* aWindow, uint32_t aIndex,
bool aHasSeen = true);
- // Our gamepad index has VR_GAMEPAD_IDX_OFFSET while GamepadChannelType
- // is from VRManager.
uint32_t GetGamepadIndexWithServiceType(uint32_t aIndex, GamepadServiceType aServiceType);
// Gamepads connected to the system. Copies of these are handed out
diff --git a/dom/gamepad/ipc/GamepadServiceType.h b/dom/gamepad/ipc/GamepadServiceType.h
index acc0967d1..6200cdfa9 100644
--- a/dom/gamepad/ipc/GamepadServiceType.h
+++ b/dom/gamepad/ipc/GamepadServiceType.h
@@ -6,11 +6,9 @@ namespace mozilla{
namespace dom{
// Standard channel is used for managing gamepads that
-// are from GamepadPlatformService. VR channel
-// is for gamepads that are from VRManagerChild.
+// are from GamepadPlatformService.
enum class GamepadServiceType : uint16_t {
Standard,
- VR,
NumGamepadServiceType
};
diff --git a/dom/html/HTMLCanvasElement.cpp b/dom/html/HTMLCanvasElement.cpp
index 88b41bce0..527135a80 100644
--- a/dom/html/HTMLCanvasElement.cpp
+++ b/dom/html/HTMLCanvasElement.cpp
@@ -42,7 +42,6 @@
#include "nsRefreshDriver.h"
#include "nsStreamUtils.h"
#include "ActiveLayerTracker.h"
-#include "VRManagerChild.h"
#include "WebGL1Context.h"
#include "WebGL2Context.h"
@@ -358,7 +357,6 @@ NS_IMPL_ISUPPORTS(HTMLCanvasElementObserver, nsIObserver)
HTMLCanvasElement::HTMLCanvasElement(already_AddRefed<mozilla::dom::NodeInfo>& aNodeInfo)
: nsGenericHTMLElement(aNodeInfo),
mResetLayer(true) ,
- mVRPresentationActive(false),
mWriteOnly(false)
{}
@@ -1111,7 +1109,7 @@ HTMLCanvasElement::GetCanvasLayer(nsDisplayListBuilder* aBuilder,
static uint8_t sOffscreenCanvasLayerUserDataDummy = 0;
if (mCurrentContext) {
- return mCurrentContext->GetCanvasLayer(aBuilder, aOldLayer, aManager, mVRPresentationActive);
+ return mCurrentContext->GetCanvasLayer(aBuilder, aOldLayer, aManager);
}
if (mOffscreenCanvas) {
@@ -1441,42 +1439,5 @@ HTMLCanvasElement::InvalidateFromAsyncCanvasRenderer(AsyncCanvasRenderer *aRende
element->InvalidateCanvasContent(nullptr);
}
-void
-HTMLCanvasElement::StartVRPresentation()
-{
- WebGLContext* webgl = static_cast<WebGLContext*>(GetContextAtIndex(0));
- if (!webgl) {
- return;
- }
-
- if (!webgl->StartVRPresentation()) {
- return;
- }
-
- mVRPresentationActive = true;
-}
-
-void
-HTMLCanvasElement::StopVRPresentation()
-{
- mVRPresentationActive = false;
-}
-
-already_AddRefed<layers::SharedSurfaceTextureClient>
-HTMLCanvasElement::GetVRFrame()
-{
- if (GetCurrentContextType() != CanvasContextType::WebGL1 &&
- GetCurrentContextType() != CanvasContextType::WebGL2) {
- return nullptr;
- }
-
- WebGLContext* webgl = static_cast<WebGLContext*>(GetContextAtIndex(0));
- if (!webgl) {
- return nullptr;
- }
-
- return webgl->GetVRFrame();
-}
-
} // namespace dom
} // namespace mozilla
diff --git a/dom/html/HTMLCanvasElement.h b/dom/html/HTMLCanvasElement.h
index 81c141d3c..746fab198 100644
--- a/dom/html/HTMLCanvasElement.h
+++ b/dom/html/HTMLCanvasElement.h
@@ -350,10 +350,6 @@ public:
static void SetAttrFromAsyncCanvasRenderer(AsyncCanvasRenderer *aRenderer);
static void InvalidateFromAsyncCanvasRenderer(AsyncCanvasRenderer *aRenderer);
- void StartVRPresentation();
- void StopVRPresentation();
- already_AddRefed<layers::SharedSurfaceTextureClient> GetVRFrame();
-
protected:
virtual ~HTMLCanvasElement();
diff --git a/dom/html/HTMLOptionsCollection.cpp b/dom/html/HTMLOptionsCollection.cpp
index 294493c0c..67de97fc4 100644
--- a/dom/html/HTMLOptionsCollection.cpp
+++ b/dom/html/HTMLOptionsCollection.cpp
@@ -35,23 +35,8 @@ namespace mozilla {
namespace dom {
HTMLOptionsCollection::HTMLOptionsCollection(HTMLSelectElement* aSelect)
-{
- // Do not maintain a reference counted reference. When
- // the select goes away, it will let us know.
- mSelect = aSelect;
-}
-
-HTMLOptionsCollection::~HTMLOptionsCollection()
-{
- DropReference();
-}
-
-void
-HTMLOptionsCollection::DropReference()
-{
- // Drop our (non ref-counted) reference
- mSelect = nullptr;
-}
+ : mSelect(aSelect)
+{}
nsresult
HTMLOptionsCollection::GetOptionIndex(Element* aOption,
@@ -88,7 +73,9 @@ HTMLOptionsCollection::GetOptionIndex(Element* aOption,
}
-NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(HTMLOptionsCollection, mElements)
+NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(HTMLOptionsCollection,
+ mElements,
+ mSelect)
// nsISupports
@@ -124,10 +111,6 @@ HTMLOptionsCollection::GetLength(uint32_t* aLength)
NS_IMETHODIMP
HTMLOptionsCollection::SetLength(uint32_t aLength)
{
- if (!mSelect) {
- return NS_ERROR_UNEXPECTED;
- }
-
return mSelect->SetLength(aLength);
}
@@ -135,10 +118,6 @@ NS_IMETHODIMP
HTMLOptionsCollection::SetOption(uint32_t aIndex,
nsIDOMHTMLOptionElement* aOption)
{
- if (!mSelect) {
- return NS_OK;
- }
-
// if the new option is null, just remove this option. Note that it's safe
// to pass a too-large aIndex in here.
if (!aOption) {
@@ -187,11 +166,6 @@ HTMLOptionsCollection::SetOption(uint32_t aIndex,
int32_t
HTMLOptionsCollection::GetSelectedIndex(ErrorResult& aError)
{
- if (!mSelect) {
- aError.Throw(NS_ERROR_UNEXPECTED);
- return 0;
- }
-
int32_t selectedIndex;
aError = mSelect->GetSelectedIndex(&selectedIndex);
return selectedIndex;
@@ -209,11 +183,6 @@ void
HTMLOptionsCollection::SetSelectedIndex(int32_t aSelectedIndex,
ErrorResult& aError)
{
- if (!mSelect) {
- aError.Throw(NS_ERROR_UNEXPECTED);
- return;
- }
-
aError = mSelect->SetSelectedIndex(aSelectedIndex);
}
@@ -339,22 +308,12 @@ HTMLOptionsCollection::Add(const HTMLOptionOrOptGroupElement& aElement,
const Nullable<HTMLElementOrLong>& aBefore,
ErrorResult& aError)
{
- if (!mSelect) {
- aError.Throw(NS_ERROR_NOT_INITIALIZED);
- return;
- }
-
mSelect->Add(aElement, aBefore, aError);
}
void
HTMLOptionsCollection::Remove(int32_t aIndex, ErrorResult& aError)
{
- if (!mSelect) {
- aError.Throw(NS_ERROR_UNEXPECTED);
- return;
- }
-
uint32_t len = 0;
mSelect->GetLength(&len);
if (aIndex < 0 || (uint32_t)aIndex >= len)
diff --git a/dom/html/HTMLOptionsCollection.h b/dom/html/HTMLOptionsCollection.h
index 21123b3d2..496919555 100644
--- a/dom/html/HTMLOptionsCollection.h
+++ b/dom/html/HTMLOptionsCollection.h
@@ -46,7 +46,7 @@ public:
using nsWrapperCache::GetWrapper;
virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
protected:
- virtual ~HTMLOptionsCollection();
+ virtual ~HTMLOptionsCollection() = default;
virtual JSObject* GetWrapperPreserveColorInternal() override
{
@@ -113,11 +113,6 @@ public:
}
/**
- * Drop the reference to the select. Called during select destruction.
- */
- void DropReference();
-
- /**
* Finds the index of a given option element.
* If the option isn't part of the collection, return NS_ERROR_FAILURE
* without setting aIndex.
@@ -161,7 +156,7 @@ private:
* various members such as InsertOptionAt are also infallible. */
nsTArray<RefPtr<mozilla::dom::HTMLOptionElement> > mElements;
/** The select element that contains this array */
- HTMLSelectElement* mSelect;
+ RefPtr<HTMLSelectElement> mSelect;
};
} // namespace dom
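
Editorial note (not part of the patch): the HTMLOptionsCollection hunks above replace the manually managed raw mSelect back-pointer, with its DropReference()/null-check protocol, by a cycle-collected RefPtr<HTMLSelectElement>. mSelect is then always non-null for the collection's lifetime, and the select<->collection cycle is broken by the cycle collector (the added traverse/unlink of mSelect). The sketch below contrasts the two ownership shapes with simplified stand-in types, not Gecko classes; note that plain shared_ptr cannot break such a cycle, which is why the patch relies on the cycle-collection macros rather than reference counting alone.

#include <cassert>
#include <memory>

struct Select;                                    // simplified stand-in, not the Gecko class

struct OptionsCollectionOld {
  Select* mSelect = nullptr;                      // weak: the select must call DropReference()
  void DropReference() { mSelect = nullptr; }
  bool Add() { return mSelect != nullptr; }       // every entry point needs a null check
};

struct OptionsCollectionNew {
  std::shared_ptr<Select> mSelect;                // strong: valid for the collection's lifetime
  explicit OptionsCollectionNew(std::shared_ptr<Select> aSelect)
    : mSelect(std::move(aSelect)) {}
  bool Add() { assert(mSelect); return true; }    // no null-check branches needed
};

struct Select {};

int main() {
  auto select = std::make_shared<Select>();
  OptionsCollectionNew collection(select);
  assert(collection.Add());
  // In Gecko the select also owns its collection, so this strong edge forms a cycle
  // that the cycle collector is responsible for breaking; shared_ptr alone would leak it.
}
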
diff --git a/dom/html/HTMLSelectElement.cpp b/dom/html/HTMLSelectElement.cpp
index 53f42317a..9ba0a1efe 100644
--- a/dom/html/HTMLSelectElement.cpp
+++ b/dom/html/HTMLSelectElement.cpp
@@ -130,11 +130,6 @@ HTMLSelectElement::HTMLSelectElement(already_AddRefed<mozilla::dom::NodeInfo>& a
NS_EVENT_STATE_VALID);
}
-HTMLSelectElement::~HTMLSelectElement()
-{
- mOptions->DropReference();
-}
-
// ISupports
NS_IMPL_CYCLE_COLLECTION_CLASS(HTMLSelectElement)
diff --git a/dom/html/HTMLSelectElement.h b/dom/html/HTMLSelectElement.h
index 8a25385de..dc1075cd7 100644
--- a/dom/html/HTMLSelectElement.h
+++ b/dom/html/HTMLSelectElement.h
@@ -436,7 +436,7 @@ public:
void SetOpenInParentProcess(bool aVal);
protected:
- virtual ~HTMLSelectElement();
+ virtual ~HTMLSelectElement() = default;
friend class SafeOptionListMutation;
diff --git a/dom/html/TextTrackManager.cpp b/dom/html/TextTrackManager.cpp
index 4266575f7..cc14858b6 100644
--- a/dom/html/TextTrackManager.cpp
+++ b/dom/html/TextTrackManager.cpp
@@ -29,6 +29,13 @@ namespace dom {
NS_IMPL_ISUPPORTS(TextTrackManager::ShutdownObserverProxy, nsIObserver);
+void
+TextTrackManager::ShutdownObserverProxy::Unregister()
+{
+ nsContentUtils::UnregisterShutdownObserver(this);
+ mManager = nullptr;
+}
+
CompareTextTracks::CompareTextTracks(HTMLMediaElement* aMediaElement)
{
mMediaElement = aMediaElement;
@@ -137,7 +144,7 @@ TextTrackManager::TextTrackManager(HTMLMediaElement *aMediaElement)
TextTrackManager::~TextTrackManager()
{
WEBVTT_LOG("%p ~TextTrackManager",this);
- nsContentUtils::UnregisterShutdownObserver(mShutdownProxy);
+ mShutdownProxy->Unregister();
}
TextTrackList*
diff --git a/dom/html/TextTrackManager.h b/dom/html/TextTrackManager.h
index d20707346..4ad1a57a7 100644
--- a/dom/html/TextTrackManager.h
+++ b/dom/html/TextTrackManager.h
@@ -170,11 +170,15 @@ private:
{
MOZ_ASSERT(NS_IsMainThread());
if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
- nsContentUtils::UnregisterShutdownObserver(this);
- mManager->NotifyShutdown();
+ if (mManager) {
+ mManager->NotifyShutdown();
+ }
+ Unregister();
}
return NS_OK;
}
+
+ void Unregister();
private:
~ShutdownObserverProxy() {};
diff --git a/dom/ipc/ContentChild.cpp b/dom/ipc/ContentChild.cpp
index fc288e2c5..fdf0fcf3e 100644
--- a/dom/ipc/ContentChild.cpp
+++ b/dom/ipc/ContentChild.cpp
@@ -171,7 +171,6 @@
#include "GMPServiceChild.h"
#include "gfxPlatform.h"
#include "nscore.h" // for NS_FREE_PERMANENT_DATA
-#include "VRManagerChild.h"
using namespace mozilla;
using namespace mozilla::docshell;
@@ -1148,7 +1147,6 @@ ContentChild::RecvGMPsChanged(nsTArray<GMPCapabilityData>&& capabilities)
bool
ContentChild::RecvInitRendering(Endpoint<PCompositorBridgeChild>&& aCompositor,
Endpoint<PImageBridgeChild>&& aImageBridge,
- Endpoint<PVRManagerChild>&& aVRBridge,
Endpoint<PVideoDecoderManagerChild>&& aVideoManager)
{
if (!CompositorBridgeChild::InitForContent(Move(aCompositor))) {
@@ -1157,9 +1155,6 @@ ContentChild::RecvInitRendering(Endpoint<PCompositorBridgeChild>&& aCompositor,
if (!ImageBridgeChild::InitForContent(Move(aImageBridge))) {
return false;
}
- if (!gfx::VRManagerChild::InitForContent(Move(aVRBridge))) {
- return false;
- }
VideoDecoderManagerChild::InitForContent(Move(aVideoManager));
return true;
}
@@ -1167,7 +1162,6 @@ ContentChild::RecvInitRendering(Endpoint<PCompositorBridgeChild>&& aCompositor,
bool
ContentChild::RecvReinitRendering(Endpoint<PCompositorBridgeChild>&& aCompositor,
Endpoint<PImageBridgeChild>&& aImageBridge,
- Endpoint<PVRManagerChild>&& aVRBridge,
Endpoint<PVideoDecoderManagerChild>&& aVideoManager)
{
nsTArray<RefPtr<TabChild>> tabs = TabChild::GetAll();
@@ -1186,9 +1180,6 @@ ContentChild::RecvReinitRendering(Endpoint<PCompositorBridgeChild>&& aCompositor
if (!ImageBridgeChild::ReinitForContent(Move(aImageBridge))) {
return false;
}
- if (!gfx::VRManagerChild::ReinitForContent(Move(aVRBridge))) {
- return false;
- }
// Establish new PLayerTransactions.
for (const auto& tabChild : tabs) {
diff --git a/dom/ipc/ContentChild.h b/dom/ipc/ContentChild.h
index ba590b58e..f29d17e7f 100644
--- a/dom/ipc/ContentChild.h
+++ b/dom/ipc/ContentChild.h
@@ -152,15 +152,13 @@ public:
RecvInitRendering(
Endpoint<PCompositorBridgeChild>&& aCompositor,
Endpoint<PImageBridgeChild>&& aImageBridge,
- Endpoint<PVRManagerChild>&& aVRBridge,
- Endpoint<PVideoDecoderManagerChild>&& aVideoManager) override;
+ Endpoint<PVideoDecoderManagerChild>&& aVideoManager);
bool
RecvReinitRendering(
Endpoint<PCompositorBridgeChild>&& aCompositor,
Endpoint<PImageBridgeChild>&& aImageBridge,
- Endpoint<PVRManagerChild>&& aVRBridge,
- Endpoint<PVideoDecoderManagerChild>&& aVideoManager) override;
+ Endpoint<PVideoDecoderManagerChild>&& aVideoManager);
PProcessHangMonitorChild*
AllocPProcessHangMonitorChild(Transport* aTransport,
diff --git a/dom/ipc/ContentParent.cpp b/dom/ipc/ContentParent.cpp
index 5c6aadb77..417420ecb 100644
--- a/dom/ipc/ContentParent.cpp
+++ b/dom/ipc/ContentParent.cpp
@@ -2073,21 +2073,18 @@ ContentParent::InitInternal(ProcessPriority aInitialPriority,
Endpoint<PCompositorBridgeChild> compositor;
Endpoint<PImageBridgeChild> imageBridge;
- Endpoint<PVRManagerChild> vrBridge;
Endpoint<PVideoDecoderManagerChild> videoManager;
DebugOnly<bool> opened = gpm->CreateContentBridges(
OtherPid(),
&compositor,
&imageBridge,
- &vrBridge,
&videoManager);
MOZ_ASSERT(opened);
Unused << SendInitRendering(
Move(compositor),
Move(imageBridge),
- Move(vrBridge),
Move(videoManager));
gpm->AddListener(this);
@@ -2201,21 +2198,18 @@ ContentParent::OnCompositorUnexpectedShutdown()
Endpoint<PCompositorBridgeChild> compositor;
Endpoint<PImageBridgeChild> imageBridge;
- Endpoint<PVRManagerChild> vrBridge;
Endpoint<PVideoDecoderManagerChild> videoManager;
DebugOnly<bool> opened = gpm->CreateContentBridges(
OtherPid(),
&compositor,
&imageBridge,
- &vrBridge,
&videoManager);
MOZ_ASSERT(opened);
Unused << SendReinitRendering(
Move(compositor),
Move(imageBridge),
- Move(vrBridge),
Move(videoManager));
}
diff --git a/dom/ipc/PContent.ipdl b/dom/ipc/PContent.ipdl
index c01ad59c1..9298f9d02 100644
--- a/dom/ipc/PContent.ipdl
+++ b/dom/ipc/PContent.ipdl
@@ -44,7 +44,6 @@ include protocol PRemoteSpellcheckEngine;
include protocol PWebBrowserPersistDocument;
include protocol PWebrtcGlobal;
include protocol PPresentation;
-include protocol PVRManager;
include protocol PVideoDecoderManager;
include protocol PFlyWebPublishedServer;
include DOMTypes;
@@ -311,7 +310,6 @@ child:
async InitRendering(
Endpoint<PCompositorBridgeChild> compositor,
Endpoint<PImageBridgeChild> imageBridge,
- Endpoint<PVRManagerChild> vr,
Endpoint<PVideoDecoderManagerChild> video);
// Re-create the rendering stack using the given endpoints. This is sent
@@ -320,7 +318,6 @@ child:
async ReinitRendering(
Endpoint<PCompositorBridgeChild> compositor,
Endpoint<PImageBridgeChild> bridge,
- Endpoint<PVRManagerChild> vr,
Endpoint<PVideoDecoderManagerChild> video);
/**
diff --git a/dom/ipc/TabChild.cpp b/dom/ipc/TabChild.cpp
index 244fa9969..3fe94001e 100644
--- a/dom/ipc/TabChild.cpp
+++ b/dom/ipc/TabChild.cpp
@@ -107,7 +107,6 @@
#include "nsDeviceContext.h"
#include "nsSandboxFlags.h"
#include "FrameLayerBuilder.h"
-#include "VRManagerChild.h"
#include "nsICommandParams.h"
#include "nsISHistory.h"
#include "nsQueryObject.h"
@@ -2565,7 +2564,6 @@ TabChild::InitRenderingState(const TextureFactoryIdentifier& aTextureFactoryIden
lf->SetShadowManager(shadowManager);
lf->IdentifyTextureHost(mTextureFactoryIdentifier);
ImageBridgeChild::IdentifyCompositorTextureHost(mTextureFactoryIdentifier);
- gfx::VRManagerChild::IdentifyTextureHost(mTextureFactoryIdentifier);
}
mRemoteFrame = remoteFrame;
diff --git a/dom/media/DecoderTraits.cpp b/dom/media/DecoderTraits.cpp
index 56ebd9ce9..f8cb4fd0e 100644
--- a/dom/media/DecoderTraits.cpp
+++ b/dom/media/DecoderTraits.cpp
@@ -18,15 +18,6 @@
#include "WebMDecoder.h"
#include "WebMDemuxer.h"
-#ifdef MOZ_ANDROID_OMX
-#include "AndroidMediaDecoder.h"
-#include "AndroidMediaReader.h"
-#include "AndroidMediaPluginHost.h"
-#endif
-#ifdef MOZ_DIRECTSHOW
-#include "DirectShowDecoder.h"
-#include "DirectShowReader.h"
-#endif
#ifdef MOZ_FMP4
#include "MP4Decoder.h"
#include "MP4Demuxer.h"
@@ -110,29 +101,6 @@ IsHttpLiveStreamingType(const nsACString& aType)
return CodecListContains(gHttpLiveStreamingTypes, aType);
}
-#ifdef MOZ_ANDROID_OMX
-static bool
-IsAndroidMediaType(const nsACString& aType)
-{
- if (!MediaDecoder::IsAndroidMediaPluginEnabled()) {
- return false;
- }
-
- static const char* supportedTypes[] = {
- "audio/mpeg", "audio/mp4", "video/mp4", "video/x-m4v", nullptr
- };
- return CodecListContains(supportedTypes, aType);
-}
-#endif
-
-#ifdef MOZ_DIRECTSHOW
-static bool
-IsDirectShowSupportedType(const nsACString& aType)
-{
- return DirectShowDecoder::GetSupportedCodecs(aType, nullptr);
-}
-#endif
-
#ifdef MOZ_FMP4
static bool
IsMP4SupportedType(const MediaContentType& aParsedType,
@@ -247,14 +215,6 @@ CanHandleCodecsType(const MediaContentType& aType,
if (IsFlacSupportedType(aType.GetMIMEType(), aType.GetCodecs())) {
return CANPLAY_YES;
}
-#ifdef MOZ_DIRECTSHOW
- DirectShowDecoder::GetSupportedCodecs(aType.GetMIMEType(), &codecList);
-#endif
-#ifdef MOZ_ANDROID_OMX
- if (MediaDecoder::IsAndroidMediaPluginEnabled()) {
- EnsureAndroidMediaPluginHost()->FindDecoder(aType.GetMIMEType(), &codecList);
- }
-#endif
if (!codecList) {
return CANPLAY_MAYBE;
}
@@ -320,17 +280,6 @@ CanHandleMediaType(const MediaContentType& aType,
if (IsFlacSupportedType(aType.GetMIMEType())) {
return CANPLAY_MAYBE;
}
-#ifdef MOZ_DIRECTSHOW
- if (DirectShowDecoder::GetSupportedCodecs(aType.GetMIMEType(), nullptr)) {
- return CANPLAY_MAYBE;
- }
-#endif
-#ifdef MOZ_ANDROID_OMX
- if (MediaDecoder::IsAndroidMediaPluginEnabled() &&
- EnsureAndroidMediaPluginHost()->FindDecoder(aType.GetMIMEType(), nullptr)) {
- return CANPLAY_MAYBE;
- }
-#endif
return CANPLAY_NO;
}
@@ -411,28 +360,12 @@ InstantiateDecoder(const nsACString& aType,
decoder = new FlacDecoder(aOwner);
return decoder.forget();
}
-#ifdef MOZ_ANDROID_OMX
- if (MediaDecoder::IsAndroidMediaPluginEnabled() &&
- EnsureAndroidMediaPluginHost()->FindDecoder(aType, nullptr)) {
- decoder = new AndroidMediaDecoder(aOwner, aType);
- return decoder.forget();
- }
-#endif
if (IsWebMSupportedType(aType)) {
decoder = new WebMDecoder(aOwner);
return decoder.forget();
}
-#ifdef MOZ_DIRECTSHOW
- // Note: DirectShow should come before WMF, so that we prefer DirectShow's
- // MP3 support over WMF's.
- if (IsDirectShowSupportedType(aType)) {
- decoder = new DirectShowDecoder(aOwner);
- return decoder.forget();
- }
-#endif
-
return nullptr;
}
@@ -461,7 +394,7 @@ MediaDecoderReader* DecoderTraits::CreateReader(const nsACString& aType, Abstrac
} else
#endif
if (IsMP3SupportedType(aType)) {
- decoderReader = new MediaFormatReader(aDecoder, new mp3::MP3Demuxer(aDecoder->GetResource()));
+ decoderReader = new MediaFormatReader(aDecoder, new MP3Demuxer(aDecoder->GetResource()));
} else
if (IsAACSupportedType(aType)) {
decoderReader = new MediaFormatReader(aDecoder, new ADTSDemuxer(aDecoder->GetResource()));
@@ -475,22 +408,10 @@ MediaDecoderReader* DecoderTraits::CreateReader(const nsACString& aType, Abstrac
if (IsOggSupportedType(aType)) {
decoderReader = new MediaFormatReader(aDecoder, new OggDemuxer(aDecoder->GetResource()));
} else
-#ifdef MOZ_ANDROID_OMX
- if (MediaDecoder::IsAndroidMediaPluginEnabled() &&
- EnsureAndroidMediaPluginHost()->FindDecoder(aType, nullptr)) {
- decoderReader = new AndroidMediaReader(aDecoder, aType);
- } else
-#endif
if (IsWebMSupportedType(aType)) {
decoderReader =
new MediaFormatReader(aDecoder, new WebMDemuxer(aDecoder->GetResource()));
- } else
-#ifdef MOZ_DIRECTSHOW
- if (IsDirectShowSupportedType(aType)) {
- decoderReader = new DirectShowReader(aDecoder);
- } else
-#endif
- if (false) {} // dummy if to take care of the dangling else
+ }
return decoderReader;
}
@@ -509,18 +430,12 @@ bool DecoderTraits::IsSupportedInVideoDocument(const nsACString& aType)
return
IsOggSupportedType(aType) ||
IsWebMSupportedType(aType) ||
-#ifdef MOZ_ANDROID_OMX
- (MediaDecoder::IsAndroidMediaPluginEnabled() && IsAndroidMediaType(aType)) ||
-#endif
#ifdef MOZ_FMP4
IsMP4SupportedType(aType, /* DecoderDoctorDiagnostics* */ nullptr) ||
#endif
IsMP3SupportedType(aType) ||
IsAACSupportedType(aType) ||
IsFlacSupportedType(aType) ||
-#ifdef MOZ_DIRECTSHOW
- IsDirectShowSupportedType(aType) ||
-#endif
false;
}
diff --git a/dom/media/MP3FrameParser.cpp b/dom/media/MP3FrameParser.cpp
deleted file mode 100644
index 242e3df00..000000000
--- a/dom/media/MP3FrameParser.cpp
+++ /dev/null
@@ -1,591 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include <algorithm>
-
-#include "nsMemory.h"
-#include "MP3FrameParser.h"
-#include "VideoUtils.h"
-
-
-#define FROM_BIG_ENDIAN(X) ((uint32_t)((uint8_t)(X)[0] << 24 | (uint8_t)(X)[1] << 16 | \
- (uint8_t)(X)[2] << 8 | (uint8_t)(X)[3]))
-
-
-namespace mozilla {
-
-/*
- * Following code taken from http://www.hydrogenaudio.org/forums/index.php?showtopic=85125
- * with permission from the author, Nick Wallette <sirnickity@gmail.com>.
- */
-
-/* BEGIN shameless copy and paste */
-
-// Bitrates - use [version][layer][bitrate]
-const uint16_t mpeg_bitrates[4][4][16] = {
- { // Version 2.5
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // Reserved
- { 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 }, // Layer 3
- { 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 }, // Layer 2
- { 0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0 } // Layer 1
- },
- { // Reserved
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // Invalid
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // Invalid
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // Invalid
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 } // Invalid
- },
- { // Version 2
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // Reserved
- { 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 }, // Layer 3
- { 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 }, // Layer 2
- { 0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0 } // Layer 1
- },
- { // Version 1
- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // Reserved
- { 0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 0 }, // Layer 3
- { 0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 0 }, // Layer 2
- { 0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 0 }, // Layer 1
- }
-};
-
-// Sample rates - use [version][srate]
-const uint16_t mpeg_srates[4][4] = {
- { 11025, 12000, 8000, 0 }, // MPEG 2.5
- { 0, 0, 0, 0 }, // Reserved
- { 22050, 24000, 16000, 0 }, // MPEG 2
- { 44100, 48000, 32000, 0 } // MPEG 1
-};
-
-// Samples per frame - use [version][layer]
-const uint16_t mpeg_frame_samples[4][4] = {
-// Rsvd 3 2 1 < Layer v Version
- { 0, 576, 1152, 384 }, // 2.5
- { 0, 0, 0, 0 }, // Reserved
- { 0, 576, 1152, 384 }, // 2
- { 0, 1152, 1152, 384 } // 1
-};
-
-// Slot size (MPEG unit of measurement) - use [layer]
-const uint8_t mpeg_slot_size[4] = { 0, 1, 1, 4 }; // Rsvd, 3, 2, 1
-
-uint16_t
-MP3Frame::CalculateLength()
-{
- // Lookup real values of these fields
- uint32_t bitrate = mpeg_bitrates[mVersion][mLayer][mBitrate] * 1000;
- uint32_t samprate = mpeg_srates[mVersion][mSampleRate];
- uint16_t samples = mpeg_frame_samples[mVersion][mLayer];
- uint8_t slot_size = mpeg_slot_size[mLayer];
-
- // In-between calculations
- float bps = (float)samples / 8.0;
- float fsize = ( (bps * (float)bitrate) / (float)samprate )
- + ( (mPad) ? slot_size : 0 );
-
- // Frame sizes are truncated integers
- return (uint16_t)fsize;
-}
-
-/* END shameless copy and paste */
-
-
-/** MP3Parser methods **/
-
-MP3Parser::MP3Parser()
- : mCurrentChar(0)
-{ }
-
-void
-MP3Parser::Reset()
-{
- mCurrentChar = 0;
-}
-
-uint16_t
-MP3Parser::ParseFrameLength(uint8_t ch)
-{
- mData.mRaw[mCurrentChar] = ch;
-
- MP3Frame &frame = mData.mFrame;
-
- // Validate MP3 header as we read. We can't mistake the start of an MP3 frame
- // for the middle of another frame due to the sync byte at the beginning
- // of the frame.
-
- // The only valid position for an all-high byte is the sync byte at the
- // beginning of the frame.
- if (ch == 0xff) {
- mCurrentChar = 0;
- }
-
- // Make sure the current byte is valid in context. If not, reset the parser.
- if (mCurrentChar == 2) {
- if (frame.mBitrate == 0x0f) {
- goto fail;
- }
- } else if (mCurrentChar == 1) {
- if (frame.mSync2 != 0x07
- || frame.mVersion == 0x01
- || frame.mLayer == 0x00) {
- goto fail;
- }
- }
-
- // The only valid character at the beginning of the header is 0xff. Fail if
- // it's different.
- if (mCurrentChar == 0 && frame.mSync1 != 0xff) {
- // Couldn't find the sync byte. Fail.
- return 0;
- }
-
- mCurrentChar++;
- MOZ_ASSERT(mCurrentChar <= sizeof(MP3Frame));
-
- // Don't have a full header yet.
- if (mCurrentChar < sizeof(MP3Frame)) {
- return 0;
- }
-
- // Woo, valid header. Return the length.
- mCurrentChar = 0;
- return frame.CalculateLength();
-
-fail:
- Reset();
- return 0;
-}
-
-uint32_t
-MP3Parser::GetSampleRate()
-{
- MP3Frame &frame = mData.mFrame;
- return mpeg_srates[frame.mVersion][frame.mSampleRate];
-}
-
-uint32_t
-MP3Parser::GetSamplesPerFrame()
-{
- MP3Frame &frame = mData.mFrame;
- return mpeg_frame_samples[frame.mVersion][frame.mLayer];
-}
-
-
-/** ID3Parser methods **/
-
-const char sID3Head[3] = { 'I', 'D', '3' };
-const uint32_t ID3_HEADER_LENGTH = 10;
-const uint32_t ID3_FOOTER_LENGTH = 10;
-const uint8_t ID3_FOOTER_PRESENT = 0x10;
-
-ID3Parser::ID3Parser()
- : mCurrentChar(0)
- , mVersion(0)
- , mFlags(0)
- , mHeaderLength(0)
-{ }
-
-void
-ID3Parser::Reset()
-{
- mCurrentChar = mVersion = mFlags = mHeaderLength = 0;
-}
-
-bool
-ID3Parser::ParseChar(char ch)
-{
- switch (mCurrentChar) {
- // The first three bytes of an ID3v2 header must match the string "ID3".
- case 0: case 1: case 2:
- if (ch != sID3Head[mCurrentChar]) {
- goto fail;
- }
- break;
- // The fourth and fifth bytes give the version, between 2 and 4.
- case 3:
- if (ch < '\2' || ch > '\4') {
- goto fail;
- }
- mVersion = uint8_t(ch);
- break;
- case 4:
- if (ch != '\0') {
- goto fail;
- }
- break;
- // The sixth byte gives the flags; valid flags depend on the version.
- case 5:
- if ((ch & (0xff >> mVersion)) != '\0') {
- goto fail;
- }
- mFlags = uint8_t(ch);
- break;
- // Bytes seven through ten give the sum of the byte length of the extended
- // header, the padding and the frames after unsynchronisation.
- // These bytes form a 28-bit integer, with the high bit of each byte unset.
- case 6: case 7: case 8: case 9:
- if (ch & 0x80) {
- goto fail;
- }
- mHeaderLength <<= 7;
- mHeaderLength |= ch;
- if (mCurrentChar == 9) {
- mHeaderLength += ID3_HEADER_LENGTH;
- mHeaderLength += (mFlags & ID3_FOOTER_PRESENT) ? ID3_FOOTER_LENGTH : 0;
- }
- break;
- default:
- MOZ_CRASH("Header already fully parsed!");
- }
-
- mCurrentChar++;
-
- return IsParsed();
-
-fail:
- if (mCurrentChar) {
- Reset();
- return ParseChar(ch);
- }
- Reset();
- return false;
-}
-
-bool
-ID3Parser::IsParsed() const
-{
- return mCurrentChar >= ID3_HEADER_LENGTH;
-}
-
-uint32_t
-ID3Parser::GetHeaderLength() const
-{
- MOZ_ASSERT(IsParsed(),
- "Queried length of ID3 header before parsing finished.");
- return mHeaderLength;
-}
-
-
-/** VBR header helper stuff **/
-
-// Helper function to find a VBR header in an MP3 frame.
-// Based on information from
-// http://www.codeproject.com/Articles/8295/MPEG-Audio-Frame-Header
-
-const uint32_t VBRI_TAG = FROM_BIG_ENDIAN("VBRI");
-const uint32_t VBRI_OFFSET = 32 - sizeof(MP3Frame);
-const uint32_t VBRI_FRAME_COUNT_OFFSET = VBRI_OFFSET + 14;
-const uint32_t VBRI_MIN_FRAME_SIZE = VBRI_OFFSET + 26;
-
-const uint32_t XING_TAG = FROM_BIG_ENDIAN("Xing");
-enum XingFlags {
- XING_HAS_NUM_FRAMES = 0x01,
- XING_HAS_NUM_BYTES = 0x02,
- XING_HAS_TOC = 0x04,
- XING_HAS_VBR_SCALE = 0x08
-};
-
-static int64_t
-ParseXing(const char *aBuffer)
-{
- uint32_t flags = FROM_BIG_ENDIAN(aBuffer + 4);
-
- if (!(flags & XING_HAS_NUM_FRAMES)) {
- NS_WARNING("VBR file without frame count. Duration estimation likely to "
- "be totally wrong.");
- return -1;
- }
-
- int64_t numFrames = -1;
- if (flags & XING_HAS_NUM_FRAMES) {
- numFrames = FROM_BIG_ENDIAN(aBuffer + 8);
- }
-
- return numFrames;
-}
-
-static int64_t
-FindNumVBRFrames(const nsCString& aFrame)
-{
- const char *buffer = aFrame.get();
- const char *bufferEnd = aFrame.get() + aFrame.Length();
-
- // VBRI header is nice and well-defined; let's try to find that first.
- if (aFrame.Length() > VBRI_MIN_FRAME_SIZE &&
- FROM_BIG_ENDIAN(buffer + VBRI_OFFSET) == VBRI_TAG) {
- return FROM_BIG_ENDIAN(buffer + VBRI_FRAME_COUNT_OFFSET);
- }
-
- // We have to search for the Xing header as its position can change.
- for (; buffer + sizeof(XING_TAG) < bufferEnd; buffer++) {
- if (FROM_BIG_ENDIAN(buffer) == XING_TAG) {
- return ParseXing(buffer);
- }
- }
-
- return -1;
-}
-
-
-/** MP3FrameParser methods **/
-
-// Some MP3's have large ID3v2 tags, up to 150KB, so we allow lots of
-// skipped bytes to be read, just in case, before we give up and assume
-// we're not parsing an MP3 stream.
-static const uint32_t MAX_SKIPPED_BYTES = 4096;
-
-enum {
- MP3_HEADER_LENGTH = 4,
-};
-
-MP3FrameParser::MP3FrameParser(int64_t aLength)
-: mLock("MP3FrameParser.mLock"),
- mTotalID3Size(0),
- mTotalFrameSize(0),
- mFrameCount(0),
- mOffset(0),
- mLength(aLength),
- mMP3Offset(-1),
- mSamplesPerSecond(0),
- mFirstFrameEnd(-1),
- mIsMP3(MAYBE_MP3)
-{ }
-
-nsresult MP3FrameParser::ParseBuffer(const uint8_t* aBuffer,
- uint32_t aLength,
- int64_t aStreamOffset,
- uint32_t* aOutBytesRead)
-{
- // Iterate forwards over the buffer, looking for ID3 tag, or MP3
- // Frame headers.
- const uint8_t *buffer = aBuffer;
- const uint8_t *bufferEnd = aBuffer + aLength;
-
- // If we haven't found any MP3 frame data yet, there might be ID3 headers
- // we can skip over.
- if (mMP3Offset < 0) {
- for (const uint8_t *ch = buffer; ch < bufferEnd; ch++) {
- if (mID3Parser.ParseChar(*ch)) {
- // Found an ID3 header. We don't care about the body of the header, so
- // just skip past.
- buffer = ch + mID3Parser.GetHeaderLength() - (ID3_HEADER_LENGTH - 1);
-
- if (buffer <= ch) {
- return NS_ERROR_FAILURE;
- }
-
- ch = buffer;
-
- mTotalID3Size += mID3Parser.GetHeaderLength();
-
- // Yes, this is an MP3!
- mIsMP3 = DEFINITELY_MP3;
-
- mID3Parser.Reset();
- }
- }
- }
-
- // The first MP3 frame in a variable bitrate stream can contain metadata
- // for duration estimation and seeking, so we buffer that first frame here.
- if (aStreamOffset < mFirstFrameEnd) {
- uint64_t copyLen = std::min((int64_t)aLength, mFirstFrameEnd - aStreamOffset);
- mFirstFrame.Append((const char *)buffer, copyLen);
- buffer += copyLen;
- }
-
- while (buffer < bufferEnd) {
- uint16_t frameLen = mMP3Parser.ParseFrameLength(*buffer);
-
- if (frameLen) {
- // We've found an MP3 frame!
- // This is the first frame (and the only one we'll bother parsing), so:
- // * Mark this stream as MP3;
- // * Store the offset at which the MP3 data started; and
- // * Start buffering the frame, as it might contain handy metadata.
-
- // We're now sure this is an MP3 stream.
- mIsMP3 = DEFINITELY_MP3;
-
- // We need to know these to convert the number of frames in the stream
- // to the length of the stream in seconds.
- mSamplesPerSecond = mMP3Parser.GetSampleRate();
- mSamplesPerFrame = mMP3Parser.GetSamplesPerFrame();
-
- // If the stream has a constant bitrate, we should only need the length
- // of the first frame and the length (in bytes) of the stream to
- // estimate the length (in seconds).
- mTotalFrameSize += frameLen;
- mFrameCount++;
-
- // If |mMP3Offset| is already set then this isn't the first MP3 frame we
- // have seen in the stream; just skip over the rest of this frame.
- if (mMP3Offset > -1) {
- uint16_t skip = frameLen - sizeof(MP3Frame);
- buffer += skip ? skip : 1;
- continue;
- }
-
- // Remember the offset of the MP3 stream.
- // We're at the last byte of an MP3Frame, so MP3 data started
- // sizeof(MP3Frame) - 1 bytes ago.
- mMP3Offset = aStreamOffset
- + (buffer - aBuffer)
- - (sizeof(MP3Frame) - 1);
-
- buffer++;
-
- // If the stream has a variable bitrate, the first frame has metadata
- // we need for duration estimation and seeking. Start buffering it so we
- // can parse it later.
- mFirstFrameEnd = mMP3Offset + frameLen;
- uint64_t currOffset = buffer - aBuffer + aStreamOffset;
- uint64_t copyLen = std::min(mFirstFrameEnd - currOffset,
- (uint64_t)(bufferEnd - buffer));
- mFirstFrame.Append((const char *)buffer, copyLen);
-
- buffer += copyLen;
-
- } else {
- // Nothing to see here. Move along.
- buffer++;
- }
- }
-
- *aOutBytesRead = buffer - aBuffer;
-
- if (mFirstFrameEnd > -1 && mFirstFrameEnd <= aStreamOffset + buffer - aBuffer) {
- // We have our whole first frame. Try to find a VBR header.
- mNumFrames = FindNumVBRFrames(mFirstFrame);
- mFirstFrameEnd = -1;
- }
-
- return NS_OK;
-}
-
-void MP3FrameParser::Parse(const uint8_t* aBuffer, uint32_t aLength, uint64_t aOffset)
-{
- MutexAutoLock mon(mLock);
-
- if (HasExactDuration()) {
- // We know the duration; nothing to do here.
- return;
- }
-
- const uint8_t* buffer = aBuffer;
- int32_t length = aLength;
- uint64_t offset = aOffset;
-
- // Got some data we have seen already. Skip forward to what we need.
- if (aOffset < mOffset) {
- buffer += mOffset - aOffset;
- length -= mOffset - aOffset;
- offset = mOffset;
-
- if (length <= 0) {
- return;
- }
- }
-
- // If there is a discontinuity in the input stream, reset the state of the
- // parsers so we don't get any partial headers.
- if (mOffset < aOffset) {
- if (!mID3Parser.IsParsed()) {
- // Only reset this if it hasn't finished yet.
- mID3Parser.Reset();
- }
-
- if (mFirstFrameEnd > -1) {
- NS_WARNING("Discontinuity in input while buffering first frame.");
- mFirstFrameEnd = -1;
- }
-
- mMP3Parser.Reset();
- }
-
- uint32_t bytesRead = 0;
- if (NS_FAILED(ParseBuffer(buffer,
- length,
- offset,
- &bytesRead))) {
- return;
- }
-
- MOZ_ASSERT(length <= (int)bytesRead, "All bytes should have been consumed");
-
- // Update next data offset
- mOffset = offset + bytesRead;
-
- // If we've parsed lots of data and we still have nothing, just give up.
- // We don't count ID3 headers towards the skipped bytes count, as MP3 files
- // can have massive ID3 sections.
- if (!mID3Parser.IsParsed() && mMP3Offset < 0 &&
- mOffset - mTotalID3Size > MAX_SKIPPED_BYTES) {
- mIsMP3 = NOT_MP3;
- }
-}
-
-int64_t MP3FrameParser::GetDuration()
-{
- MutexAutoLock mon(mLock);
-
- if (!ParsedHeaders() || !mSamplesPerSecond) {
- // Not a single frame decoded yet.
- return -1;
- }
-
- MOZ_ASSERT(mFrameCount > 0 && mTotalFrameSize > 0,
- "Frame parser should have seen at least one MP3 frame of positive length.");
-
- if (!mFrameCount || !mTotalFrameSize) {
- // This should never happen.
- return -1;
- }
-
- double frames;
- if (mNumFrames < 0) {
- // Estimate the number of frames in the stream based on the average frame
- // size and the length of the MP3 file.
- double frameSize = (double)mTotalFrameSize / mFrameCount;
- frames = (double)(mLength - mMP3Offset) / frameSize;
- } else {
- // We know the exact number of frames from the VBR header.
- frames = mNumFrames;
- }
-
- // The duration of each frame is constant over a given stream.
- double usPerFrame = USECS_PER_S * mSamplesPerFrame / mSamplesPerSecond;
-
- return frames * usPerFrame;
-}
-
-int64_t MP3FrameParser::GetMP3Offset()
-{
- MutexAutoLock mon(mLock);
- return mMP3Offset;
-}
-
-bool MP3FrameParser::ParsedHeaders()
-{
- // We have seen both the beginning and the end of the first MP3 frame in the
- // stream.
- return mMP3Offset > -1 && mFirstFrameEnd < 0;
-}
-
-bool MP3FrameParser::HasExactDuration()
-{
- return ParsedHeaders() && mNumFrames > -1;
-}
-
-bool MP3FrameParser::NeedsData()
-{
- // If we don't know the duration exactly then either:
- // - we're still waiting for a VBR header; or
- // - we look at all frames to constantly update our duration estimate.
- return IsMP3() && !HasExactDuration();
-}
-
-} // namespace mozilla
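Note on the estimate removed above: GetDuration extrapolates from the average size of the frames seen so far. The sketch below is illustrative only and not part of the patch; the helper name is hypothetical, and the 1152 samples-per-frame and 44.1 kHz figures are merely typical MPEG-1 Layer III values.

// Hypothetical helper illustrating the removed duration estimate (sketch only).
#include <cstdint>

static int64_t
EstimateDurationUs(int64_t aStreamBytes,       // bytes of MP3 data after the first frame offset
                   uint64_t aTotalFrameSize,   // total bytes of the frames seen so far
                   uint64_t aFrameCount,       // number of frames seen so far
                   uint16_t aSamplesPerFrame,  // e.g. 1152 for MPEG-1 Layer III
                   uint16_t aSamplesPerSecond) // e.g. 44100
{
  if (!aFrameCount || !aTotalFrameSize || !aSamplesPerSecond) {
    return -1;
  }
  // Extrapolate the frame count for the whole stream from the average frame size.
  double avgFrameSize = double(aTotalFrameSize) / double(aFrameCount);
  double frames = double(aStreamBytes) / avgFrameSize;
  // Every frame carries the same number of samples, so each frame's duration is fixed.
  double usPerFrame = 1000000.0 * aSamplesPerFrame / aSamplesPerSecond;
  return int64_t(frames * usPerFrame);
}
// Example: ~4 MB of 418-byte frames at 44.1 kHz gives roughly
// 10034 frames * 26122 us per frame, i.e. about 262 seconds.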
diff --git a/dom/media/MP3FrameParser.h b/dom/media/MP3FrameParser.h
deleted file mode 100644
index d2ba791fd..000000000
--- a/dom/media/MP3FrameParser.h
+++ /dev/null
@@ -1,219 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef MP3FrameParser_h
-#define MP3FrameParser_h
-
-#include <stdint.h>
-
-#include "mozilla/Mutex.h"
-#include "nsString.h"
-#include "Intervals.h"
-
-namespace mozilla {
-
-// Simple parser to tell whether we've found an ID3 header and how long it is,
-// so that we can skip it.
-// XXX maybe actually parse this stuff?
-class ID3Parser
-{
-public:
- ID3Parser();
-
- void Reset();
- bool ParseChar(char ch);
- bool IsParsed() const;
- uint32_t GetHeaderLength() const;
-
-private:
- uint32_t mCurrentChar;
- uint8_t mVersion;
- uint8_t mFlags;
- uint32_t mHeaderLength;
-};
-
-struct MP3Frame {
- uint16_t mSync1 : 8; // Always all set
- uint16_t mProtected : 1; // Ignored
- uint16_t mLayer : 2;
- uint16_t mVersion : 2;
- uint16_t mSync2 : 3; // Always all set
- uint16_t mPrivate : 1; // Ignored
- uint16_t mPad : 1;
- uint16_t mSampleRate : 2; // Index into mpeg_srates above
- uint16_t mBitrate : 4; // Index into mpeg_bitrates above
-
- uint16_t CalculateLength();
-};
-
-// Buffering parser for MP3 frames.
-class MP3Parser
-{
-public:
- MP3Parser();
-
- // Forget all data the parser has seen so far.
- void Reset();
-
- // Parse the given byte. If we have found a frame header, return the length of
- // the frame.
- uint16_t ParseFrameLength(uint8_t ch);
-
- // Get the sample rate from the current header.
- uint32_t GetSampleRate();
-
- // Get the number of samples per frame.
- uint32_t GetSamplesPerFrame();
-
-private:
- uint32_t mCurrentChar;
- union {
- uint8_t mRaw[3];
- MP3Frame mFrame;
- } mData;
-};
-
-
-// A description of the MP3 format and its extensions is available at
-//
-// http://www.codeproject.com/Articles/8295/MPEG-Audio-Frame-Header
-//
-// The data in MP3 streams is split into small frames, with each frame
-// containing a fixed number of samples. The duration of a frame depends
-// on the frame's sample rate, while its size in bytes depends on its bit
-// rate, which can vary from frame to frame, so it is necessary to examine
-// each individual frame of an MP3 stream to calculate the stream's overall
-// duration.
-//
-// The MP3 frame parser extracts information from an MP3 data stream. It
-// accepts a range of frames of an MP3 stream as input, and parses all
-// frames for their duration. Callers can query the stream's overall
-// duration from the parser.
-//
-// Call the method Parse to add new data. If you added
-// information for a certain stream position, you cannot go back to previous
-// positions. The parser will simply ignore the input. If you skip stream
-// positions, the duration of the related MP3 frames will be estimated from
-// the stream's average.
-//
-// The method GetDuration returns the calculated duration of the stream, including
-// estimates for skipped ranges.
-//
-// All public methods are thread-safe.
-
-class MP3FrameParser
-{
-public:
- explicit MP3FrameParser(int64_t aLength=-1);
-
- bool IsMP3() {
- MutexAutoLock mon(mLock);
- return mIsMP3 != NOT_MP3;
- }
-
- void Parse(const uint8_t* aBuffer, uint32_t aLength, uint64_t aStreamOffset);
-
- // Returns the duration, in microseconds. If the entire stream has not
- // been parsed yet, this is an estimate based on the bitrate of the
- // frames parsed so far.
- int64_t GetDuration();
-
- // Returns the offset of the first MP3 frame in the stream, or -1 if
- // no MP3 frame has been detected yet.
- int64_t GetMP3Offset();
-
- // Returns true if we've seen the whole first frame of the MP3 stream, and
- // therefore can make an estimate of the stream duration.
- // Otherwise, returns false.
- bool ParsedHeaders();
-
- // Returns true if we know the exact duration of the MP3 stream;
- // false otherwise.
- bool HasExactDuration();
-
- // Returns true if the parser needs more data for duration estimation.
- bool NeedsData();
- // Assign the total length of this MP3 stream.
- void SetLength(int64_t aLength) {
- MutexAutoLock mon(mLock);
- mLength = aLength;
- }
-private:
-
- // Parses aBuffer, starting at offset 0. Returns the number of bytes
- // parsed, relative to the start of the buffer. Note this may be
- // greater than aLength if the headers in the buffer indicate that
- // the frame or ID3 tag extends outside of aBuffer. Returns failure
- // if too many non-MP3 bytes are parsed.
- nsresult ParseBuffer(const uint8_t* aBuffer,
- uint32_t aLength,
- int64_t aStreamOffset,
- uint32_t* aOutBytesRead);
-
- // A low-contention lock for protecting the parser results
- Mutex mLock;
-
- // ID3 header parser. Keeps state between reads in case the header is
- // split across two reads.
- ID3Parser mID3Parser;
-
- // MP3 frame header parser.
- MP3Parser mMP3Parser;
-
- // If we read |MAX_SKIPPED_BYTES| from the stream without finding any MP3
- // frames, we give up and report |NOT_MP3|. Here we track the cumulative size
- // of any ID3 headers we've seen so big ID3 sections aren't counted towards
- // skipped bytes.
- uint32_t mTotalID3Size;
-
- // All fields below are protected by mLock
-
- // We keep stats on the total size of all the frames we've seen, as well as
- // how many there were, so that we can estimate the duration of the rest of
- // the stream.
- uint64_t mTotalFrameSize;
- uint64_t mFrameCount;
-
- // Offset of the last data parsed. This is the end offset of the last data
- // block parsed, so it's the start offset we expect to get on the next
- // call to Parse().
- uint64_t mOffset;
-
- // Total length of the stream in bytes.
- int64_t mLength;
-
- // Offset of first MP3 frame in the bitstream. Has value -1 until the
- // first MP3 frame is found.
- int64_t mMP3Offset;
-
- // The exact number of frames in this stream, if we know it. -1 otherwise.
- int64_t mNumFrames;
-
- // Number of audio samples per second and per frame. Fixed through the whole
- // file. If we know these variables as well as the number of frames in the
- // file, we can get an exact duration for the stream.
- uint16_t mSamplesPerSecond;
- uint16_t mSamplesPerFrame;
-
- // If the MP3 has a variable bitrate, then there *should* be metadata about
- // the encoding in the first frame. We buffer the first frame here.
- nsCString mFirstFrame;
-
- // While we are reading the first frame, this is the stream offset of the
- // last byte of that frame. -1 at all other times.
- int64_t mFirstFrameEnd;
-
- enum eIsMP3 {
- MAYBE_MP3, // We're giving the stream the benefit of the doubt...
- DEFINITELY_MP3, // We've hit at least one ID3 tag or MP3 frame.
- NOT_MP3 // Not found any evidence of the stream being MP3.
- NOT_MP3 // We have found no evidence of the stream being MP3.
-
- eIsMP3 mIsMP3;
-
-};
-
-} // namespace mozilla
-
-#endif
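For reference, a minimal sketch of how the removed MP3FrameParser API was typically driven; the surrounding function and the 4096-byte chunk size are hypothetical, and only the parser calls come from the deleted header.

// Hypothetical driver for the removed MP3FrameParser API (sketch only).
#include <algorithm>
#include <stdint.h>
#include "MP3FrameParser.h"

static int64_t
SketchEstimateDuration(const uint8_t* aData, uint32_t aSize, int64_t aStreamLength)
{
  mozilla::MP3FrameParser parser(aStreamLength);
  const uint32_t kChunk = 4096; // hypothetical read size
  uint64_t offset = 0;
  while (offset < aSize && parser.NeedsData()) {
    uint32_t len = std::min<uint64_t>(kChunk, aSize - offset);
    parser.Parse(aData + offset, len, offset);
    offset += len;
  }
  if (!parser.IsMP3() || !parser.ParsedHeaders()) {
    return -1;
  }
  return parser.GetDuration(); // microseconds; an estimate unless HasExactDuration()
}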
diff --git a/dom/media/MediaDecoder.cpp b/dom/media/MediaDecoder.cpp
index d027818de..cf2266bf6 100644
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -35,10 +35,6 @@
#include "Layers.h"
#include "mozilla/layers/ShadowLayers.h"
-#ifdef MOZ_ANDROID_OMX
-#include "AndroidBridge.h"
-#endif
-
using namespace mozilla::dom;
using namespace mozilla::layers;
using namespace mozilla::media;
@@ -1617,16 +1613,6 @@ MediaDecoder::IsWebMEnabled()
return Preferences::GetBool("media.webm.enabled");
}
-#ifdef MOZ_ANDROID_OMX
-bool
-MediaDecoder::IsAndroidMediaPluginEnabled()
-{
- return AndroidBridge::Bridge() &&
- AndroidBridge::Bridge()->GetAPIVersion() < 16 &&
- Preferences::GetBool("media.plugins.enabled");
-}
-#endif
-
NS_IMETHODIMP
MediaMemoryTracker::CollectReports(nsIHandleReportCallback* aHandleReport,
nsISupports* aData, bool aAnonymize)
diff --git a/dom/media/MediaDecoder.h b/dom/media/MediaDecoder.h
index a4edcbe72..7e93de044 100644
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -447,10 +447,6 @@ private:
static bool IsWaveEnabled();
static bool IsWebMEnabled();
-#ifdef MOZ_ANDROID_OMX
- static bool IsAndroidMediaPluginEnabled();
-#endif
-
#ifdef MOZ_WMF
static bool IsWMFEnabled();
#endif
diff --git a/dom/media/MediaPrefs.h b/dom/media/MediaPrefs.h
index e67796edd..c67a89989 100644
--- a/dom/media/MediaPrefs.h
+++ b/dom/media/MediaPrefs.h
@@ -105,9 +105,6 @@ private:
DECL_MEDIA_PREF("media.eme.enabled", EMEEnabled, bool, false);
DECL_MEDIA_PREF("media.use-blank-decoder", PDMUseBlankDecoder, bool, false);
DECL_MEDIA_PREF("media.gpu-process-decoder", PDMUseGPUDecoder, bool, false);
-#ifdef MOZ_GONK_MEDIACODEC
- DECL_MEDIA_PREF("media.gonk.enabled", PDMGonkDecoderEnabled, bool, true);
-#endif
#ifdef MOZ_WIDGET_ANDROID
DECL_MEDIA_PREF("media.android-media-codec.enabled", PDMAndroidMediaCodecEnabled, bool, false);
DECL_MEDIA_PREF("media.android-media-codec.preferred", PDMAndroidMediaCodecPreferred, bool, false);
diff --git a/dom/media/ThreadPoolCOMListener.h b/dom/media/ThreadPoolCOMListener.h
index 881013a78..424ca65d2 100644
--- a/dom/media/ThreadPoolCOMListener.h
+++ b/dom/media/ThreadPoolCOMListener.h
@@ -13,8 +13,8 @@
namespace mozilla {
// Thread pool listener which ensures that MSCOM is initialized and
-// deinitialized on the thread pool thread. We may call into WMF or
-// DirectShow on this thread, so we need MSCOM working.
+// deinitialized on the thread pool thread. We may call into WMF on
+// this thread, so we need MSCOM working.
class MSCOMInitThreadPoolListener final : public nsIThreadPoolListener {
~MSCOMInitThreadPoolListener() {}
public:
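For context, the listener touched in this hunk ties MSCOM initialization to thread-pool thread lifetime. A hedged sketch of that pattern (not necessarily the exact tree code) looks like this:

// Sketch of the MSCOM init/deinit pattern on thread-pool threads (illustrative only).
#include <objbase.h>

NS_IMETHODIMP
MSCOMInitThreadPoolListener::OnThreadCreated()
{
  HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
  if (FAILED(hr)) {
    NS_WARNING("Failed to initialize MSCOM on decoder thread.");
  }
  return NS_OK;
}

NS_IMETHODIMP
MSCOMInitThreadPoolListener::OnThreadShuttingDown()
{
  CoUninitialize(); // balances the CoInitializeEx above
  return NS_OK;
}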
diff --git a/dom/media/WebVTTListener.h b/dom/media/WebVTTListener.h
index 67271664a..461d7f00d 100644
--- a/dom/media/WebVTTListener.h
+++ b/dom/media/WebVTTListener.h
@@ -10,6 +10,7 @@
#include "nsIStreamListener.h"
#include "nsIChannelEventSink.h"
#include "nsIInterfaceRequestor.h"
+#include "nsCOMPtr.h"
#include "nsCycleCollectionParticipant.h"
class nsIWebVTTParserWrapper;
diff --git a/dom/media/android/AndroidMediaDecoder.cpp b/dom/media/android/AndroidMediaDecoder.cpp
deleted file mode 100644
index 41ef3fcb0..000000000
--- a/dom/media/android/AndroidMediaDecoder.cpp
+++ /dev/null
@@ -1,25 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "MediaDecoderStateMachine.h"
-#include "AndroidMediaDecoder.h"
-#include "AndroidMediaReader.h"
-
-namespace mozilla {
-
-AndroidMediaDecoder::AndroidMediaDecoder(MediaDecoderOwner* aOwner,
- const nsACString& aType)
- : MediaDecoder(aOwner), mType(aType)
-{
-}
-
-MediaDecoderStateMachine* AndroidMediaDecoder::CreateStateMachine()
-{
- return new MediaDecoderStateMachine(this, new AndroidMediaReader(this, mType));
-}
-
-} // namespace mozilla
-
diff --git a/dom/media/android/AndroidMediaDecoder.h b/dom/media/android/AndroidMediaDecoder.h
deleted file mode 100644
index 88b5a243f..000000000
--- a/dom/media/android/AndroidMediaDecoder.h
+++ /dev/null
@@ -1,28 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#if !defined(AndroidMediaDecoder_h_)
-#define AndroidMediaDecoder_h_
-
-#include "MediaDecoder.h"
-#include "AndroidMediaDecoder.h"
-
-namespace mozilla {
-
-class AndroidMediaDecoder : public MediaDecoder
-{
- nsCString mType;
-public:
- AndroidMediaDecoder(MediaDecoderOwner* aOwner, const nsACString& aType);
-
- MediaDecoder* Clone(MediaDecoderOwner* aOwner) override {
- return new AndroidMediaDecoder(aOwner, mType);
- }
- MediaDecoderStateMachine* CreateStateMachine() override;
-};
-
-} // namespace mozilla
-
-#endif
diff --git a/dom/media/android/AndroidMediaPluginHost.cpp b/dom/media/android/AndroidMediaPluginHost.cpp
deleted file mode 100644
index d4c4fc59e..000000000
--- a/dom/media/android/AndroidMediaPluginHost.cpp
+++ /dev/null
@@ -1,305 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#include "mozilla/Preferences.h"
-#include "MediaResource.h"
-#include "mozilla/dom/HTMLMediaElement.h"
-#include "mozilla/Services.h"
-#include "AndroidMediaPluginHost.h"
-#include "nsAutoPtr.h"
-#include "nsXPCOMStrings.h"
-#include "nsISeekableStream.h"
-#include "nsIGfxInfo.h"
-#include "prmem.h"
-#include "prlink.h"
-#include "AndroidMediaResourceServer.h"
-#include "nsServiceManagerUtils.h"
-
-#include "MPAPI.h"
-
-#include "nsIPropertyBag2.h"
-
-#if defined(ANDROID)
-#include "android/log.h"
-#define ALOG(args...) __android_log_print(ANDROID_LOG_INFO, "AndroidMediaPluginHost" , ## args)
-#else
-#define ALOG(args...) /* do nothing */
-#endif
-
-using namespace MPAPI;
-
-Decoder::Decoder() :
- mResource(nullptr), mPrivate(nullptr)
-{
-}
-
-namespace mozilla {
-
-static char* GetResource(Decoder *aDecoder)
-{
- return static_cast<char*>(aDecoder->mResource);
-}
-
-class GetIntPrefEvent : public Runnable {
-public:
- GetIntPrefEvent(const char* aPref, int32_t* aResult)
- : mPref(aPref), mResult(aResult) {}
- NS_IMETHOD Run() override {
- return Preferences::GetInt(mPref, mResult);
- }
-private:
- const char* mPref;
- int32_t* mResult;
-};
-
-static bool GetIntPref(const char* aPref, int32_t* aResult)
-{
- // GetIntPref() is called on the decoder thread, but the Preferences API
- // can only be called on the main thread. Post a runnable and wait.
- NS_ENSURE_TRUE(aPref, false);
- NS_ENSURE_TRUE(aResult, false);
- nsCOMPtr<nsIRunnable> event = new GetIntPrefEvent(aPref, aResult);
- return NS_SUCCEEDED(NS_DispatchToMainThread(event, NS_DISPATCH_SYNC));
-}
-
-static bool
-GetSystemInfoString(const char *aKey, char *aResult, size_t aResultLength)
-{
- NS_ENSURE_TRUE(aKey, false);
- NS_ENSURE_TRUE(aResult, false);
-
- nsCOMPtr<nsIPropertyBag2> infoService = do_GetService("@mozilla.org/system-info;1");
- NS_ASSERTION(infoService, "Could not find a system info service");
-
- nsAutoCString key(aKey);
- nsAutoCString info;
- nsresult rv = infoService->GetPropertyAsACString(NS_ConvertUTF8toUTF16(key),
- info);
-
- NS_ENSURE_SUCCESS(rv, false);
-
- strncpy(aResult, info.get(), aResultLength);
-
- return true;
-}
-
-static PluginHost sPluginHost = {
- nullptr,
- nullptr,
- nullptr,
- nullptr,
- GetIntPref,
- GetSystemInfoString
-};
-
-// Return true if Omx decoding is supported on the device. This checks the
-// built-in whitelist/blacklist and preferences to see if that is overridden.
-static bool IsOmxSupported()
-{
- bool forceEnabled =
- Preferences::GetBool("stagefright.force-enabled", false);
- bool disabled =
- Preferences::GetBool("stagefright.disabled", false);
-
- if (disabled) {
- NS_WARNING("XXX stagefright disabled\n");
- return false;
- }
-
- if (!forceEnabled) {
- nsCOMPtr<nsIGfxInfo> gfxInfo = services::GetGfxInfo();
- if (gfxInfo) {
- int32_t status;
- nsCString discardFailure;
- if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(nsIGfxInfo::FEATURE_STAGEFRIGHT, discardFailure, &status))) {
- if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
- NS_WARNING("XXX stagefright blacklisted\n");
- return false;
- }
- }
- }
- }
-
- return true;
-}
-
-// Return the name of the shared library that implements Omx-based decoding. This varies
-// depending on the libstagefright version installed on the device and whether it is B2G vs Android.
-// nullptr is returned if Omx decoding is not supported on the device.
-static const char* GetOmxLibraryName()
-{
-#if defined(ANDROID)
- nsCOMPtr<nsIPropertyBag2> infoService = do_GetService("@mozilla.org/system-info;1");
- NS_ASSERTION(infoService, "Could not find a system info service");
-
- int32_t version;
- nsresult rv = infoService->GetPropertyAsInt32(NS_LITERAL_STRING("version"), &version);
- if (NS_SUCCEEDED(rv)) {
- ALOG("Android Version is: %d", version);
- }
-
- nsAutoString release_version;
- rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("release_version"), release_version);
- if (NS_SUCCEEDED(rv)) {
- ALOG("Android Release Version is: %s", NS_LossyConvertUTF16toASCII(release_version).get());
- }
-
- nsAutoString device;
- rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("device"), device);
- if (NS_SUCCEEDED(rv)) {
- ALOG("Android Device is: %s", NS_LossyConvertUTF16toASCII(device).get());
- }
-
- nsAutoString manufacturer;
- rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("manufacturer"), manufacturer);
- if (NS_SUCCEEDED(rv)) {
- ALOG("Android Manufacturer is: %s", NS_LossyConvertUTF16toASCII(manufacturer).get());
- }
-
- nsAutoString hardware;
- rv = infoService->GetPropertyAsAString(NS_LITERAL_STRING("hardware"), hardware);
- if (NS_SUCCEEDED(rv)) {
- ALOG("Android Hardware is: %s", NS_LossyConvertUTF16toASCII(hardware).get());
- }
-#endif
-
- if (!IsOmxSupported())
- return nullptr;
-
-#if defined(ANDROID)
- if (version >= 17) {
- return "libomxpluginkk.so";
- }
-
- // Ice Cream Sandwich and Jellybean
- return "libomxplugin.so";
-
-#else
- return nullptr;
-#endif
-}
-
-AndroidMediaPluginHost::AndroidMediaPluginHost() {
- MOZ_COUNT_CTOR(AndroidMediaPluginHost);
- MOZ_ASSERT(NS_IsMainThread());
-
- mResourceServer = AndroidMediaResourceServer::Start();
-
- const char* name = GetOmxLibraryName();
- ALOG("Loading OMX Plugin: %s", name ? name : "nullptr");
- if (name) {
- char *path = PR_GetLibraryFilePathname("libxul.so", (PRFuncPtr) GetOmxLibraryName);
- PRLibrary *lib = nullptr;
- if (path) {
- nsAutoCString libpath(path);
- PR_Free(path);
- int32_t slash = libpath.RFindChar('/');
- if (slash != kNotFound) {
- libpath.Truncate(slash + 1);
- libpath.Append(name);
- lib = PR_LoadLibrary(libpath.get());
- }
- }
- if (!lib)
- lib = PR_LoadLibrary(name);
-
- if (lib) {
- Manifest *manifest = static_cast<Manifest *>(PR_FindSymbol(lib, "MPAPI_MANIFEST"));
- if (manifest) {
- mPlugins.AppendElement(manifest);
- ALOG("OMX plugin successfully loaded");
- }
- }
- }
-}
-
-AndroidMediaPluginHost::~AndroidMediaPluginHost() {
- mResourceServer->Stop();
- MOZ_COUNT_DTOR(AndroidMediaPluginHost);
-}
-
-bool AndroidMediaPluginHost::FindDecoder(const nsACString& aMimeType, const char* const** aCodecs)
-{
- const char *chars;
- size_t len = NS_CStringGetData(aMimeType, &chars, nullptr);
- for (size_t n = 0; n < mPlugins.Length(); ++n) {
- Manifest *plugin = mPlugins[n];
- const char* const *codecs;
- if (plugin->CanDecode(chars, len, &codecs)) {
- if (aCodecs)
- *aCodecs = codecs;
- return true;
- }
- }
- return false;
-}
-
-MPAPI::Decoder *AndroidMediaPluginHost::CreateDecoder(MediaResource *aResource, const nsACString& aMimeType)
-{
- NS_ENSURE_TRUE(aResource, nullptr);
-
- nsAutoPtr<Decoder> decoder(new Decoder());
- if (!decoder) {
- return nullptr;
- }
-
- const char *chars;
- size_t len = NS_CStringGetData(aMimeType, &chars, nullptr);
- for (size_t n = 0; n < mPlugins.Length(); ++n) {
- Manifest *plugin = mPlugins[n];
- const char* const *codecs;
- if (!plugin->CanDecode(chars, len, &codecs)) {
- continue;
- }
-
- nsCString url;
- nsresult rv = mResourceServer->AddResource(aResource, url);
- if (NS_FAILED (rv)) continue;
-
- decoder->mResource = strdup(url.get());
- if (plugin->CreateDecoder(&sPluginHost, decoder, chars, len)) {
- return decoder.forget();
- }
- }
-
- return nullptr;
-}
-
-void AndroidMediaPluginHost::DestroyDecoder(Decoder *aDecoder)
-{
- aDecoder->DestroyDecoder(aDecoder);
- char* resource = GetResource(aDecoder);
- if (resource) {
- // resource *shouldn't* be null, but check anyway just in case the plugin
- // decoder does something stupid.
- mResourceServer->RemoveResource(nsCString(resource));
- free(resource);
- }
- delete aDecoder;
-}
-
-AndroidMediaPluginHost *sAndroidMediaPluginHost = nullptr;
-AndroidMediaPluginHost *EnsureAndroidMediaPluginHost()
-{
- MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
- if (!sAndroidMediaPluginHost) {
- sAndroidMediaPluginHost = new AndroidMediaPluginHost();
- }
- return sAndroidMediaPluginHost;
-}
-
-AndroidMediaPluginHost *GetAndroidMediaPluginHost()
-{
- MOZ_ASSERT(sAndroidMediaPluginHost);
- return sAndroidMediaPluginHost;
-}
-
-void AndroidMediaPluginHost::Shutdown()
-{
- delete sAndroidMediaPluginHost;
- sAndroidMediaPluginHost = nullptr;
-}
-
-} // namespace mozilla
diff --git a/dom/media/android/AndroidMediaPluginHost.h b/dom/media/android/AndroidMediaPluginHost.h
deleted file mode 100644
index 854b7f21e..000000000
--- a/dom/media/android/AndroidMediaPluginHost.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#if !defined(AndroidMediaPluginHost_h_)
-#define AndroidMediaPluginHost_h_
-
-#include "nsTArray.h"
-#include "MediaResource.h"
-#include "MPAPI.h"
-#include "AndroidMediaResourceServer.h"
-
-namespace mozilla {
-
-class AndroidMediaPluginHost {
- RefPtr<AndroidMediaResourceServer> mResourceServer;
- nsTArray<MPAPI::Manifest *> mPlugins;
-
- MPAPI::Manifest *FindPlugin(const nsACString& aMimeType);
-public:
- AndroidMediaPluginHost();
- ~AndroidMediaPluginHost();
-
- static void Shutdown();
-
- bool FindDecoder(const nsACString& aMimeType, const char* const** aCodecs);
- MPAPI::Decoder *CreateDecoder(mozilla::MediaResource *aResource, const nsACString& aMimeType);
- void DestroyDecoder(MPAPI::Decoder *aDecoder);
-};
-
-// Must be called on the main thread. Creates the plugin host if it doesn't
-// already exist.
-AndroidMediaPluginHost *EnsureAndroidMediaPluginHost();
-
-// May be called on any thread after EnsureAndroidMediaPluginHost has been called.
-AndroidMediaPluginHost *GetAndroidMediaPluginHost();
-
-} // namespace mozilla
-
-#endif
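For reference, a hedged sketch of how the plugin-host API deleted here was consumed; the wrapper function is hypothetical, while the host calls match the declarations above.

// Hypothetical caller of the removed AndroidMediaPluginHost API (sketch only).
#include "AndroidMediaPluginHost.h"

static MPAPI::Decoder*
SketchCreateOmxDecoder(mozilla::MediaResource* aResource, const nsACString& aMimeType)
{
  // Must be called on the main thread; creates the host on first use.
  mozilla::AndroidMediaPluginHost* host = mozilla::EnsureAndroidMediaPluginHost();
  if (!host->FindDecoder(aMimeType, nullptr)) {
    return nullptr; // no loaded OMX plugin can decode this MIME type
  }
  // The returned decoder must later be released with host->DestroyDecoder().
  return host->CreateDecoder(aResource, aMimeType);
}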
diff --git a/dom/media/android/AndroidMediaReader.cpp b/dom/media/android/AndroidMediaReader.cpp
deleted file mode 100644
index 12afacbc9..000000000
--- a/dom/media/android/AndroidMediaReader.cpp
+++ /dev/null
@@ -1,449 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#include "AndroidMediaReader.h"
-#include "mozilla/TimeStamp.h"
-#include "mozilla/gfx/Point.h"
-#include "MediaResource.h"
-#include "VideoUtils.h"
-#include "AndroidMediaDecoder.h"
-#include "AndroidMediaPluginHost.h"
-#include "MediaDecoderStateMachine.h"
-#include "ImageContainer.h"
-#include "AbstractMediaDecoder.h"
-#include "gfx2DGlue.h"
-#include "VideoFrameContainer.h"
-#include "mozilla/CheckedInt.h"
-
-namespace mozilla {
-
-using namespace mozilla::gfx;
-using namespace mozilla::media;
-
-typedef mozilla::layers::Image Image;
-typedef mozilla::layers::PlanarYCbCrImage PlanarYCbCrImage;
-
-AndroidMediaReader::AndroidMediaReader(AbstractMediaDecoder *aDecoder,
- const nsACString& aContentType) :
- MediaDecoderReader(aDecoder),
- mType(aContentType),
- mPlugin(nullptr),
- mHasAudio(false),
- mHasVideo(false),
- mVideoSeekTimeUs(-1),
- mAudioSeekTimeUs(-1)
-{
-}
-
-nsresult AndroidMediaReader::ReadMetadata(MediaInfo* aInfo,
- MetadataTags** aTags)
-{
- MOZ_ASSERT(OnTaskQueue());
-
- if (!mPlugin) {
- mPlugin = GetAndroidMediaPluginHost()->CreateDecoder(mDecoder->GetResource(), mType);
- if (!mPlugin) {
- return NS_ERROR_FAILURE;
- }
- }
-
- // Set the total duration (the max of the audio and video track).
- int64_t durationUs;
- mPlugin->GetDuration(mPlugin, &durationUs);
- if (durationUs) {
- mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(durationUs));
- }
-
- if (mPlugin->HasVideo(mPlugin)) {
- int32_t width, height;
- mPlugin->GetVideoParameters(mPlugin, &width, &height);
- nsIntRect pictureRect(0, 0, width, height);
-
- // Validate the container-reported frame and pictureRect sizes. This ensures
- // that our video frame creation code doesn't overflow.
- nsIntSize displaySize(width, height);
- nsIntSize frameSize(width, height);
- if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
- return NS_ERROR_FAILURE;
- }
-
- // Video track's frame sizes will not overflow. Activate the video track.
- mHasVideo = true;
- mInfo.mVideo.mDisplay = displaySize;
- mPicture = pictureRect;
- mInitialFrame = frameSize;
- VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
- if (container) {
- container->ClearCurrentFrame(IntSize(displaySize.width, displaySize.height));
- }
- }
-
- if (mPlugin->HasAudio(mPlugin)) {
- int32_t numChannels, sampleRate;
- mPlugin->GetAudioParameters(mPlugin, &numChannels, &sampleRate);
- mHasAudio = true;
- mInfo.mAudio.mChannels = numChannels;
- mInfo.mAudio.mRate = sampleRate;
- }
-
- *aInfo = mInfo;
- *aTags = nullptr;
- return NS_OK;
-}
-
-RefPtr<ShutdownPromise>
-AndroidMediaReader::Shutdown()
-{
- ResetDecode();
- if (mPlugin) {
- GetAndroidMediaPluginHost()->DestroyDecoder(mPlugin);
- mPlugin = nullptr;
- }
-
- return MediaDecoderReader::Shutdown();
-}
-
-// Resets all state related to decoding, emptying all buffers etc.
-nsresult AndroidMediaReader::ResetDecode(TrackSet aTracks)
-{
- if (mLastVideoFrame) {
- mLastVideoFrame = nullptr;
- }
- mSeekRequest.DisconnectIfExists();
- mSeekPromise.RejectIfExists(NS_OK, __func__);
- return MediaDecoderReader::ResetDecode(aTracks);
-}
-
-bool AndroidMediaReader::DecodeVideoFrame(bool &aKeyframeSkip,
- int64_t aTimeThreshold)
-{
- // Record number of frames decoded and parsed. Automatically update the
- // stats counters using the AutoNotifyDecoded stack-based class.
- AbstractMediaDecoder::AutoNotifyDecoded a(mDecoder);
-
- // Throw away the currently buffered frame if we are seeking.
- if (mLastVideoFrame && mVideoSeekTimeUs != -1) {
- mLastVideoFrame = nullptr;
- }
-
- ImageBufferCallback bufferCallback(mDecoder->GetImageContainer());
- RefPtr<Image> currentImage;
-
- // Read next frame
- while (true) {
- MPAPI::VideoFrame frame;
- if (!mPlugin->ReadVideo(mPlugin, &frame, mVideoSeekTimeUs, &bufferCallback)) {
- // We reached the end of the video stream. If we have a buffered
- // video frame, push it onto the video queue using the total duration
- // of the video as the end time.
- if (mLastVideoFrame) {
- int64_t durationUs;
- mPlugin->GetDuration(mPlugin, &durationUs);
- durationUs = std::max<int64_t>(durationUs - mLastVideoFrame->mTime, 0);
- RefPtr<VideoData> data = VideoData::ShallowCopyUpdateDuration(mLastVideoFrame,
- durationUs);
- mVideoQueue.Push(data);
- mLastVideoFrame = nullptr;
- }
- return false;
- }
- mVideoSeekTimeUs = -1;
-
- if (aKeyframeSkip) {
- // Disable keyframe skipping for now as
- // stagefright doesn't seem to be telling us
- // when a frame is a keyframe.
-#if 0
- if (!frame.mKeyFrame) {
- ++a.mStats.mParsedFrames;
- ++a.mStats.mDroppedFrames;
- continue;
- }
-#endif
- aKeyframeSkip = false;
- }
-
- if (frame.mSize == 0)
- return true;
-
- currentImage = bufferCallback.GetImage();
- int64_t pos = mDecoder->GetResource()->Tell();
- IntRect picture = mPicture;
-
- RefPtr<VideoData> v;
- if (currentImage) {
- gfx::IntSize frameSize = currentImage->GetSize();
- if (frameSize.width != mInitialFrame.width ||
- frameSize.height != mInitialFrame.height) {
- // Frame size is different from what the container reports. This is legal,
- // and we will preserve the ratio of the crop rectangle as it
- // was reported relative to the picture size reported by the container.
- picture.x = (mPicture.x * frameSize.width) / mInitialFrame.width;
- picture.y = (mPicture.y * frameSize.height) / mInitialFrame.height;
- picture.width = (frameSize.width * mPicture.width) / mInitialFrame.width;
- picture.height = (frameSize.height * mPicture.height) / mInitialFrame.height;
- }
-
- v = VideoData::CreateFromImage(mInfo.mVideo,
- pos,
- frame.mTimeUs,
- 1, // We don't know the duration yet.
- currentImage,
- frame.mKeyFrame,
- -1,
- picture);
- } else {
- // Assume YUV
- VideoData::YCbCrBuffer b;
- b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
- b.mPlanes[0].mStride = frame.Y.mStride;
- b.mPlanes[0].mHeight = frame.Y.mHeight;
- b.mPlanes[0].mWidth = frame.Y.mWidth;
- b.mPlanes[0].mOffset = frame.Y.mOffset;
- b.mPlanes[0].mSkip = frame.Y.mSkip;
-
- b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
- b.mPlanes[1].mStride = frame.Cb.mStride;
- b.mPlanes[1].mHeight = frame.Cb.mHeight;
- b.mPlanes[1].mWidth = frame.Cb.mWidth;
- b.mPlanes[1].mOffset = frame.Cb.mOffset;
- b.mPlanes[1].mSkip = frame.Cb.mSkip;
-
- b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
- b.mPlanes[2].mStride = frame.Cr.mStride;
- b.mPlanes[2].mHeight = frame.Cr.mHeight;
- b.mPlanes[2].mWidth = frame.Cr.mWidth;
- b.mPlanes[2].mOffset = frame.Cr.mOffset;
- b.mPlanes[2].mSkip = frame.Cr.mSkip;
-
- if (frame.Y.mWidth != mInitialFrame.width ||
- frame.Y.mHeight != mInitialFrame.height) {
-
- // Frame size is different from what the container reports. This is legal,
- // and we will preserve the ratio of the crop rectangle as it
- // was reported relative to the picture size reported by the container.
- picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
- picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
- picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
- picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
- }
-
- // This is the approximate byte position in the stream.
- v = VideoData::CreateAndCopyData(mInfo.mVideo,
- mDecoder->GetImageContainer(),
- pos,
- frame.mTimeUs,
- 1, // We don't know the duration yet.
- b,
- frame.mKeyFrame,
- -1,
- picture);
- }
-
- if (!v) {
- return false;
- }
- a.mStats.mParsedFrames++;
- a.mStats.mDecodedFrames++;
- NS_ASSERTION(a.mStats.mDecodedFrames <= a.mStats.mParsedFrames, "Expect to decode fewer frames than parsed in AndroidMedia...");
-
- // Since MPAPI doesn't give us the end time of frames, we keep one frame
- // buffered in AndroidMediaReader and push it into the queue as soon as
- // we read the following frame so we can use that frame's start time as
- // the end time of the buffered frame.
- if (!mLastVideoFrame) {
- mLastVideoFrame = v;
- continue;
- }
-
- // Calculate the duration as the timestamp of the current frame minus the
- // timestamp of the previous frame. We can then return the previously
- // decoded frame, and it will have a valid timestamp.
- int64_t duration = v->mTime - mLastVideoFrame->mTime;
- mLastVideoFrame = VideoData::ShallowCopyUpdateDuration(mLastVideoFrame, duration);
-
- // We have the start time of the next frame, so we can push the previous
- // frame into the queue, except if the end time is below the threshold,
- // in which case it wouldn't be displayed anyway.
- if (mLastVideoFrame->GetEndTime() < aTimeThreshold) {
- mLastVideoFrame = nullptr;
- continue;
- }
-
- // Buffer the current frame we just decoded.
- mVideoQueue.Push(mLastVideoFrame);
- mLastVideoFrame = v;
-
- break;
- }
-
- return true;
-}
-
-bool AndroidMediaReader::DecodeAudioData()
-{
- MOZ_ASSERT(OnTaskQueue());
-
- // This is the approximate byte position in the stream.
- int64_t pos = mDecoder->GetResource()->Tell();
-
- // Read next frame
- MPAPI::AudioFrame source;
- if (!mPlugin->ReadAudio(mPlugin, &source, mAudioSeekTimeUs)) {
- return false;
- }
- mAudioSeekTimeUs = -1;
-
- // Ignore empty buffers which stagefright media read will sporadically return
- if (source.mSize == 0)
- return true;
-
- uint32_t frames = source.mSize / (source.mAudioChannels *
- sizeof(AudioDataValue));
-
- typedef AudioCompactor::NativeCopy MPCopy;
- return mAudioCompactor.Push(pos,
- source.mTimeUs,
- source.mAudioSampleRate,
- frames,
- source.mAudioChannels,
- MPCopy(static_cast<uint8_t *>(source.mData),
- source.mSize,
- source.mAudioChannels));
-}
-
-RefPtr<MediaDecoderReader::SeekPromise>
-AndroidMediaReader::Seek(SeekTarget aTarget, int64_t aEndTime)
-{
- MOZ_ASSERT(OnTaskQueue());
-
- RefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);
- if (mHasAudio && mHasVideo) {
- // The decoder seeks/demuxes audio and video streams separately. So if
- // we seek both audio and video to aTarget, the audio stream can typically
- // seek closer to the seek target, since typically every audio block is
- // a sync point, whereas for video there are only keyframes once every few
- // seconds. So if we have both audio and video, we must seek the video
- // stream to the preceding keyframe first, get the stream time, and then
- // seek the audio stream to match the video stream's time. Otherwise, the
- // audio and video streams won't be in sync after the seek.
- mVideoSeekTimeUs = aTarget.GetTime().ToMicroseconds();
-
- RefPtr<AndroidMediaReader> self = this;
- mSeekRequest.Begin(DecodeToFirstVideoData()->Then(OwnerThread(), __func__, [self] (MediaData* v) {
- self->mSeekRequest.Complete();
- self->mAudioSeekTimeUs = v->mTime;
- self->mSeekPromise.Resolve(media::TimeUnit::FromMicroseconds(self->mAudioSeekTimeUs), __func__);
- }, [self, aTarget] () {
- self->mSeekRequest.Complete();
- self->mAudioSeekTimeUs = aTarget.GetTime().ToMicroseconds();
- self->mSeekPromise.Resolve(aTarget.GetTime(), __func__);
- }));
- } else {
- mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget.GetTime().ToMicroseconds();
- mSeekPromise.Resolve(aTarget.GetTime(), __func__);
- }
-
- return p;
-}
-
-AndroidMediaReader::ImageBufferCallback::ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer) :
- mImageContainer(aImageContainer)
-{
-}
-
-void *
-AndroidMediaReader::ImageBufferCallback::operator()(size_t aWidth, size_t aHeight,
- MPAPI::ColorFormat aColorFormat)
-{
- if (!mImageContainer) {
- NS_WARNING("No image container to construct an image");
- return nullptr;
- }
-
- RefPtr<Image> image;
- switch(aColorFormat) {
- case MPAPI::RGB565:
- image = mozilla::layers::CreateSharedRGBImage(mImageContainer,
- nsIntSize(aWidth, aHeight),
- SurfaceFormat::R5G6B5_UINT16);
- if (!image) {
- NS_WARNING("Could not create rgb image");
- return nullptr;
- }
-
- mImage = image;
- return image->GetBuffer();
- case MPAPI::I420:
- return CreateI420Image(aWidth, aHeight);
- default:
- NS_NOTREACHED("Color format not supported");
- return nullptr;
- }
-}
-
-uint8_t *
-AndroidMediaReader::ImageBufferCallback::CreateI420Image(size_t aWidth,
- size_t aHeight)
-{
- RefPtr<PlanarYCbCrImage> yuvImage = mImageContainer->CreatePlanarYCbCrImage();
- mImage = yuvImage;
-
- if (!yuvImage) {
- NS_WARNING("Could not create I420 image");
- return nullptr;
- }
-
- // Use uint32_t throughout to match AllocateAndGetNewBuffer's param
- const auto checkedFrameSize =
- CheckedInt<uint32_t>(aWidth) * aHeight;
-
- // Allocate enough for one full resolution Y plane
- // and two quarter resolution Cb/Cr planes.
- const auto checkedBufferSize =
- checkedFrameSize + checkedFrameSize / 2;
-
- if (!checkedBufferSize.isValid()) { // checks checkedFrameSize too
- NS_WARNING("Could not create I420 image");
- return nullptr;
- }
-
- const auto frameSize = checkedFrameSize.value();
-
- uint8_t *buffer =
- yuvImage->AllocateAndGetNewBuffer(checkedBufferSize.value());
-
- mozilla::layers::PlanarYCbCrData frameDesc;
-
- frameDesc.mYChannel = buffer;
- frameDesc.mCbChannel = buffer + frameSize;
- frameDesc.mCrChannel = frameDesc.mCbChannel + frameSize / 4;
-
- frameDesc.mYSize = IntSize(aWidth, aHeight);
- frameDesc.mCbCrSize = IntSize(aWidth / 2, aHeight / 2);
-
- frameDesc.mYStride = aWidth;
- frameDesc.mCbCrStride = aWidth / 2;
-
- frameDesc.mYSkip = 0;
- frameDesc.mCbSkip = 0;
- frameDesc.mCrSkip = 0;
-
- frameDesc.mPicX = 0;
- frameDesc.mPicY = 0;
- frameDesc.mPicSize = IntSize(aWidth, aHeight);
-
- yuvImage->AdoptData(frameDesc);
-
- return buffer;
-}
-
-already_AddRefed<Image>
-AndroidMediaReader::ImageBufferCallback::GetImage()
-{
- return mImage.forget();
-}
-
-} // namespace mozilla
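The reader deleted above derives each frame's duration by holding one decoded frame back until the next frame's start time is known. A simplified sketch of that bookkeeping, using hypothetical types, is shown below.

// Simplified sketch of the one-frame-lookahead duration trick (hypothetical types).
#include <cstdint>
#include <deque>

struct SketchFrame {
  int64_t mTimeUs;     // start time of the frame
  int64_t mDurationUs; // unknown until the next frame is decoded
};

static void
PushWithDuration(std::deque<SketchFrame>& aQueue, SketchFrame& aLast, bool& aHaveLast,
                 const SketchFrame& aNext)
{
  if (aHaveLast) {
    // The next frame's start time bounds the held-back frame's duration.
    aLast.mDurationUs = aNext.mTimeUs - aLast.mTimeUs;
    aQueue.push_back(aLast);
  }
  aLast = aNext; // hold the new frame until its end time is known
  aHaveLast = true;
}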
diff --git a/dom/media/android/AndroidMediaReader.h b/dom/media/android/AndroidMediaReader.h
deleted file mode 100644
index def85a343..000000000
--- a/dom/media/android/AndroidMediaReader.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#if !defined(AndroidMediaReader_h_)
-#define AndroidMediaReader_h_
-
-#include "mozilla/Attributes.h"
-#include "MediaResource.h"
-#include "MediaDecoderReader.h"
-#include "ImageContainer.h"
-#include "mozilla/layers/SharedRGBImage.h"
-
-#include "MPAPI.h"
-
-class nsACString;
-
-namespace mozilla {
-
-class AbstractMediaDecoder;
-
-namespace layers {
-class ImageContainer;
-}
-
-class AndroidMediaReader : public MediaDecoderReader
-{
- nsCString mType;
- MPAPI::Decoder *mPlugin;
- bool mHasAudio;
- bool mHasVideo;
- nsIntRect mPicture;
- nsIntSize mInitialFrame;
- int64_t mVideoSeekTimeUs;
- int64_t mAudioSeekTimeUs;
- RefPtr<VideoData> mLastVideoFrame;
- MozPromiseHolder<MediaDecoderReader::SeekPromise> mSeekPromise;
- MozPromiseRequestHolder<MediaDecoderReader::MediaDataPromise> mSeekRequest;
-public:
- AndroidMediaReader(AbstractMediaDecoder* aDecoder,
- const nsACString& aContentType);
-
- nsresult ResetDecode(TrackSet aTracks = TrackSet(TrackInfo::kAudioTrack,
- TrackInfo::kVideoTrack)) override;
-
- bool DecodeAudioData() override;
- bool DecodeVideoFrame(bool &aKeyframeSkip, int64_t aTimeThreshold) override;
-
- nsresult ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) override;
- RefPtr<SeekPromise> Seek(SeekTarget aTarget, int64_t aEndTime) override;
-
- RefPtr<ShutdownPromise> Shutdown() override;
-
- class ImageBufferCallback : public MPAPI::BufferCallback {
- typedef mozilla::layers::Image Image;
-
- public:
- ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer);
- void *operator()(size_t aWidth, size_t aHeight,
- MPAPI::ColorFormat aColorFormat) override;
- already_AddRefed<Image> GetImage();
-
- private:
- uint8_t *CreateI420Image(size_t aWidth, size_t aHeight);
-
- mozilla::layers::ImageContainer *mImageContainer;
- RefPtr<Image> mImage;
- };
-
-};
-
-} // namespace mozilla
-
-#endif
diff --git a/dom/media/android/AndroidMediaResourceServer.cpp b/dom/media/android/AndroidMediaResourceServer.cpp
deleted file mode 100644
index bd76a8c68..000000000
--- a/dom/media/android/AndroidMediaResourceServer.cpp
+++ /dev/null
@@ -1,503 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#include "mozilla/Assertions.h"
-#include "mozilla/Base64.h"
-#include "mozilla/IntegerPrintfMacros.h"
-#include "mozilla/UniquePtr.h"
-#include "nsThreadUtils.h"
-#include "nsIServiceManager.h"
-#include "nsISocketTransport.h"
-#include "nsIOutputStream.h"
-#include "nsIInputStream.h"
-#include "nsIRandomGenerator.h"
-#include "nsReadLine.h"
-#include "nsNetCID.h"
-#include "VideoUtils.h"
-#include "MediaResource.h"
-#include "AndroidMediaResourceServer.h"
-
-#if defined(_MSC_VER)
-#define strtoll _strtoi64
-#endif
-
-using namespace mozilla;
-
-/*
- ReadCRLF is a variant of NS_ReadLine from nsReadLine.h that deals
- with the carriage return/line feed requirements of HTTP requests.
-*/
-template<typename CharT, class StreamType, class StringType>
-nsresult
-ReadCRLF (StreamType* aStream, nsLineBuffer<CharT> * aBuffer,
- StringType & aLine, bool *aMore)
-{
- // eollast is true if the last character in the buffer is a '\r',
- // signaling a potential '\r\n' sequence split between reads.
- bool eollast = false;
-
- aLine.Truncate();
-
- while (1) { // will be returning out of this loop on eol or eof
- if (aBuffer->start == aBuffer->end) { // buffer is empty. Read into it.
- uint32_t bytesRead;
- nsresult rv = aStream->Read(aBuffer->buf, kLineBufferSize, &bytesRead);
- if (NS_FAILED(rv) || bytesRead == 0) {
- *aMore = false;
- return rv;
- }
- aBuffer->start = aBuffer->buf;
- aBuffer->end = aBuffer->buf + bytesRead;
- *(aBuffer->end) = '\0';
- }
-
- /*
- * Walk the buffer looking for an end-of-line.
- * There are 4 cases to consider:
- * 1. the CR char is the last char in the buffer
-   * 2. the CRLF sequence is the last pair of characters in the buffer
-   * 3. the CRLF sequence + one or more chars at the end of the buffer;
-   *    we need at least one char after the first CRLF sequence to
-   *    set |aMore| correctly.
- * 4. The LF character is the first char in the buffer when eollast is
- * true.
- */
- CharT* current = aBuffer->start;
- if (eollast) { // Case 4
- if (*current == '\n') {
- aBuffer->start = ++current;
- *aMore = true;
- return NS_OK;
- }
- else {
- eollast = false;
- aLine.Append('\r');
- }
- }
- // Cases 2 and 3
- for ( ; current < aBuffer->end-1; ++current) {
- if (*current == '\r' && *(current+1) == '\n') {
- *current++ = '\0';
- *current++ = '\0';
- aLine.Append(aBuffer->start);
- aBuffer->start = current;
- *aMore = true;
- return NS_OK;
- }
- }
- // Case 1
- if (*current == '\r') {
- eollast = true;
- *current++ = '\0';
- }
-
- aLine.Append(aBuffer->start);
- aBuffer->start = aBuffer->end; // mark the buffer empty
- }
-}
-
-// Each client HTTP request results in a thread being spawned to process it.
-// That thread has a single event dispatched to it which handles the HTTP
-// protocol. It parses the headers and forwards data from the MediaResource
-// associated with the URL back to the client. When the request is complete it will
-// shut down the thread.
-class ServeResourceEvent : public Runnable {
-private:
- // Reading from this reads the data sent from the client.
- nsCOMPtr<nsIInputStream> mInput;
-
- // Writing to this sends data to the client.
- nsCOMPtr<nsIOutputStream> mOutput;
-
- // The AndroidMediaResourceServer that owns the MediaResource instances
- // served. This is used to look up the MediaResource from the URL.
- RefPtr<AndroidMediaResourceServer> mServer;
-
- // Write 'aBufferLength' bytes from 'aBuffer' to 'mOutput'. This
- // method ensures all the data is written by checking the number
- // of bytes returned from the output stream's 'Write' method and
- // looping until done.
- nsresult WriteAll(char const* aBuffer, int32_t aBufferLength);
-
-public:
- ServeResourceEvent(nsIInputStream* aInput, nsIOutputStream* aOutput,
- AndroidMediaResourceServer* aServer)
- : mInput(aInput), mOutput(aOutput), mServer(aServer) {}
-
- // This method runs on the thread and exits when it has completed the
- // HTTP request.
- NS_IMETHOD Run();
-
- // Given the first line of an HTTP request, parse the URL requested and
- // return the MediaResource for that URL.
- already_AddRefed<MediaResource> GetMediaResource(nsCString const& aHTTPRequest);
-
- // Gracefully shut down the thread and clean up resources.
- void Shutdown();
-};
-
-nsresult
-ServeResourceEvent::WriteAll(char const* aBuffer, int32_t aBufferLength)
-{
- while (aBufferLength > 0) {
- uint32_t written = 0;
- nsresult rv = mOutput->Write(aBuffer, aBufferLength, &written);
- if (NS_FAILED (rv)) return rv;
-
- aBufferLength -= written;
- aBuffer += written;
- }
-
- return NS_OK;
-}
-
-already_AddRefed<MediaResource>
-ServeResourceEvent::GetMediaResource(nsCString const& aHTTPRequest)
-{
- // Check that the HTTP method is GET
- const char* HTTP_METHOD = "GET ";
- if (strncmp(aHTTPRequest.get(), HTTP_METHOD, strlen(HTTP_METHOD)) != 0) {
- return nullptr;
- }
-
- const char* url_start = strchr(aHTTPRequest.get(), ' ');
- if (!url_start) {
- return nullptr;
- }
-
- const char* url_end = strrchr(++url_start, ' ');
- if (!url_end) {
- return nullptr;
- }
-
- // The path extracted from the HTTP request is used as a key in the hash
- // table. It is not related to retrieving data from the filesystem, so
- // we don't need to do any sanity checking on ".." paths and similar
- // exploits.
- nsCString relative(url_start, url_end - url_start);
- RefPtr<MediaResource> resource =
- mServer->GetResource(mServer->GetURLPrefix() + relative);
- return resource.forget();
-}
-
-NS_IMETHODIMP
-ServeResourceEvent::Run() {
- bool more = false; // Are there HTTP headers to read after the first line
- nsCString line; // Contains the current line read from input stream
- nsLineBuffer<char>* buffer = new nsLineBuffer<char>();
- nsresult rv = ReadCRLF(mInput.get(), buffer, line, &more);
- if (NS_FAILED(rv)) { Shutdown(); return rv; }
-
- // First line contains the HTTP GET request. Extract the URL and obtain
- // the MediaResource for it.
- RefPtr<MediaResource> resource = GetMediaResource(line);
- if (!resource) {
- const char* response_404 = "HTTP/1.1 404 Not Found\r\n"
- "Content-Length: 0\r\n\r\n";
- rv = WriteAll(response_404, strlen(response_404));
- Shutdown();
- return rv;
- }
-
- // Offset in bytes to start reading from the resource.
- // This is zero by default but can be set to another starting value if
- // this HTTP request includes a byte range request header.
- int64_t start = 0;
-
- // Keep reading lines until we get a zero length line, which is the HTTP
- // protocol's way of signifying the end of headers and start of body, or
- // until we have no more data to read.
- while (more && line.Length() > 0) {
- rv = ReadCRLF(mInput.get(), buffer, line, &more);
- if (NS_FAILED(rv)) { Shutdown(); return rv; }
-
-    // Look for a byte range request header. If there is one, start
-    // reading the media resource from the requested offset. Here we
- // only check for the range request format used by Android rather
- // than implementing all possibilities in the HTTP specification.
- // That is, the range request is of the form:
- // Range: bytes=nnnn-
-    //   Where 'nnnn' is an integer.
-    // The end of the range is not checked; instead we return up to
- // the end of the resource and the client is informed of this via
- // the content-range header.
- NS_NAMED_LITERAL_CSTRING(byteRange, "Range: bytes=");
- const char* s = strstr(line.get(), byteRange.get());
- if (s) {
- start = strtoll(s+byteRange.Length(), nullptr, 10);
-
- // Clamp 'start' to be between 0 and the resource length.
- start = std::max(int64_t(0), std::min(resource->GetLength(), start));
- }
- }
-
- // HTTP response to use if this is not a byte range request
- const char* response_normal = "HTTP/1.1 200 OK\r\n";
-
- // HTTP response to use if this is a byte range request
- const char* response_range = "HTTP/1.1 206 Partial Content\r\n";
-
- // The end of the HTTP response headers is indicated by an empty line.
- const char* response_end = "\r\n";
-
- // If the request was a byte range request, we need to read from the
- // requested offset. If the resource is non-seekable, or the seek
- // fails, then the start offset is set back to zero. This results in all
- // HTTP response data being as if the byte range request was not made.
- if (start > 0 && !resource->IsTransportSeekable()) {
- start = 0;
- }
-
- const char* response_line = start > 0 ?
- response_range :
- response_normal;
- rv = WriteAll(response_line, strlen(response_line));
- if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
-
- // Buffer used for reading from the input stream and writing to
- // the output stream. The buffer size should be big enough for the
- // HTTP response headers sent below. A static_assert ensures
- // this where the buffer is used.
- const int buffer_size = 32768;
- auto b = MakeUnique<char[]>(buffer_size);
-
- // If we know the length of the resource, send a Content-Length header.
- int64_t contentlength = resource->GetLength() - start;
- if (contentlength > 0) {
- static_assert (buffer_size > 1024,
- "buffer_size must be large enough "
- "to hold response headers");
- snprintf(b.get(), buffer_size, "Content-Length: %" PRId64 "\r\n", contentlength);
- rv = WriteAll(b.get(), strlen(b.get()));
- if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
- }
-
- // If the request was a byte range request, respond with a Content-Range
- // header which details the extent of the data returned.
- if (start > 0) {
- static_assert (buffer_size > 1024,
- "buffer_size must be large enough "
- "to hold response headers");
- snprintf(b.get(), buffer_size, "Content-Range: "
- "bytes %" PRId64 "-%" PRId64 "/%" PRId64 "\r\n",
- start, resource->GetLength() - 1, resource->GetLength());
- rv = WriteAll(b.get(), strlen(b.get()));
- if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
- }
-
- rv = WriteAll(response_end, strlen(response_end));
- if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
-
- rv = mOutput->Flush();
- if (NS_FAILED(rv)) { Shutdown(); return NS_OK; }
-
- // Read data from media resource
- uint32_t bytesRead = 0; // Number of bytes read/written to streams
- rv = resource->ReadAt(start, b.get(), buffer_size, &bytesRead);
- while (NS_SUCCEEDED(rv) && bytesRead != 0) {
- // Keep track of what we think the starting position for the next read
- // is. This is used in subsequent ReadAt calls to ensure we are reading
- // from the correct offset in the case where another thread is reading
- // from the same MediaResource.
- start += bytesRead;
-
- // Write data obtained from media resource to output stream
- rv = WriteAll(b.get(), bytesRead);
- if (NS_FAILED (rv)) break;
-
- rv = resource->ReadAt(start, b.get(), 32768, &bytesRead);
- }
-
- Shutdown();
- return NS_OK;
-}
-
-void
-ServeResourceEvent::Shutdown()
-{
- // Cleanup resources and exit.
- mInput->Close();
- mOutput->Close();
-
- // To shut down the current thread we need to first exit this event.
- // The Shutdown event below is posted to the main thread to do this.
- nsCOMPtr<nsIRunnable> event = new ShutdownThreadEvent(NS_GetCurrentThread());
- NS_DispatchToMainThread(event);
-}
-
-/*
- This is the listener attached to the server socket. When an HTTP
-  request is made by the client, the OnSocketAccepted method is
-  called. This method will spawn a thread to process the request.
-  The thread receives a single event which parses the HTTP request
-  and forwards the data from the MediaResource to the output stream
-  of the request.
-
- The MediaResource used for providing the request data is obtained
- from the AndroidMediaResourceServer that created this listener, using the
- URL the client requested.
-*/
-class ResourceSocketListener : public nsIServerSocketListener
-{
-public:
- // The AndroidMediaResourceServer used to look up the MediaResource
- // on requests.
- RefPtr<AndroidMediaResourceServer> mServer;
-
- NS_DECL_THREADSAFE_ISUPPORTS
- NS_DECL_NSISERVERSOCKETLISTENER
-
- ResourceSocketListener(AndroidMediaResourceServer* aServer) :
- mServer(aServer)
- {
- }
-
-private:
- virtual ~ResourceSocketListener() { }
-};
-
-NS_IMPL_ISUPPORTS(ResourceSocketListener, nsIServerSocketListener)
-
-NS_IMETHODIMP
-ResourceSocketListener::OnSocketAccepted(nsIServerSocket* aServ,
- nsISocketTransport* aTrans)
-{
- nsCOMPtr<nsIInputStream> input;
- nsCOMPtr<nsIOutputStream> output;
- nsresult rv;
-
- rv = aTrans->OpenInputStream(nsITransport::OPEN_BLOCKING, 0, 0, getter_AddRefs(input));
- if (NS_FAILED(rv)) return rv;
-
- rv = aTrans->OpenOutputStream(nsITransport::OPEN_BLOCKING, 0, 0, getter_AddRefs(output));
- if (NS_FAILED(rv)) return rv;
-
- nsCOMPtr<nsIThread> thread;
- rv = NS_NewThread(getter_AddRefs(thread));
- if (NS_FAILED(rv)) return rv;
-
- nsCOMPtr<nsIRunnable> event = new ServeResourceEvent(input.get(), output.get(), mServer);
- return thread->Dispatch(event, NS_DISPATCH_NORMAL);
-}
-
-NS_IMETHODIMP
-ResourceSocketListener::OnStopListening(nsIServerSocket* aServ, nsresult aStatus)
-{
- return NS_OK;
-}
-
-AndroidMediaResourceServer::AndroidMediaResourceServer() :
- mMutex("AndroidMediaResourceServer")
-{
-}
-
-NS_IMETHODIMP
-AndroidMediaResourceServer::Run()
-{
- MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
- MutexAutoLock lock(mMutex);
-
- nsresult rv;
- mSocket = do_CreateInstance(NS_SERVERSOCKET_CONTRACTID, &rv);
- if (NS_FAILED(rv)) return rv;
-
- rv = mSocket->InitSpecialConnection(-1,
- nsIServerSocket::LoopbackOnly
- | nsIServerSocket::KeepWhenOffline,
- -1);
- if (NS_FAILED(rv)) return rv;
-
- rv = mSocket->AsyncListen(new ResourceSocketListener(this));
- if (NS_FAILED(rv)) return rv;
-
- return NS_OK;
-}
-
-/* static */
-already_AddRefed<AndroidMediaResourceServer>
-AndroidMediaResourceServer::Start()
-{
- MOZ_ASSERT(NS_IsMainThread());
- RefPtr<AndroidMediaResourceServer> server = new AndroidMediaResourceServer();
- server->Run();
- return server.forget();
-}
-
-void
-AndroidMediaResourceServer::Stop()
-{
- MutexAutoLock lock(mMutex);
- mSocket->Close();
- mSocket = nullptr;
-}
-
-nsresult
-AndroidMediaResourceServer::AppendRandomPath(nsCString& aUrl)
-{
- // Use a cryptographic quality PRNG to generate raw random bytes
- // and convert that to a base64 string for use as an URL path. This
- // is based on code from nsExternalAppHandler::SetUpTempFile.
- nsresult rv;
- nsAutoCString salt;
- rv = GenerateRandomPathName(salt, 16);
- if (NS_FAILED(rv)) return rv;
- aUrl += "/";
- aUrl += salt;
- return NS_OK;
-}
-
-nsresult
-AndroidMediaResourceServer::AddResource(mozilla::MediaResource* aResource, nsCString& aUrl)
-{
- nsCString url = GetURLPrefix();
- nsresult rv = AppendRandomPath(url);
- if (NS_FAILED (rv)) return rv;
-
- {
- MutexAutoLock lock(mMutex);
-
- // Adding a resource URL that already exists is considered an error.
- if (mResources.find(url) != mResources.end()) return NS_ERROR_FAILURE;
- mResources[url] = aResource;
- }
-
- aUrl = url;
-
- return NS_OK;
-}
-
-void
-AndroidMediaResourceServer::RemoveResource(nsCString const& aUrl)
-{
- MutexAutoLock lock(mMutex);
- mResources.erase(aUrl);
-}
-
-nsCString
-AndroidMediaResourceServer::GetURLPrefix()
-{
- MutexAutoLock lock(mMutex);
-
- int32_t port = 0;
- nsresult rv = mSocket->GetPort(&port);
- if (NS_FAILED (rv) || port < 0) {
- return nsCString("");
- }
-
- char buffer[256];
- snprintf(buffer, sizeof(buffer), "http://127.0.0.1:%d", port >= 0 ? port : 0);
- return nsCString(buffer);
-}
-
-already_AddRefed<MediaResource>
-AndroidMediaResourceServer::GetResource(nsCString const& aUrl)
-{
- MutexAutoLock lock(mMutex);
- ResourceMap::const_iterator it = mResources.find(aUrl);
- if (it == mResources.end()) return nullptr;
-
- RefPtr<MediaResource> resource = it->second;
- return resource.forget();
-}
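
For reference, the byte-range handling that ServeResourceEvent performed above can be summarised in a standalone sketch. This is illustrative only: the status-line choice, the names, and the std::string framing are assumptions; the removed code wrote through an nsIOutputStream and read from a mozilla::MediaResource.

#include <cinttypes>
#include <cstdint>
#include <cstdio>
#include <string>

// Build the response headers for serving bytes [start, totalLength) of a
// resource that is totalLength bytes long.
static std::string BuildRangeResponseHeaders(int64_t start, int64_t totalLength)
{
  char buf[512];
  std::string headers;

  // A byte-range request is assumed to get "206 Partial Content", a full
  // request "200 OK" (the removed code builds its status line elsewhere).
  headers += (start > 0) ? "HTTP/1.1 206 Partial Content\r\n"
                         : "HTTP/1.1 200 OK\r\n";

  int64_t contentLength = totalLength - start;
  snprintf(buf, sizeof(buf), "Content-Length: %" PRId64 "\r\n", contentLength);
  headers += buf;

  // Content-Range details the extent of the data returned, in the same
  // "bytes <start>-<last>/<total>" form the removed code used.
  if (start > 0) {
    snprintf(buf, sizeof(buf),
             "Content-Range: bytes %" PRId64 "-%" PRId64 "/%" PRId64 "\r\n",
             start, totalLength - 1, totalLength);
    headers += buf;
  }

  headers += "\r\n"; // Blank line ends the headers; the body follows.
  return headers;
}
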
diff --git a/dom/media/android/AndroidMediaResourceServer.h b/dom/media/android/AndroidMediaResourceServer.h
deleted file mode 100644
index 68200f9c0..000000000
--- a/dom/media/android/AndroidMediaResourceServer.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#if !defined(AndroidMediaResourceServer_h_)
-#define AndroidMediaResourceServer_h_
-
-#include <map>
-#include "nsIServerSocket.h"
-#include "MediaResource.h"
-
-namespace mozilla {
-
-class MediaResource;
-
-/*
- AndroidMediaResourceServer instantiates a socket server that understands
- HTTP requests for MediaResource instances. The server runs on an
- automatically selected port and MediaResource instances are registered.
-  The registration returns a string URL that can be used to fetch the
- resource. That URL contains a randomly generated path to make it
- difficult for other local applications on the device to guess it.
-
- The HTTP protocol is limited in that it supports only what the
- Android DataSource implementation uses to fetch media. It
- understands HTTP GET and byte range requests.
-
-  The intent of this class is to be used in media backends that
- have a system component that does its own network requests. These
- requests are made against this server which then uses standard
- Gecko network requests and media cache usage.
-
- The AndroidMediaResourceServer can be instantiated on any thread and
-  its methods are thread-safe; they can be called on any thread.
- The server socket itself is always run on the main thread and
- this is done by the Start() static method by synchronously
- dispatching to the main thread.
-*/
-class AndroidMediaResourceServer : public Runnable
-{
-private:
- // Mutex protecting private members of AndroidMediaResourceServer.
- // All member variables below this point in the class definition
- // must acquire the mutex before access.
- mozilla::Mutex mMutex;
-
- // Server socket used to listen for incoming connections
- nsCOMPtr<nsIServerSocket> mSocket;
-
-  // Mapping from MediaResource URLs to the MediaResource
- // object served at that URL.
- typedef std::map<nsCString,
- RefPtr<mozilla::MediaResource> > ResourceMap;
- ResourceMap mResources;
-
-  // Create an AndroidMediaResourceServer that will listen on an automatically
- // selected port when started. This is private as it should only be
- // called internally from the public 'Start' method.
- AndroidMediaResourceServer();
- NS_IMETHOD Run();
-
- // Append a random URL path to a string. This is used for creating a
-  // unique URL for a resource, which helps prevent malicious software
- // running on the same machine as the server from guessing the URL
- // and accessing video data.
- nsresult AppendRandomPath(nsCString& aURL);
-
-public:
-  // Create an AndroidMediaResourceServer and start it listening. This call will
- // perform a synchronous request on the main thread.
- static already_AddRefed<AndroidMediaResourceServer> Start();
-
- // Stops the server from listening and accepting further connections.
- void Stop();
-
- // Add a MediaResource to be served by this server. Stores the
- // absolute URL that can be used to access the resource in 'aUrl'.
- nsresult AddResource(mozilla::MediaResource* aResource, nsCString& aUrl);
-
- // Remove a MediaResource so it is no longer served by this server.
- // The URL provided must match exactly that provided by a previous
- // call to "AddResource".
- void RemoveResource(nsCString const& aUrl);
-
- // Returns the prefix for HTTP requests to the server. This plus
-  // the result of AddResource results in an absolute URL.
- nsCString GetURLPrefix();
-
-  // Returns the resource associated with a given URL.
- already_AddRefed<mozilla::MediaResource> GetResource(nsCString const& aUrl);
-};
-
-} // namespace mozilla
-
-#endif
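
The random URL path mentioned above is the only access control the server has. A minimal sketch of the idea, with hypothetical names and <random> standing in for Gecko's GenerateRandomPathName (which base64-encodes bytes from a cryptographic PRNG):

#include <random>
#include <string>

// Append a hard-to-guess path segment to a URL. The real code used a
// cryptographic PRNG; std::random_device is only a stand-in here.
static std::string AppendRandomPath(std::string url)
{
  static const char kAlphabet[] =
      "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";

  std::random_device rd;
  std::uniform_int_distribution<int> pick(0, sizeof(kAlphabet) - 2);

  url += '/';
  for (int i = 0; i < 22; ++i) { // ~132 bits of entropy from 64 symbols.
    url += kAlphabet[pick(rd)];
  }
  return url;
}

A registered resource then becomes reachable at GetURLPrefix() plus the random path stored into aUrl by AddResource().
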
diff --git a/dom/media/android/MPAPI.h b/dom/media/android/MPAPI.h
deleted file mode 100644
index 9b289ca09..000000000
--- a/dom/media/android/MPAPI.h
+++ /dev/null
@@ -1,165 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#if !defined(MPAPI_h_)
-#define MPAPI_h_
-
-#include <stdint.h>
-
-namespace MPAPI {
-
-enum ColorFormat {
- I420,
- RGB565
-};
-
-/*
- * A callback for the plugin to use to request a buffer owned by Gecko. This can
- * save us a copy or two down the line.
- */
-class BufferCallback {
-public:
- virtual void *operator()(size_t aWidth, size_t aHeight,
- ColorFormat aColorFormat) = 0;
-};
-
-struct VideoPlane {
- VideoPlane() :
- mData(0),
- mStride(0),
- mWidth(0),
- mHeight(0),
- mOffset(0),
- mSkip(0)
- {}
-
- void *mData;
- int32_t mStride;
- int32_t mWidth;
- int32_t mHeight;
- int32_t mOffset;
- int32_t mSkip;
-};
-
-struct VideoFrame {
- int64_t mTimeUs;
- bool mKeyFrame;
- void *mData;
- size_t mSize;
- int32_t mStride;
- int32_t mSliceHeight;
- int32_t mRotation;
- VideoPlane Y;
- VideoPlane Cb;
- VideoPlane Cr;
-
- VideoFrame() :
- mTimeUs(0),
- mKeyFrame(false),
- mData(0),
- mSize(0),
- mStride(0),
- mSliceHeight(0),
- mRotation(0)
- {}
-
- void Set(int64_t aTimeUs, bool aKeyFrame,
- void *aData, size_t aSize, int32_t aStride, int32_t aSliceHeight, int32_t aRotation,
- void *aYData, int32_t aYStride, int32_t aYWidth, int32_t aYHeight, int32_t aYOffset, int32_t aYSkip,
- void *aCbData, int32_t aCbStride, int32_t aCbWidth, int32_t aCbHeight, int32_t aCbOffset, int32_t aCbSkip,
- void *aCrData, int32_t aCrStride, int32_t aCrWidth, int32_t aCrHeight, int32_t aCrOffset, int32_t aCrSkip)
- {
- mTimeUs = aTimeUs;
- mKeyFrame = aKeyFrame;
- mData = aData;
- mSize = aSize;
- mStride = aStride;
- mSliceHeight = aSliceHeight;
- mRotation = aRotation;
- Y.mData = aYData;
- Y.mStride = aYStride;
- Y.mWidth = aYWidth;
- Y.mHeight = aYHeight;
- Y.mOffset = aYOffset;
- Y.mSkip = aYSkip;
- Cb.mData = aCbData;
- Cb.mStride = aCbStride;
- Cb.mWidth = aCbWidth;
- Cb.mHeight = aCbHeight;
- Cb.mOffset = aCbOffset;
- Cb.mSkip = aCbSkip;
- Cr.mData = aCrData;
- Cr.mStride = aCrStride;
- Cr.mWidth = aCrWidth;
- Cr.mHeight = aCrHeight;
- Cr.mOffset = aCrOffset;
- Cr.mSkip = aCrSkip;
- }
-};
-
-struct AudioFrame {
- int64_t mTimeUs;
-  void *mData; // 16-bit PCM, interleaved
- size_t mSize; // Size of mData in bytes
- int32_t mAudioChannels;
- int32_t mAudioSampleRate;
-
- AudioFrame() :
- mTimeUs(0),
- mData(0),
- mSize(0),
- mAudioChannels(0),
- mAudioSampleRate(0)
- {
- }
-
- void Set(int64_t aTimeUs,
- void *aData, size_t aSize,
- int32_t aAudioChannels, int32_t aAudioSampleRate)
- {
- mTimeUs = aTimeUs;
- mData = aData;
- mSize = aSize;
- mAudioChannels = aAudioChannels;
- mAudioSampleRate = aAudioSampleRate;
- }
-};
-
-struct Decoder;
-
-struct PluginHost {
- bool (*Read)(Decoder *aDecoder, char *aBuffer, int64_t aOffset, uint32_t aCount, uint32_t* aBytes);
- uint64_t (*GetLength)(Decoder *aDecoder);
- void (*SetMetaDataReadMode)(Decoder *aDecoder);
- void (*SetPlaybackReadMode)(Decoder *aDecoder);
- bool (*GetIntPref)(const char *aPref, int32_t *aResult);
- bool (*GetSystemInfoString)(const char *aKey, char *aResult, size_t aResultLen);
-};
-
-struct Decoder {
- void *mResource;
- void *mPrivate;
-
- Decoder();
-
- void (*GetDuration)(Decoder *aDecoder, int64_t *durationUs);
- void (*GetVideoParameters)(Decoder *aDecoder, int32_t *aWidth, int32_t *aHeight);
- void (*GetAudioParameters)(Decoder *aDecoder, int32_t *aNumChannels, int32_t *aSampleRate);
- bool (*HasVideo)(Decoder *aDecoder);
- bool (*HasAudio)(Decoder *aDecoder);
- bool (*ReadVideo)(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback);
- bool (*ReadAudio)(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs);
- void (*DestroyDecoder)(Decoder *);
-};
-
-struct Manifest {
- bool (*CanDecode)(const char *aMimeChars, size_t aMimeLen, const char* const**aCodecs);
- bool (*CreateDecoder)(PluginHost *aPluginHost, Decoder *aDecoder,
- const char *aMimeChars, size_t aMimeLen);
-};
-
-}
-
-#endif
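
MPAPI is a plain C-style ABI: host and plugin exchange structs of function pointers rather than C++ vtables. A minimal sketch of how a host consumes such a table, using a stand-in PluginManifest with the same shape as MPAPI::Manifest and a hypothetical plugin-side CanDecode (the MIME type and codec strings are only examples):

#include <cstddef>
#include <cstring>

// Same shape as MPAPI::Manifest, redeclared so the sketch stands alone.
struct PluginManifest {
  bool (*CanDecode)(const char* aMimeChars, size_t aMimeLen,
                    const char* const** aCodecs);
};

// Example plugin-side CanDecode: accept "video/mp4" and report its codecs.
static bool CanDecodeMP4(const char* aMimeChars, size_t aMimeLen,
                         const char* const** aCodecs)
{
  static const char* const kCodecs[] = { "avc1.42E01E", "mp4a.40.2", nullptr };
  const char kMime[] = "video/mp4";
  if (aMimeLen == sizeof(kMime) - 1 &&
      memcmp(aMimeChars, kMime, aMimeLen) == 0) {
    if (aCodecs) {
      *aCodecs = kCodecs; // Null-terminated, statically allocated list.
    }
    return true;
  }
  return false;
}

static const PluginManifest kManifest = { CanDecodeMP4 };

// Host-side query: does the plugin claim support for this MIME type?
static bool PluginCanDecode(const PluginManifest* aManifest, const char* aMime)
{
  const char* const* codecs = nullptr;
  return aManifest->CanDecode(aMime, strlen(aMime), &codecs);
}
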
diff --git a/dom/media/android/moz.build b/dom/media/android/moz.build
deleted file mode 100644
index 3ad43cd50..000000000
--- a/dom/media/android/moz.build
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-EXPORTS += [
- 'AndroidMediaDecoder.h',
- 'AndroidMediaPluginHost.h',
- 'AndroidMediaReader.h',
- 'AndroidMediaResourceServer.h',
- 'MPAPI.h',
-]
-
-UNIFIED_SOURCES += [
- 'AndroidMediaDecoder.cpp',
- 'AndroidMediaPluginHost.cpp',
- 'AndroidMediaReader.cpp',
- 'AndroidMediaResourceServer.cpp',
-]
-
-LOCAL_INCLUDES += [
- '/dom/base',
- '/dom/html',
-]
-
-FINAL_LIBRARY = 'xul'
diff --git a/dom/media/directshow/AudioSinkFilter.cpp b/dom/media/directshow/AudioSinkFilter.cpp
deleted file mode 100644
index 9f23c0e00..000000000
--- a/dom/media/directshow/AudioSinkFilter.cpp
+++ /dev/null
@@ -1,285 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "SampleSink.h"
-#include "AudioSinkFilter.h"
-#include "AudioSinkInputPin.h"
-#include "VideoUtils.h"
-#include "mozilla/Logging.h"
-
-
-#include <initguid.h>
-#include <wmsdkidl.h>
-
-#define DELETE_RESET(p) { delete (p) ; (p) = nullptr ;}
-
-DEFINE_GUID(CLSID_MozAudioSinkFilter, 0x1872d8c8, 0xea8d, 0x4c34, 0xae, 0x96, 0x69, 0xde,
- 0xf1, 0x33, 0x7b, 0x33);
-
-using namespace mozilla::media;
-
-namespace mozilla {
-
-static LazyLogModule gDirectShowLog("DirectShowDecoder");
-#define LOG(...) MOZ_LOG(gDirectShowLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
-
-AudioSinkFilter::AudioSinkFilter(const wchar_t* aObjectName, HRESULT* aOutResult)
- : BaseFilter(aObjectName, CLSID_MozAudioSinkFilter),
- mFilterCritSec("AudioSinkFilter::mFilterCritSec")
-{
- (*aOutResult) = S_OK;
- mInputPin = new AudioSinkInputPin(L"AudioSinkInputPin",
- this,
- &mFilterCritSec,
- aOutResult);
-}
-
-AudioSinkFilter::~AudioSinkFilter()
-{
-}
-
-int
-AudioSinkFilter::GetPinCount()
-{
- return 1;
-}
-
-BasePin*
-AudioSinkFilter::GetPin(int aIndex)
-{
- CriticalSectionAutoEnter lockFilter(mFilterCritSec);
- return (aIndex == 0) ? static_cast<BasePin*>(mInputPin) : nullptr;
-}
-
-HRESULT
-AudioSinkFilter::Pause()
-{
- CriticalSectionAutoEnter lockFilter(mFilterCritSec);
- if (mState == State_Stopped) {
- // Change the state, THEN activate the input pin.
- mState = State_Paused;
- if (mInputPin && mInputPin->IsConnected()) {
- mInputPin->Active();
- }
- } else if (mState == State_Running) {
- mState = State_Paused;
- }
- return S_OK;
-}
-
-HRESULT
-AudioSinkFilter::Stop()
-{
- CriticalSectionAutoEnter lockFilter(mFilterCritSec);
- mState = State_Stopped;
- if (mInputPin) {
- mInputPin->Inactive();
- }
-
- GetSampleSink()->Flush();
-
- return S_OK;
-}
-
-HRESULT
-AudioSinkFilter::Run(REFERENCE_TIME tStart)
-{
- LOG("AudioSinkFilter::Run(%lld) [%4.2lf]",
- RefTimeToUsecs(tStart),
- double(RefTimeToUsecs(tStart)) / USECS_PER_S);
- return media::BaseFilter::Run(tStart);
-}
-
-HRESULT
-AudioSinkFilter::GetClassID( OUT CLSID * pCLSID )
-{
- (* pCLSID) = CLSID_MozAudioSinkFilter;
- return S_OK;
-}
-
-HRESULT
-AudioSinkFilter::QueryInterface(REFIID aIId, void **aInterface)
-{
- if (aIId == IID_IMediaSeeking) {
- *aInterface = static_cast<IMediaSeeking*>(this);
- AddRef();
- return S_OK;
- }
- return mozilla::media::BaseFilter::QueryInterface(aIId, aInterface);
-}
-
-ULONG
-AudioSinkFilter::AddRef()
-{
- return ::InterlockedIncrement(&mRefCnt);
-}
-
-ULONG
-AudioSinkFilter::Release()
-{
- unsigned long newRefCnt = ::InterlockedDecrement(&mRefCnt);
- if (!newRefCnt) {
- delete this;
- }
- return newRefCnt;
-}
-
-SampleSink*
-AudioSinkFilter::GetSampleSink()
-{
- return mInputPin->GetSampleSink();
-}
-
-
-// IMediaSeeking implementation.
-//
-// Calls to IMediaSeeking are forwarded to the output pin that the
-// AudioSinkInputPin is connected to, i.e. upstream towards the parser and
-// source filters, which actually implement seeking.
-#define ENSURE_CONNECTED_PIN_SEEKING \
- if (!mInputPin) { \
- return E_NOTIMPL; \
- } \
- RefPtr<IMediaSeeking> pinSeeking = mInputPin->GetConnectedPinSeeking(); \
- if (!pinSeeking) { \
- return E_NOTIMPL; \
- }
-
-HRESULT
-AudioSinkFilter::GetCapabilities(DWORD* aCapabilities)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetCapabilities(aCapabilities);
-}
-
-HRESULT
-AudioSinkFilter::CheckCapabilities(DWORD* aCapabilities)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->CheckCapabilities(aCapabilities);
-}
-
-HRESULT
-AudioSinkFilter::IsFormatSupported(const GUID* aFormat)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->IsFormatSupported(aFormat);
-}
-
-HRESULT
-AudioSinkFilter::QueryPreferredFormat(GUID* aFormat)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->QueryPreferredFormat(aFormat);
-}
-
-HRESULT
-AudioSinkFilter::GetTimeFormat(GUID* aFormat)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetTimeFormat(aFormat);
-}
-
-HRESULT
-AudioSinkFilter::IsUsingTimeFormat(const GUID* aFormat)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->IsUsingTimeFormat(aFormat);
-}
-
-HRESULT
-AudioSinkFilter::SetTimeFormat(const GUID* aFormat)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->SetTimeFormat(aFormat);
-}
-
-HRESULT
-AudioSinkFilter::GetDuration(LONGLONG* aDuration)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetDuration(aDuration);
-}
-
-HRESULT
-AudioSinkFilter::GetStopPosition(LONGLONG* aStop)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetStopPosition(aStop);
-}
-
-HRESULT
-AudioSinkFilter::GetCurrentPosition(LONGLONG* aCurrent)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetCurrentPosition(aCurrent);
-}
-
-HRESULT
-AudioSinkFilter::ConvertTimeFormat(LONGLONG* aTarget,
- const GUID* aTargetFormat,
- LONGLONG aSource,
- const GUID* aSourceFormat)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->ConvertTimeFormat(aTarget,
- aTargetFormat,
- aSource,
- aSourceFormat);
-}
-
-HRESULT
-AudioSinkFilter::SetPositions(LONGLONG* aCurrent,
- DWORD aCurrentFlags,
- LONGLONG* aStop,
- DWORD aStopFlags)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->SetPositions(aCurrent,
- aCurrentFlags,
- aStop,
- aStopFlags);
-}
-
-HRESULT
-AudioSinkFilter::GetPositions(LONGLONG* aCurrent,
- LONGLONG* aStop)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetPositions(aCurrent, aStop);
-}
-
-HRESULT
-AudioSinkFilter::GetAvailable(LONGLONG* aEarliest,
- LONGLONG* aLatest)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetAvailable(aEarliest, aLatest);
-}
-
-HRESULT
-AudioSinkFilter::SetRate(double aRate)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->SetRate(aRate);
-}
-
-HRESULT
-AudioSinkFilter::GetRate(double* aRate)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetRate(aRate);
-}
-
-HRESULT
-AudioSinkFilter::GetPreroll(LONGLONG* aPreroll)
-{
- ENSURE_CONNECTED_PIN_SEEKING
- return pinSeeking->GetPreroll(aPreroll);
-}
-
-} // namespace mozilla
-
diff --git a/dom/media/directshow/AudioSinkFilter.h b/dom/media/directshow/AudioSinkFilter.h
deleted file mode 100644
index 85abdfccf..000000000
--- a/dom/media/directshow/AudioSinkFilter.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#if !defined(AudioSinkFilter_h_)
-#define AudioSinkFilter_h_
-
-#include "BaseFilter.h"
-#include "DirectShowUtils.h"
-#include "nsAutoPtr.h"
-#include "mozilla/RefPtr.h"
-
-namespace mozilla {
-
-class AudioSinkInputPin;
-class SampleSink;
-
-// Filter that acts as the end of the graph. Audio samples input into
-// this filter block the calling thread, and the calling thread is
-// unblocked when the decode thread extracts the sample. The samples
-// input into this filter are stored in the SampleSink, where the blocking
-// is implemented. The input pin owns the SampleSink.
-class AudioSinkFilter: public mozilla::media::BaseFilter,
- public IMediaSeeking
-{
-
-public:
- AudioSinkFilter(const wchar_t* aObjectName, HRESULT* aOutResult);
- virtual ~AudioSinkFilter();
-
- // Gets the input pin's sample sink.
- SampleSink* GetSampleSink();
-
- // IUnknown implementation.
- STDMETHODIMP QueryInterface(REFIID aIId, void **aInterface);
- STDMETHODIMP_(ULONG) AddRef();
- STDMETHODIMP_(ULONG) Release();
-
- // --------------------------------------------------------------------
- // CBaseFilter methods
- int GetPinCount ();
- mozilla::media::BasePin* GetPin ( IN int Index);
- STDMETHODIMP Pause ();
- STDMETHODIMP Stop ();
- STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
- STDMETHODIMP Run(REFERENCE_TIME tStart);
- // IMediaSeeking Methods...
-
- // We defer to SourceFilter, but we must expose the interface on
- // the output pins. Seeking commands come upstream from the renderers,
- // but they must be actioned at the source filters.
- STDMETHODIMP GetCapabilities(DWORD* aCapabilities);
- STDMETHODIMP CheckCapabilities(DWORD* aCapabilities);
- STDMETHODIMP IsFormatSupported(const GUID* aFormat);
- STDMETHODIMP QueryPreferredFormat(GUID* aFormat);
- STDMETHODIMP GetTimeFormat(GUID* aFormat);
- STDMETHODIMP IsUsingTimeFormat(const GUID* aFormat);
- STDMETHODIMP SetTimeFormat(const GUID* aFormat);
- STDMETHODIMP GetDuration(LONGLONG* pDuration);
- STDMETHODIMP GetStopPosition(LONGLONG* pStop);
- STDMETHODIMP GetCurrentPosition(LONGLONG* aCurrent);
- STDMETHODIMP ConvertTimeFormat(LONGLONG* aTarget,
- const GUID* aTargetFormat,
- LONGLONG aSource,
- const GUID* aSourceFormat);
- STDMETHODIMP SetPositions(LONGLONG* aCurrent,
- DWORD aCurrentFlags,
- LONGLONG* aStop,
- DWORD aStopFlags);
- STDMETHODIMP GetPositions(LONGLONG* aCurrent,
- LONGLONG* aStop);
- STDMETHODIMP GetAvailable(LONGLONG* aEarliest,
- LONGLONG* aLatest);
- STDMETHODIMP SetRate(double aRate);
- STDMETHODIMP GetRate(double* aRate);
- STDMETHODIMP GetPreroll(LONGLONG* aPreroll);
-
- // --------------------------------------------------------------------
- // class factory calls this
- static IUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
-
-private:
- CriticalSection mFilterCritSec;
-
- // Note: The input pin defers its refcounting to the sink filter, so when
- // the input pin is addrefed, what actually happens is the sink filter is
- // addrefed.
- nsAutoPtr<AudioSinkInputPin> mInputPin;
-};
-
-} // namespace mozilla
-
-#endif // AudioSinkFilter_h_
diff --git a/dom/media/directshow/AudioSinkInputPin.cpp b/dom/media/directshow/AudioSinkInputPin.cpp
deleted file mode 100644
index 85a6e3da3..000000000
--- a/dom/media/directshow/AudioSinkInputPin.cpp
+++ /dev/null
@@ -1,195 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "AudioSinkInputPin.h"
-#include "AudioSinkFilter.h"
-#include "SampleSink.h"
-#include "mozilla/Logging.h"
-
-#include <wmsdkidl.h>
-
-using namespace mozilla::media;
-
-namespace mozilla {
-
-static LazyLogModule gDirectShowLog("DirectShowDecoder");
-#define LOG(...) MOZ_LOG(gDirectShowLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
-
-AudioSinkInputPin::AudioSinkInputPin(wchar_t* aObjectName,
- AudioSinkFilter* aFilter,
- mozilla::CriticalSection* aLock,
- HRESULT* aOutResult)
- : BaseInputPin(aObjectName, aFilter, aLock, aOutResult, aObjectName),
- mSegmentStartTime(0)
-{
- MOZ_COUNT_CTOR(AudioSinkInputPin);
- mSampleSink = new SampleSink();
-}
-
-AudioSinkInputPin::~AudioSinkInputPin()
-{
- MOZ_COUNT_DTOR(AudioSinkInputPin);
-}
-
-HRESULT
-AudioSinkInputPin::GetMediaType(int aPosition, MediaType* aOutMediaType)
-{
- NS_ENSURE_TRUE(aPosition >= 0, E_INVALIDARG);
- NS_ENSURE_TRUE(aOutMediaType, E_POINTER);
-
- if (aPosition > 0) {
- return S_FALSE;
- }
-
- // Note: We set output as PCM, as IEEE_FLOAT only works when using the
- // MP3 decoder as an MFT, and we can't do that while using DirectShow.
- aOutMediaType->SetType(&MEDIATYPE_Audio);
- aOutMediaType->SetSubtype(&MEDIASUBTYPE_PCM);
- aOutMediaType->SetType(&FORMAT_WaveFormatEx);
- aOutMediaType->SetTemporalCompression(FALSE);
-
- return S_OK;
-}
-
-HRESULT
-AudioSinkInputPin::CheckMediaType(const MediaType* aMediaType)
-{
- if (!aMediaType) {
- return E_INVALIDARG;
- }
-
- GUID majorType = *aMediaType->Type();
- if (majorType != MEDIATYPE_Audio && majorType != WMMEDIATYPE_Audio) {
- return E_INVALIDARG;
- }
-
- if (*aMediaType->Subtype() != MEDIASUBTYPE_PCM) {
- return E_INVALIDARG;
- }
-
- if (*aMediaType->FormatType() != FORMAT_WaveFormatEx) {
- return E_INVALIDARG;
- }
-
- // We accept the media type, stash its layout format!
- WAVEFORMATEX* wfx = (WAVEFORMATEX*)(aMediaType->pbFormat);
- GetSampleSink()->SetAudioFormat(wfx);
-
- return S_OK;
-}
-
-AudioSinkFilter*
-AudioSinkInputPin::GetAudioSinkFilter()
-{
- return reinterpret_cast<AudioSinkFilter*>(mFilter);
-}
-
-SampleSink*
-AudioSinkInputPin::GetSampleSink()
-{
- return mSampleSink;
-}
-
-HRESULT
-AudioSinkInputPin::SetAbsoluteMediaTime(IMediaSample* aSample)
-{
- HRESULT hr;
- REFERENCE_TIME start = 0, end = 0;
- hr = aSample->GetTime(&start, &end);
- NS_ENSURE_TRUE(SUCCEEDED(hr), E_FAIL);
- {
- CriticalSectionAutoEnter lock(*mLock);
- start += mSegmentStartTime;
- end += mSegmentStartTime;
- }
- hr = aSample->SetMediaTime(&start, &end);
- NS_ENSURE_TRUE(SUCCEEDED(hr), E_FAIL);
- return S_OK;
-}
-
-HRESULT
-AudioSinkInputPin::Receive(IMediaSample* aSample )
-{
- HRESULT hr;
- NS_ENSURE_TRUE(aSample, E_POINTER);
-
- hr = BaseInputPin::Receive(aSample);
- if (SUCCEEDED(hr) && hr != S_FALSE) { // S_FALSE == flushing
- // Set the timestamp of the sample after being adjusted for
- // seeking/segments in the "media time" attribute. When we seek,
- // DirectShow starts a new "segment", and starts labeling samples
- // from time=0 again, so we need to correct for this to get the
- // actual timestamps after seeking.
- hr = SetAbsoluteMediaTime(aSample);
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- hr = GetSampleSink()->Receive(aSample);
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
- }
- return S_OK;
-}
-
-already_AddRefed<IMediaSeeking>
-AudioSinkInputPin::GetConnectedPinSeeking()
-{
- RefPtr<IPin> peer = GetConnected();
- if (!peer)
- return nullptr;
- RefPtr<IMediaSeeking> seeking;
- peer->QueryInterface(static_cast<IMediaSeeking**>(getter_AddRefs(seeking)));
- return seeking.forget();
-}
-
-HRESULT
-AudioSinkInputPin::BeginFlush()
-{
- HRESULT hr = media::BaseInputPin::BeginFlush();
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- GetSampleSink()->Flush();
-
- return S_OK;
-}
-
-HRESULT
-AudioSinkInputPin::EndFlush()
-{
- HRESULT hr = media::BaseInputPin::EndFlush();
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- // Reset the EOS flag, so that if we're called after a seek we still work.
- GetSampleSink()->Reset();
-
- return S_OK;
-}
-
-HRESULT
-AudioSinkInputPin::EndOfStream(void)
-{
- HRESULT hr = media::BaseInputPin::EndOfStream();
- if (FAILED(hr) || hr == S_FALSE) {
-    // Pin is still flushing.
- return hr;
- }
- GetSampleSink()->SetEOS();
-
- return S_OK;
-}
-
-
-HRESULT
-AudioSinkInputPin::NewSegment(REFERENCE_TIME tStart,
- REFERENCE_TIME tStop,
- double dRate)
-{
- CriticalSectionAutoEnter lock(*mLock);
- // Record the start time of the new segment, so that we can store the
-  // correct absolute timestamp in the "media time" of each incoming sample.
- mSegmentStartTime = tStart;
- return S_OK;
-}
-
-} // namespace mozilla
-
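The NewSegment()/SetAbsoluteMediaTime() pair above is the whole timestamp story: after a seek DirectShow restarts sample times at zero, so the pin adds the segment start back in. A reduced sketch of that bookkeeping, with hypothetical names and plain integers in place of IMediaSample:

#include <cstdint>

using ReferenceTime = int64_t; // 100-nanosecond units, like REFERENCE_TIME.

struct SegmentClock {
  ReferenceTime mSegmentStartTime = 0;

  // Called on NewSegment(tStart, ...): remember where the new segment begins.
  void OnNewSegment(ReferenceTime aStart) { mSegmentStartTime = aStart; }

  // Called per sample: shift the segment-relative [start, end) times into
  // absolute stream time before handing the sample downstream.
  void ToAbsolute(ReferenceTime& aStart, ReferenceTime& aEnd) const {
    aStart += mSegmentStartTime;
    aEnd += mSegmentStartTime;
  }
};
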
diff --git a/dom/media/directshow/AudioSinkInputPin.h b/dom/media/directshow/AudioSinkInputPin.h
deleted file mode 100644
index 80503c641..000000000
--- a/dom/media/directshow/AudioSinkInputPin.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#if !defined(AudioSinkInputPin_h_)
-#define AudioSinkInputPin_h_
-
-#include "BaseInputPin.h"
-#include "DirectShowUtils.h"
-#include "mozilla/RefPtr.h"
-#include "nsAutoPtr.h"
-
-namespace mozilla {
-
-namespace media {
- class MediaType;
-}
-
-class AudioSinkFilter;
-class SampleSink;
-
-
-// Input pin for capturing audio output of a DirectShow filter graph.
-// This is the input pin for the AudioSinkFilter.
-class AudioSinkInputPin: public mozilla::media::BaseInputPin
-{
-public:
- AudioSinkInputPin(wchar_t* aObjectName,
- AudioSinkFilter* aFilter,
- mozilla::CriticalSection* aLock,
- HRESULT* aOutResult);
- virtual ~AudioSinkInputPin();
-
- HRESULT GetMediaType (IN int iPos, OUT mozilla::media::MediaType * pmt);
- HRESULT CheckMediaType (IN const mozilla::media::MediaType * pmt);
- STDMETHODIMP Receive (IN IMediaSample *);
- STDMETHODIMP BeginFlush() override;
- STDMETHODIMP EndFlush() override;
-
-  // Called when we start decoding a new segment, which happens directly after
- // a seek. This captures the segment's start time. Samples decoded by the
- // MP3 decoder have their timestamps offset from the segment start time.
- // Storing the segment start time enables us to set each sample's MediaTime
- // as an offset in the stream relative to the start of the stream, rather
- // than the start of the segment, i.e. its absolute time in the stream.
- STDMETHODIMP NewSegment(REFERENCE_TIME tStart,
- REFERENCE_TIME tStop,
- double dRate) override;
-
- STDMETHODIMP EndOfStream() override;
-
- // Returns the IMediaSeeking interface of the connected output pin.
- // We forward seeking requests upstream from the sink to the source
- // filters.
- already_AddRefed<IMediaSeeking> GetConnectedPinSeeking();
-
- SampleSink* GetSampleSink();
-
-private:
- AudioSinkFilter* GetAudioSinkFilter();
-
- // Sets the media time on the media sample, relative to the segment
- // start time.
- HRESULT SetAbsoluteMediaTime(IMediaSample* aSample);
-
- nsAutoPtr<SampleSink> mSampleSink;
-
- // Synchronized by the filter lock; BaseInputPin::mLock.
- REFERENCE_TIME mSegmentStartTime;
-};
-
-} // namespace mozilla
-
-#endif // AudioSinkInputPin_h_
diff --git a/dom/media/directshow/DirectShowDecoder.cpp b/dom/media/directshow/DirectShowDecoder.cpp
deleted file mode 100644
index da68b4daa..000000000
--- a/dom/media/directshow/DirectShowDecoder.cpp
+++ /dev/null
@@ -1,65 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "DirectShowDecoder.h"
-#include "DirectShowReader.h"
-#include "DirectShowUtils.h"
-#include "MediaDecoderStateMachine.h"
-#include "mozilla/Preferences.h"
-#include "mozilla/WindowsVersion.h"
-
-namespace mozilla {
-
-MediaDecoderStateMachine* DirectShowDecoder::CreateStateMachine()
-{
- return new MediaDecoderStateMachine(this, new DirectShowReader(this));
-}
-
-/* static */
-bool
-DirectShowDecoder::GetSupportedCodecs(const nsACString& aType,
- char const *const ** aCodecList)
-{
- if (!IsEnabled()) {
- return false;
- }
-
- static char const *const mp3AudioCodecs[] = {
- "mp3",
- nullptr
- };
- if (aType.EqualsASCII("audio/mpeg") ||
- aType.EqualsASCII("audio/mp3")) {
- if (aCodecList) {
- *aCodecList = mp3AudioCodecs;
- }
- return true;
- }
-
- return false;
-}
-
-/* static */
-bool
-DirectShowDecoder::IsEnabled()
-{
- return CanDecodeMP3UsingDirectShow() &&
- Preferences::GetBool("media.directshow.enabled");
-}
-
-DirectShowDecoder::DirectShowDecoder(MediaDecoderOwner* aOwner)
- : MediaDecoder(aOwner)
-{
- MOZ_COUNT_CTOR(DirectShowDecoder);
-}
-
-DirectShowDecoder::~DirectShowDecoder()
-{
- MOZ_COUNT_DTOR(DirectShowDecoder);
-}
-
-} // namespace mozilla
-
diff --git a/dom/media/directshow/DirectShowDecoder.h b/dom/media/directshow/DirectShowDecoder.h
deleted file mode 100644
index c4d371fbf..000000000
--- a/dom/media/directshow/DirectShowDecoder.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#if !defined(DirectShowDecoder_h_)
-#define DirectShowDecoder_h_
-
-#include "MediaDecoder.h"
-
-namespace mozilla {
-
-// Decoder that uses DirectShow to play back MP3 files only.
-class DirectShowDecoder : public MediaDecoder
-{
-public:
-
- explicit DirectShowDecoder(MediaDecoderOwner* aOwner);
- virtual ~DirectShowDecoder();
-
- MediaDecoder* Clone(MediaDecoderOwner* aOwner) override {
- if (!IsEnabled()) {
- return nullptr;
- }
- return new DirectShowDecoder(aOwner);
- }
-
- MediaDecoderStateMachine* CreateStateMachine() override;
-
- // Returns true if aType is a MIME type that we render with the
-  // DirectShow backend. If aCodecList is non-null,
- // it is filled with a (static const) null-terminated list of strings
- // denoting the codecs we'll playback. Note that playback is strictly
- // limited to MP3 only.
- static bool GetSupportedCodecs(const nsACString& aType,
- char const *const ** aCodecList);
-
- // Returns true if the DirectShow backend is preffed on.
- static bool IsEnabled();
-};
-
-} // namespace mozilla
-
-#endif
diff --git a/dom/media/directshow/DirectShowReader.cpp b/dom/media/directshow/DirectShowReader.cpp
deleted file mode 100644
index cacf6f8de..000000000
--- a/dom/media/directshow/DirectShowReader.cpp
+++ /dev/null
@@ -1,360 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=8 sts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "DirectShowReader.h"
-#include "MediaDecoderReader.h"
-#include "mozilla/RefPtr.h"
-#include "DirectShowUtils.h"
-#include "AudioSinkFilter.h"
-#include "SourceFilter.h"
-#include "SampleSink.h"
-#include "VideoUtils.h"
-
-using namespace mozilla::media;
-
-namespace mozilla {
-
-// Windows XP's MP3 decoder filter. This is available on XP only, on Vista
-// and later we can use the DMO Wrapper filter and MP3 decoder DMO.
-const GUID DirectShowReader::CLSID_MPEG_LAYER_3_DECODER_FILTER =
-{ 0x38BE3000, 0xDBF4, 0x11D0, {0x86, 0x0E, 0x00, 0xA0, 0x24, 0xCF, 0xEF, 0x6D} };
-
-
-static LazyLogModule gDirectShowLog("DirectShowDecoder");
-#define LOG(...) MOZ_LOG(gDirectShowLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
-
-DirectShowReader::DirectShowReader(AbstractMediaDecoder* aDecoder)
- : MediaDecoderReader(aDecoder),
- mMP3FrameParser(aDecoder->GetResource()->GetLength()),
-#ifdef DIRECTSHOW_REGISTER_GRAPH
- mRotRegister(0),
-#endif
- mNumChannels(0),
- mAudioRate(0),
- mBytesPerSample(0)
-{
- MOZ_ASSERT(NS_IsMainThread(), "Must be on main thread.");
- MOZ_COUNT_CTOR(DirectShowReader);
-}
-
-DirectShowReader::~DirectShowReader()
-{
- MOZ_ASSERT(NS_IsMainThread(), "Must be on main thread.");
- MOZ_COUNT_DTOR(DirectShowReader);
-#ifdef DIRECTSHOW_REGISTER_GRAPH
- if (mRotRegister) {
- RemoveGraphFromRunningObjectTable(mRotRegister);
- }
-#endif
-}
-
-// Try to parse the MP3 stream to make sure this is indeed an MP3, get the
-// estimated duration of the stream, and find the offset of the actual MP3
-// frames in the stream, as DirectShow doesn't like large ID3 sections.
-static nsresult
-ParseMP3Headers(MP3FrameParser *aParser, MediaResource *aResource)
-{
- const uint32_t MAX_READ_SIZE = 4096;
-
- uint64_t offset = 0;
- while (aParser->NeedsData() && !aParser->ParsedHeaders()) {
- uint32_t bytesRead;
- char buffer[MAX_READ_SIZE];
- nsresult rv = aResource->ReadAt(offset, buffer,
- MAX_READ_SIZE, &bytesRead);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (!bytesRead) {
- // End of stream.
- return NS_ERROR_FAILURE;
- }
-
- aParser->Parse(reinterpret_cast<uint8_t*>(buffer), bytesRead, offset);
- offset += bytesRead;
- }
-
- return aParser->IsMP3() ? NS_OK : NS_ERROR_FAILURE;
-}
-
-nsresult
-DirectShowReader::ReadMetadata(MediaInfo* aInfo,
- MetadataTags** aTags)
-{
- MOZ_ASSERT(OnTaskQueue());
- HRESULT hr;
- nsresult rv;
-
- // Create the filter graph, reference it by the GraphBuilder interface,
- // to make graph building more convenient.
- hr = CoCreateInstance(CLSID_FilterGraph,
- nullptr,
- CLSCTX_INPROC_SERVER,
- IID_IGraphBuilder,
- reinterpret_cast<void**>(static_cast<IGraphBuilder**>(getter_AddRefs(mGraph))));
- NS_ENSURE_TRUE(SUCCEEDED(hr) && mGraph, NS_ERROR_FAILURE);
-
- rv = ParseMP3Headers(&mMP3FrameParser, mDecoder->GetResource());
- NS_ENSURE_SUCCESS(rv, rv);
-
- #ifdef DIRECTSHOW_REGISTER_GRAPH
- hr = AddGraphToRunningObjectTable(mGraph, &mRotRegister);
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
- #endif
-
- // Extract the interface pointers we'll need from the filter graph.
- hr = mGraph->QueryInterface(static_cast<IMediaControl**>(getter_AddRefs(mControl)));
- NS_ENSURE_TRUE(SUCCEEDED(hr) && mControl, NS_ERROR_FAILURE);
-
- hr = mGraph->QueryInterface(static_cast<IMediaSeeking**>(getter_AddRefs(mMediaSeeking)));
- NS_ENSURE_TRUE(SUCCEEDED(hr) && mMediaSeeking, NS_ERROR_FAILURE);
-
- // Build the graph. Create the filters we need, and connect them. We
- // build the entire graph ourselves to prevent other decoders installed
- // on the system being created and used.
-
-  // Our source filter wraps the MediaResource.
- mSourceFilter = new SourceFilter(MEDIATYPE_Stream, MEDIASUBTYPE_MPEG1Audio);
- NS_ENSURE_TRUE(mSourceFilter, NS_ERROR_FAILURE);
-
- rv = mSourceFilter->Init(mDecoder->GetResource(), mMP3FrameParser.GetMP3Offset());
- NS_ENSURE_SUCCESS(rv, rv);
-
- hr = mGraph->AddFilter(mSourceFilter, L"MozillaDirectShowSource");
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- // The MPEG demuxer.
- RefPtr<IBaseFilter> demuxer;
- hr = CreateAndAddFilter(mGraph,
- CLSID_MPEG1Splitter,
- L"MPEG1Splitter",
- getter_AddRefs(demuxer));
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- // Platform MP3 decoder.
- RefPtr<IBaseFilter> decoder;
-  // First, try to create the MP3 decoder filter that ships with WinXP
- // directly. This filter doesn't normally exist on later versions of
- // Windows.
- hr = CreateAndAddFilter(mGraph,
- CLSID_MPEG_LAYER_3_DECODER_FILTER,
- L"MPEG Layer 3 Decoder",
- getter_AddRefs(decoder));
- if (FAILED(hr)) {
- // Failed to create MP3 decoder filter. Try to instantiate
- // the MP3 decoder DMO.
- hr = AddMP3DMOWrapperFilter(mGraph, getter_AddRefs(decoder));
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
- }
-
- // Sink, captures audio samples and inserts them into our pipeline.
- static const wchar_t* AudioSinkFilterName = L"MozAudioSinkFilter";
- mAudioSinkFilter = new AudioSinkFilter(AudioSinkFilterName, &hr);
- NS_ENSURE_TRUE(mAudioSinkFilter && SUCCEEDED(hr), NS_ERROR_FAILURE);
- hr = mGraph->AddFilter(mAudioSinkFilter, AudioSinkFilterName);
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- // Join the filters.
- hr = ConnectFilters(mGraph, mSourceFilter, demuxer);
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- hr = ConnectFilters(mGraph, demuxer, decoder);
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- hr = ConnectFilters(mGraph, decoder, mAudioSinkFilter);
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- WAVEFORMATEX format;
- mAudioSinkFilter->GetSampleSink()->GetAudioFormat(&format);
- NS_ENSURE_TRUE(format.wFormatTag == WAVE_FORMAT_PCM, NS_ERROR_FAILURE);
-
- mInfo.mAudio.mChannels = mNumChannels = format.nChannels;
- mInfo.mAudio.mRate = mAudioRate = format.nSamplesPerSec;
- mInfo.mAudio.mBitDepth = format.wBitsPerSample;
- mBytesPerSample = format.wBitsPerSample / 8;
-
- // Begin decoding!
- hr = mControl->Run();
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- DWORD seekCaps = 0;
- hr = mMediaSeeking->GetCapabilities(&seekCaps);
- mInfo.mMediaSeekable = SUCCEEDED(hr) && (AM_SEEKING_CanSeekAbsolute & seekCaps);
-
- int64_t duration = mMP3FrameParser.GetDuration();
- if (SUCCEEDED(hr)) {
- mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(duration));
- }
-
- LOG("Successfully initialized DirectShow MP3 decoder.");
- LOG("Channels=%u Hz=%u duration=%lld bytesPerSample=%d",
- mInfo.mAudio.mChannels,
- mInfo.mAudio.mRate,
- RefTimeToUsecs(duration),
- mBytesPerSample);
-
- *aInfo = mInfo;
- // Note: The SourceFilter strips ID3v2 tags out of the stream.
- *aTags = nullptr;
-
- return NS_OK;
-}
-
-inline float
-UnsignedByteToAudioSample(uint8_t aValue)
-{
- return aValue * (2.0f / UINT8_MAX) - 1.0f;
-}
-
-bool
-DirectShowReader::Finish(HRESULT aStatus)
-{
- MOZ_ASSERT(OnTaskQueue());
-
- LOG("DirectShowReader::Finish(0x%x)", aStatus);
- // Notify the filter graph of end of stream.
- RefPtr<IMediaEventSink> eventSink;
- HRESULT hr = mGraph->QueryInterface(static_cast<IMediaEventSink**>(getter_AddRefs(eventSink)));
- if (SUCCEEDED(hr) && eventSink) {
- eventSink->Notify(EC_COMPLETE, aStatus, 0);
- }
- return false;
-}
-
-class DirectShowCopy
-{
-public:
- DirectShowCopy(uint8_t *aSource, uint32_t aBytesPerSample,
- uint32_t aSamples, uint32_t aChannels)
- : mSource(aSource)
- , mBytesPerSample(aBytesPerSample)
- , mSamples(aSamples)
- , mChannels(aChannels)
- , mNextSample(0)
- { }
-
- uint32_t operator()(AudioDataValue *aBuffer, uint32_t aSamples)
- {
- uint32_t maxSamples = std::min(aSamples, mSamples - mNextSample);
- uint32_t frames = maxSamples / mChannels;
- size_t byteOffset = mNextSample * mBytesPerSample;
- if (mBytesPerSample == 1) {
- for (uint32_t i = 0; i < maxSamples; ++i) {
- uint8_t *sample = mSource + byteOffset;
- aBuffer[i] = UnsignedByteToAudioSample(*sample);
- byteOffset += mBytesPerSample;
- }
- } else if (mBytesPerSample == 2) {
- for (uint32_t i = 0; i < maxSamples; ++i) {
- int16_t *sample = reinterpret_cast<int16_t *>(mSource + byteOffset);
- aBuffer[i] = AudioSampleToFloat(*sample);
- byteOffset += mBytesPerSample;
- }
- }
- mNextSample += maxSamples;
- return frames;
- }
-
-private:
- uint8_t * const mSource;
- const uint32_t mBytesPerSample;
- const uint32_t mSamples;
- const uint32_t mChannels;
- uint32_t mNextSample;
-};
-
-bool
-DirectShowReader::DecodeAudioData()
-{
- MOZ_ASSERT(OnTaskQueue());
- HRESULT hr;
-
- SampleSink* sink = mAudioSinkFilter->GetSampleSink();
- if (sink->AtEOS()) {
- // End of stream.
- return Finish(S_OK);
- }
-
- // Get the next chunk of audio samples. This blocks until the sample
-  // arrives, or an error occurs (like the stream is shut down).
- RefPtr<IMediaSample> sample;
- hr = sink->Extract(sample);
- if (FAILED(hr) || hr == S_FALSE) {
- return Finish(hr);
- }
-
- int64_t start = 0, end = 0;
- sample->GetMediaTime(&start, &end);
- LOG("DirectShowReader::DecodeAudioData [%4.2lf-%4.2lf]",
- RefTimeToSeconds(start),
- RefTimeToSeconds(end));
-
- LONG length = sample->GetActualDataLength();
- LONG numSamples = length / mBytesPerSample;
- LONG numFrames = length / mBytesPerSample / mNumChannels;
-
- BYTE* data = nullptr;
- hr = sample->GetPointer(&data);
- NS_ENSURE_TRUE(SUCCEEDED(hr), Finish(hr));
-
- mAudioCompactor.Push(mDecoder->GetResource()->Tell(),
- RefTimeToUsecs(start),
- mInfo.mAudio.mRate,
- numFrames,
- mNumChannels,
- DirectShowCopy(reinterpret_cast<uint8_t *>(data),
- mBytesPerSample,
- numSamples,
- mNumChannels));
- return true;
-}
-
-bool
-DirectShowReader::DecodeVideoFrame(bool &aKeyframeSkip,
- int64_t aTimeThreshold)
-{
- MOZ_ASSERT(OnTaskQueue());
- return false;
-}
-
-RefPtr<MediaDecoderReader::SeekPromise>
-DirectShowReader::Seek(SeekTarget aTarget, int64_t aEndTime)
-{
- nsresult res = SeekInternal(aTarget.GetTime().ToMicroseconds());
- if (NS_FAILED(res)) {
- return SeekPromise::CreateAndReject(res, __func__);
- } else {
- return SeekPromise::CreateAndResolve(aTarget.GetTime(), __func__);
- }
-}
-
-nsresult
-DirectShowReader::SeekInternal(int64_t aTargetUs)
-{
- HRESULT hr;
- MOZ_ASSERT(OnTaskQueue());
-
- LOG("DirectShowReader::Seek() target=%lld", aTargetUs);
-
- hr = mControl->Pause();
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- nsresult rv = ResetDecode();
- NS_ENSURE_SUCCESS(rv, rv);
-
- LONGLONG seekPosition = UsecsToRefTime(aTargetUs);
- hr = mMediaSeeking->SetPositions(&seekPosition,
- AM_SEEKING_AbsolutePositioning,
- nullptr,
- AM_SEEKING_NoPositioning);
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- hr = mControl->Run();
- NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
-
- return NS_OK;
-}
-
-} // namespace mozilla
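
The DirectShowCopy functor above normalises whatever PCM width the decoder produced into floating-point samples. The two conversions it relies on, written out as standalone helpers; the 16-bit scale factor mirrors what Gecko's AudioSampleToFloat is expected to do, so treat it as an assumption:

#include <cstdint>

// 8-bit PCM is unsigned and centred on 128: 0 -> -1.0, 255 -> ~+1.0
// (same formula as UnsignedByteToAudioSample above).
static inline float UnsignedByteToFloat(uint8_t aValue)
{
  return aValue * (2.0f / UINT8_MAX) - 1.0f;
}

// 16-bit PCM is signed: -32768 -> -1.0, 32767 -> ~+1.0.
static inline float SignedShortToFloat(int16_t aValue)
{
  return aValue / 32768.0f;
}
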
diff --git a/dom/media/directshow/DirectShowReader.h b/dom/media/directshow/DirectShowReader.h
deleted file mode 100644
index e1326d416..000000000
--- a/dom/media/directshow/DirectShowReader.h
+++ /dev/null
@@ -1,110 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#if !defined(DirectShowReader_h_)
-#define DirectShowReader_h_
-
-#include "windows.h" // HRESULT, DWORD
-#include "MediaDecoderReader.h"
-#include "MediaResource.h"
-#include "mozilla/RefPtr.h"
-#include "MP3FrameParser.h"
-
-// Add the graph to the Running Object Table so that we can connect
-// to this graph with GraphEdit/GraphStudio. Note: on Vista and up you must
-// also regsvr32 proppage.dll from the Windows SDK.
-// See: http://msdn.microsoft.com/en-us/library/ms787252(VS.85).aspx
-// #define DIRECTSHOW_REGISTER_GRAPH
-
-struct IGraphBuilder;
-struct IMediaControl;
-struct IMediaSeeking;
-
-namespace mozilla {
-
-class AudioSinkFilter;
-class SourceFilter;
-
-// Decoder backend for decoding MP3 using DirectShow. DirectShow operates as
-// a filter graph. The basic design of the DirectShowReader is that we have
-// a SourceFilter that wraps the MediaResource and connects to the
-// MP3 decoder filter. The MP3 decoder filter "pulls" data as it requires it
-// downstream on its own thread. When the MP3 decoder has produced a block of
-// decoded samples, its thread calls downstream into our AudioSinkFilter,
-// passing the decoded buffer in. The AudioSinkFilter inserts the samples into
-// a SampleSink object. The SampleSink blocks the MP3 decoder's thread until
-// the decode thread calls DecodeAudioData(), whereupon the SampleSink
-// releases the decoded samples to the decode thread, and unblocks the MP3
-// decoder's thread. The MP3 decoder can then request more data from the
-// SourceFilter, and decode more data. If the decode thread calls
-// DecodeAudioData() and there's no decoded samples waiting to be extracted
-// in the SampleSink, the SampleSink blocks the decode thread until the MP3
-// decoder produces a decoded sample.
-class DirectShowReader : public MediaDecoderReader
-{
-public:
- DirectShowReader(AbstractMediaDecoder* aDecoder);
-
- virtual ~DirectShowReader();
-
- bool DecodeAudioData() override;
- bool DecodeVideoFrame(bool &aKeyframeSkip,
- int64_t aTimeThreshold) override;
-
- nsresult ReadMetadata(MediaInfo* aInfo,
- MetadataTags** aTags) override;
-
- RefPtr<SeekPromise>
- Seek(SeekTarget aTarget, int64_t aEndTime) override;
-
- static const GUID CLSID_MPEG_LAYER_3_DECODER_FILTER;
-
-private:
- // Notifies the filter graph that playback is complete. aStatus is
- // the code to send to the filter graph. Always returns false, so
- // that we can just "return Finish()" from DecodeAudioData().
- bool Finish(HRESULT aStatus);
-
- nsresult SeekInternal(int64_t aTime);
-
- // DirectShow filter graph, and associated playback and seeking
- // control interfaces.
- RefPtr<IGraphBuilder> mGraph;
- RefPtr<IMediaControl> mControl;
- RefPtr<IMediaSeeking> mMediaSeeking;
-
- // Wraps the MediaResource, and feeds undecoded data into the filter graph.
- RefPtr<SourceFilter> mSourceFilter;
-
- // Sits at the end of the graph, removing decoded samples from the graph.
- // The graph will block while this is blocked, i.e. it will pause decoding.
- RefPtr<AudioSinkFilter> mAudioSinkFilter;
-
-  // Some MP3s are variable bitrate, so DirectShow can base its duration
-  // estimate on the wrong bitrate. We therefore parse the MP3 frames
-  // ourselves to get a more accurate estimate of the duration.
- MP3FrameParser mMP3FrameParser;
-
-#ifdef DIRECTSHOW_REGISTER_GRAPH
- // Used to add/remove the filter graph to the Running Object Table. You can
- // connect GraphEdit/GraphStudio to the graph to observe and/or debug its
- // topology and state.
- DWORD mRotRegister;
-#endif
-
- // Number of channels in the audio stream.
- uint32_t mNumChannels;
-
- // Samples per second in the audio stream.
- uint32_t mAudioRate;
-
- // Number of bytes per sample. Can be either 1 or 2.
- uint32_t mBytesPerSample;
-};
-
-} // namespace mozilla
-
-#endif
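
The blocking handshake described in the class comment above (the MP3 decoder's thread blocks until the decode thread extracts a sample, and vice versa) is essentially a one-slot mailbox. A minimal sketch under that assumption; the real SampleSink additionally tracks EOS, flushing and IMediaSample ownership:

#include <condition_variable>
#include <mutex>
#include <optional>

template <typename Sample>
class BlockingSampleSlot {
public:
  // Decoder thread: blocks while a previous sample is still waiting.
  void Put(Sample aSample) {
    std::unique_lock<std::mutex> lock(mMutex);
    mCond.wait(lock, [this] { return !mSample.has_value(); });
    mSample = std::move(aSample);
    mCond.notify_all();
  }

  // Decode thread: blocks until the decoder produces the next sample.
  Sample Take() {
    std::unique_lock<std::mutex> lock(mMutex);
    mCond.wait(lock, [this] { return mSample.has_value(); });
    Sample out = std::move(*mSample);
    mSample.reset();
    mCond.notify_all();
    return out;
  }

private:
  std::mutex mMutex;
  std::condition_variable mCond;
  std::optional<Sample> mSample;
};
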
diff --git a/dom/media/directshow/DirectShowUtils.cpp b/dom/media/directshow/DirectShowUtils.cpp
deleted file mode 100644
index b2afa7528..000000000
--- a/dom/media/directshow/DirectShowUtils.cpp
+++ /dev/null
@@ -1,369 +0,0 @@
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "DirectShowUtils.h"
-#include "dmodshow.h"
-#include "wmcodecdsp.h"
-#include "dmoreg.h"
-#include "mozilla/ArrayUtils.h"
-#include "mozilla/RefPtr.h"
-#include "nsPrintfCString.h"
-
-#define WARN(...) NS_WARNING(nsPrintfCString(__VA_ARGS__).get())
-
-namespace mozilla {
-
-// Create a table which maps GUIDs to a string representation of the GUID.
-// This is useful for debugging purposes, for logging the GUIDs of media types.
-// This is only available when logging is enabled, i.e. not in release builds.
-struct GuidToName {
- const char* name;
- const GUID guid;
-};
-
-#pragma push_macro("OUR_GUID_ENTRY")
-#undef OUR_GUID_ENTRY
-#define OUR_GUID_ENTRY(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
- { #name, {l, w1, w2, {b1, b2, b3, b4, b5, b6, b7, b8}} },
-
-static const GuidToName GuidToNameTable[] = {
-#include <uuids.h>
-};
-
-#pragma pop_macro("OUR_GUID_ENTRY")
-
-const char*
-GetDirectShowGuidName(const GUID& aGuid)
-{
- const size_t len = ArrayLength(GuidToNameTable);
- for (unsigned i = 0; i < len; i++) {
- if (IsEqualGUID(aGuid, GuidToNameTable[i].guid)) {
- return GuidToNameTable[i].name;
- }
- }
- return "Unknown";
-}
-
-void
-RemoveGraphFromRunningObjectTable(DWORD aRotRegister)
-{
- RefPtr<IRunningObjectTable> runningObjectTable;
- if (SUCCEEDED(GetRunningObjectTable(0, getter_AddRefs(runningObjectTable)))) {
- runningObjectTable->Revoke(aRotRegister);
- }
-}
-
-HRESULT
-AddGraphToRunningObjectTable(IUnknown *aUnkGraph, DWORD *aOutRotRegister)
-{
- HRESULT hr;
-
- RefPtr<IMoniker> moniker;
- RefPtr<IRunningObjectTable> runningObjectTable;
-
- hr = GetRunningObjectTable(0, getter_AddRefs(runningObjectTable));
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- const size_t STRING_LENGTH = 256;
- WCHAR wsz[STRING_LENGTH];
-
- StringCchPrintfW(wsz,
- STRING_LENGTH,
- L"FilterGraph %08x pid %08x",
- (DWORD_PTR)aUnkGraph,
- GetCurrentProcessId());
-
- hr = CreateItemMoniker(L"!", wsz, getter_AddRefs(moniker));
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- hr = runningObjectTable->Register(ROTFLAGS_REGISTRATIONKEEPSALIVE,
- aUnkGraph,
- moniker,
- aOutRotRegister);
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- return S_OK;
-}
-
-const char*
-GetGraphNotifyString(long evCode)
-{
-#define CASE(x) case x: return #x
- switch(evCode) {
- CASE(EC_ACTIVATE); // A video window is being activated or deactivated.
- CASE(EC_BANDWIDTHCHANGE); // Not supported.
- CASE(EC_BUFFERING_DATA); // The graph is buffering data, or has stopped buffering data.
-    CASE(EC_BUILT); // Sent by the Video Control when a graph has been built. Not forwarded to applications.
- CASE(EC_CLOCK_CHANGED); // The reference clock has changed.
- CASE(EC_CLOCK_UNSET); // The clock provider was disconnected.
- CASE(EC_CODECAPI_EVENT); // Sent by an encoder to signal an encoding event.
- CASE(EC_COMPLETE); // All data from a particular stream has been rendered.
- CASE(EC_CONTENTPROPERTY_CHANGED); // Not supported.
- CASE(EC_DEVICE_LOST); // A Plug and Play device was removed or has become available again.
- CASE(EC_DISPLAY_CHANGED); // The display mode has changed.
- CASE(EC_END_OF_SEGMENT); // The end of a segment has been reached.
- CASE(EC_EOS_SOON); // Not supported.
- CASE(EC_ERROR_STILLPLAYING); // An asynchronous command to run the graph has failed.
- CASE(EC_ERRORABORT); // An operation was aborted because of an error.
- CASE(EC_ERRORABORTEX); // An operation was aborted because of an error.
- CASE(EC_EXTDEVICE_MODE_CHANGE); // Not supported.
- CASE(EC_FILE_CLOSED); // The source file was closed because of an unexpected event.
- CASE(EC_FULLSCREEN_LOST); // The video renderer is switching out of full-screen mode.
- CASE(EC_GRAPH_CHANGED); // The filter graph has changed.
- CASE(EC_LENGTH_CHANGED); // The length of a source has changed.
- CASE(EC_LOADSTATUS); // Notifies the application of progress when opening a network file.
- CASE(EC_MARKER_HIT); // Not supported.
- CASE(EC_NEED_RESTART); // A filter is requesting that the graph be restarted.
- CASE(EC_NEW_PIN); // Not supported.
- CASE(EC_NOTIFY_WINDOW); // Notifies a filter of the video renderer's window.
- CASE(EC_OLE_EVENT); // A filter is passing a text string to the application.
- CASE(EC_OPENING_FILE); // The graph is opening a file, or has finished opening a file.
- CASE(EC_PALETTE_CHANGED); // The video palette has changed.
- CASE(EC_PAUSED); // A pause request has completed.
- CASE(EC_PLEASE_REOPEN); // The source file has changed.
- CASE(EC_PREPROCESS_COMPLETE); // Sent by the WM ASF Writer filter when it completes the pre-processing for multipass encoding.
- CASE(EC_PROCESSING_LATENCY); // Indicates the amount of time that a component is taking to process each sample.
- CASE(EC_QUALITY_CHANGE); // The graph is dropping samples, for quality control.
- //CASE(EC_RENDER_FINISHED); // Not supported.
- CASE(EC_REPAINT); // A video renderer requires a repaint.
- CASE(EC_SAMPLE_LATENCY); // Specifies how far behind schedule a component is for processing samples.
- //CASE(EC_SAMPLE_NEEDED); // Requests a new input sample from the Enhanced Video Renderer (EVR) filter.
- CASE(EC_SCRUB_TIME); // Specifies the time stamp for the most recent frame step.
- CASE(EC_SEGMENT_STARTED); // A new segment has started.
- CASE(EC_SHUTTING_DOWN); // The filter graph is shutting down, prior to being destroyed.
- CASE(EC_SNDDEV_IN_ERROR); // A device error has occurred in an audio capture filter.
- CASE(EC_SNDDEV_OUT_ERROR); // A device error has occurred in an audio renderer filter.
- CASE(EC_STARVATION); // A filter is not receiving enough data.
- CASE(EC_STATE_CHANGE); // The filter graph has changed state.
- CASE(EC_STATUS); // Contains two arbitrary status strings.
- CASE(EC_STEP_COMPLETE); // A filter performing frame stepping has stepped the specified number of frames.
- CASE(EC_STREAM_CONTROL_STARTED); // A stream-control start command has taken effect.
- CASE(EC_STREAM_CONTROL_STOPPED); // A stream-control stop command has taken effect.
- CASE(EC_STREAM_ERROR_STILLPLAYING); // An error has occurred in a stream. The stream is still playing.
- CASE(EC_STREAM_ERROR_STOPPED); // A stream has stopped because of an error.
- CASE(EC_TIMECODE_AVAILABLE); // Not supported.
-    CASE(EC_UNBUILT); // Sent by the Video Control when a graph has been torn down. Not forwarded to applications.
- CASE(EC_USERABORT); // The user has terminated playback.
- CASE(EC_VIDEO_SIZE_CHANGED); // The native video size has changed.
- CASE(EC_VIDEOFRAMEREADY); // A video frame is ready for display.
- CASE(EC_VMR_RECONNECTION_FAILED); // Sent by the VMR-7 and the VMR-9 when it was unable to accept a dynamic format change request from the upstream decoder.
- CASE(EC_VMR_RENDERDEVICE_SET); // Sent when the VMR has selected its rendering mechanism.
- CASE(EC_VMR_SURFACE_FLIPPED); // Sent when the VMR-7's allocator presenter has called the DirectDraw Flip method on the surface being presented.
- CASE(EC_WINDOW_DESTROYED); // The video renderer was destroyed or removed from the graph.
- CASE(EC_WMT_EVENT); // Sent by the WM ASF Reader filter when it reads ASF files protected by digital rights management (DRM).
- CASE(EC_WMT_INDEX_EVENT); // Sent when an application uses the WM ASF Writer to index Windows Media Video files.
- CASE(S_OK); // Success.
- CASE(VFW_S_AUDIO_NOT_RENDERED); // Partial success; the audio was not rendered.
- CASE(VFW_S_DUPLICATE_NAME); // Success; the Filter Graph Manager modified a filter name to avoid duplication.
- CASE(VFW_S_PARTIAL_RENDER); // Partial success; some of the streams in this movie are in an unsupported format.
- CASE(VFW_S_VIDEO_NOT_RENDERED); // Partial success; the video was not rendered.
- CASE(E_ABORT); // Operation aborted.
- CASE(E_OUTOFMEMORY); // Insufficient memory.
- CASE(E_POINTER); // Null pointer argument.
- CASE(VFW_E_CANNOT_CONNECT); // No combination of intermediate filters could be found to make the connection.
- CASE(VFW_E_CANNOT_RENDER); // No combination of filters could be found to render the stream.
- CASE(VFW_E_NO_ACCEPTABLE_TYPES); // There is no common media type between these pins.
- CASE(VFW_E_NOT_IN_GRAPH);
-
- default:
- return "Unknown Code";
- };
-#undef CASE
-}
-
-HRESULT
-CreateAndAddFilter(IGraphBuilder* aGraph,
- REFGUID aFilterClsId,
- LPCWSTR aFilterName,
- IBaseFilter **aOutFilter)
-{
- NS_ENSURE_TRUE(aGraph, E_POINTER);
- NS_ENSURE_TRUE(aOutFilter, E_POINTER);
- HRESULT hr;
-
- RefPtr<IBaseFilter> filter;
- hr = CoCreateInstance(aFilterClsId,
- nullptr,
- CLSCTX_INPROC_SERVER,
- IID_IBaseFilter,
- getter_AddRefs(filter));
- if (FAILED(hr)) {
- // Object probably not available on this system.
- WARN("CoCreateInstance failed, hr=%x", hr);
- return hr;
- }
-
- hr = aGraph->AddFilter(filter, aFilterName);
- if (FAILED(hr)) {
- WARN("AddFilter failed, hr=%x", hr);
- return hr;
- }
-
- filter.forget(aOutFilter);
-
- return S_OK;
-}
-
-HRESULT
-CreateMP3DMOWrapperFilter(IBaseFilter **aOutFilter)
-{
- NS_ENSURE_TRUE(aOutFilter, E_POINTER);
- HRESULT hr;
-
- // Create the wrapper filter.
- RefPtr<IBaseFilter> filter;
- hr = CoCreateInstance(CLSID_DMOWrapperFilter,
- nullptr,
- CLSCTX_INPROC_SERVER,
- IID_IBaseFilter,
- getter_AddRefs(filter));
- if (FAILED(hr)) {
- WARN("CoCreateInstance failed, hr=%x", hr);
- return hr;
- }
-
- // Query for IDMOWrapperFilter.
- RefPtr<IDMOWrapperFilter> dmoWrapper;
- hr = filter->QueryInterface(IID_IDMOWrapperFilter,
- getter_AddRefs(dmoWrapper));
- if (FAILED(hr)) {
- WARN("QueryInterface failed, hr=%x", hr);
- return hr;
- }
-
- hr = dmoWrapper->Init(CLSID_CMP3DecMediaObject, DMOCATEGORY_AUDIO_DECODER);
- if (FAILED(hr)) {
- // Can't instantiate the MP3 DMO. It doesn't exist on Windows XP, so we're
- // probably hitting that. Don't log a warning to the console; this is an
- // expected error.
- WARN("dmoWrapper Init failed, hr=%x", hr);
- return hr;
- }
-
- filter.forget(aOutFilter);
-
- return S_OK;
-}
-
-HRESULT
-AddMP3DMOWrapperFilter(IGraphBuilder* aGraph,
- IBaseFilter **aOutFilter)
-{
- NS_ENSURE_TRUE(aGraph, E_POINTER);
- NS_ENSURE_TRUE(aOutFilter, E_POINTER);
- HRESULT hr;
-
- // Create the wrapper filter.
- RefPtr<IBaseFilter> filter;
- hr = CreateMP3DMOWrapperFilter(getter_AddRefs(filter));
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- // Add the wrapper filter to graph.
- hr = aGraph->AddFilter(filter, L"MP3 Decoder DMO");
- if (FAILED(hr)) {
- WARN("AddFilter failed, hr=%x", hr);
- return hr;
- }
-
- filter.forget(aOutFilter);
-
- return S_OK;
-}
-
-bool
-CanDecodeMP3UsingDirectShow()
-{
- RefPtr<IBaseFilter> filter;
-
- // Can we create the MP3 demuxer filter?
- if (FAILED(CoCreateInstance(CLSID_MPEG1Splitter,
- nullptr,
- CLSCTX_INPROC_SERVER,
- IID_IBaseFilter,
- getter_AddRefs(filter)))) {
- return false;
- }
-
- // Can we create either the WinXP MP3 decoder filter or the MP3 DMO decoder?
- if (FAILED(CoCreateInstance(DirectShowReader::CLSID_MPEG_LAYER_3_DECODER_FILTER,
- nullptr,
- CLSCTX_INPROC_SERVER,
- IID_IBaseFilter,
- getter_AddRefs(filter))) &&
- FAILED(CreateMP3DMOWrapperFilter(getter_AddRefs(filter)))) {
- return false;
- }
-
- // Else, we can create all of the components we need. Assume
- // DirectShow is going to work...
- return true;
-}
-
-// Match a pin by pin direction and connection state.
-HRESULT
-MatchUnconnectedPin(IPin* aPin,
- PIN_DIRECTION aPinDir,
- bool *aOutMatches)
-{
- NS_ENSURE_TRUE(aPin, E_POINTER);
- NS_ENSURE_TRUE(aOutMatches, E_POINTER);
-
- // Ensure the pin is unconnected.
- RefPtr<IPin> peer;
- HRESULT hr = aPin->ConnectedTo(getter_AddRefs(peer));
- if (hr != VFW_E_NOT_CONNECTED) {
- *aOutMatches = false;
- return hr;
- }
-
- // Ensure the pin is of the specified direction.
- PIN_DIRECTION pinDir;
- hr = aPin->QueryDirection(&pinDir);
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- *aOutMatches = (pinDir == aPinDir);
- return S_OK;
-}
-
-// Return the first unconnected input pin or output pin.
-already_AddRefed<IPin>
-GetUnconnectedPin(IBaseFilter* aFilter, PIN_DIRECTION aPinDir)
-{
- RefPtr<IEnumPins> enumPins;
-
- HRESULT hr = aFilter->EnumPins(getter_AddRefs(enumPins));
- NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
- // Test each pin to see if it matches the direction we're looking for.
- RefPtr<IPin> pin;
- while (S_OK == enumPins->Next(1, getter_AddRefs(pin), nullptr)) {
- bool matches = false;
- if (SUCCEEDED(MatchUnconnectedPin(pin, aPinDir, &matches)) &&
- matches) {
- return pin.forget();
- }
- }
-
- return nullptr;
-}
-
-HRESULT
-ConnectFilters(IGraphBuilder* aGraph,
- IBaseFilter* aOutputFilter,
- IBaseFilter* aInputFilter)
-{
- RefPtr<IPin> output = GetUnconnectedPin(aOutputFilter, PINDIR_OUTPUT);
- NS_ENSURE_TRUE(output, E_FAIL);
-
- RefPtr<IPin> input = GetUnconnectedPin(aInputFilter, PINDIR_INPUT);
- NS_ENSURE_TRUE(input, E_FAIL);
-
- return aGraph->Connect(output, input);
-}
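-
-// Example: a minimal sketch showing how the helpers above compose into an MP3
-// graph. The function name, filter display name and overall flow are
-// illustrative only; this is not the graph construction used by
-// DirectShowReader.
-static HRESULT
-BuildMP3GraphSketch(IGraphBuilder* aGraph)
-{
- // Demuxer: the MPEG-1 splitter probed in CanDecodeMP3UsingDirectShow().
- RefPtr<IBaseFilter> demuxer;
- HRESULT hr = CreateAndAddFilter(aGraph,
- CLSID_MPEG1Splitter,
- L"MPEG-1 Splitter",
- getter_AddRefs(demuxer));
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- // Decoder: the MP3 DMO wrapped for use inside a DirectShow graph.
- RefPtr<IBaseFilter> decoder;
- hr = AddMP3DMOWrapperFilter(aGraph, getter_AddRefs(decoder));
- NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
- // Join the demuxer's unconnected output pin to the decoder's input pin.
- return ConnectFilters(aGraph, demuxer, decoder);
-}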
-
-} // namespace mozilla
-
-// avoid redefined macro in unified build
-#undef WARN
diff --git a/dom/media/directshow/DirectShowUtils.h b/dom/media/directshow/DirectShowUtils.h
deleted file mode 100644
index 3bbc122fc..000000000
--- a/dom/media/directshow/DirectShowUtils.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef _DirectShowUtils_h_
-#define _DirectShowUtils_h_
-
-#include <stdint.h>
-#include "dshow.h"
-
-// XXXbz windowsx.h defines GetFirstChild, GetNextSibling,
-// GetPrevSibling are macros, apparently... Eeevil. We have functions
-// called that on some classes, so undef them.
-#undef GetFirstChild
-#undef GetNextSibling
-#undef GetPrevSibling
-
-#include "DShowTools.h"
-#include "mozilla/Logging.h"
-
-namespace mozilla {
-
-// Win32 "Event" wrapper. Must be paired with a CriticalSection to create a
-// Java-style "monitor".
-class Signal {
-public:
-
- Signal(CriticalSection* aLock)
- : mLock(aLock)
- {
- CriticalSectionAutoEnter lock(*mLock);
- mEvent = CreateEvent(nullptr, FALSE, FALSE, nullptr);
- }
-
- ~Signal() {
- CriticalSectionAutoEnter lock(*mLock);
- CloseHandle(mEvent);
- }
-
- // Lock must be held.
- void Notify() {
- SetEvent(mEvent);
- }
-
- // Lock must be held. Check the wait condition before waiting!
- HRESULT Wait() {
- mLock->Leave();
- DWORD result = WaitForSingleObject(mEvent, INFINITE);
- mLock->Enter();
- return result == WAIT_OBJECT_0 ? S_OK : E_FAIL;
- }
-
-private:
- CriticalSection* mLock;
- HANDLE mEvent;
-};
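-
-// Usage sketch of the monitor pattern this class supports; the lock, signal
-// and condition names below are illustrative only.
-//
-//   CriticalSectionAutoEnter lock(mLock);   // Take the paired lock.
-//   while (!ConditionHolds()) {             // Re-check after every wake-up.
-//     mSignal.Wait();                       // Releases mLock while blocked,
-//   }                                       // re-enters it before returning.
-//   DoWorkWhileHoldingLock();
-//   mSignal.Notify();                       // Wake a peer blocked in Wait().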
-
-HRESULT
-AddGraphToRunningObjectTable(IUnknown *aUnkGraph, DWORD *aOutRotRegister);
-
-void
-RemoveGraphFromRunningObjectTable(DWORD aRotRegister);
-
-const char*
-GetGraphNotifyString(long evCode);
-
-// Creates a filter and adds it to a graph.
-HRESULT
-CreateAndAddFilter(IGraphBuilder* aGraph,
- REFGUID aFilterClsId,
- LPCWSTR aFilterName,
- IBaseFilter **aOutFilter);
-
-HRESULT
-AddMP3DMOWrapperFilter(IGraphBuilder* aGraph,
- IBaseFilter **aOutFilter);
-
-// Connects the output pin on aOutputFilter to an input pin on
-// aInputFilter, in aGraph.
-HRESULT
-ConnectFilters(IGraphBuilder* aGraph,
- IBaseFilter* aOutputFilter,
- IBaseFilter* aInputFilter);
-
-HRESULT
-MatchUnconnectedPin(IPin* aPin,
- PIN_DIRECTION aPinDir,
- bool *aOutMatches);
-
-// Converts from microseconds to DirectShow "Reference Time"
-// (hundreds of nanoseconds).
-inline int64_t
-UsecsToRefTime(const int64_t aUsecs)
-{
- return aUsecs * 10;
-}
-
-// Converts from DirectShow "Reference Time" (hundreds of nanoseconds)
-// to microseconds.
-inline int64_t
-RefTimeToUsecs(const int64_t hRefTime)
-{
- return hRefTime / 10;
-}
-
-// Converts from DirectShow "Reference Time" (hundreds of nanoseconds)
-// to seconds.
-inline double
-RefTimeToSeconds(const REFERENCE_TIME aRefTime)
-{
- return double(aRefTime) / 10000000;
-}
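-
-// For orientation, one second expressed in each unit:
-//   UsecsToRefTime(1000000)    -> 10000000   (1 s = 10,000,000 ref-time ticks)
-//   RefTimeToUsecs(10000000)   -> 1000000
-//   RefTimeToSeconds(10000000) -> 1.0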
-
-const char*
-GetDirectShowGuidName(const GUID& aGuid);
-
-// Returns true if we can instantiate an MP3 demuxer and decoder filters.
-// Use this to detect whether MP3 support is installed.
-bool
-CanDecodeMP3UsingDirectShow();
-
-} // namespace mozilla
-
-#endif
diff --git a/dom/media/directshow/SampleSink.cpp b/dom/media/directshow/SampleSink.cpp
deleted file mode 100644
index fa5dc8d19..000000000
--- a/dom/media/directshow/SampleSink.cpp
+++ /dev/null
@@ -1,159 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "SampleSink.h"
-#include "AudioSinkFilter.h"
-#include "AudioSinkInputPin.h"
-#include "VideoUtils.h"
-#include "mozilla/Logging.h"
-
-using namespace mozilla::media;
-
-namespace mozilla {
-
-static LazyLogModule gDirectShowLog("DirectShowDecoder");
-#define LOG(...) MOZ_LOG(gDirectShowLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
-
-SampleSink::SampleSink()
- : mMonitor("SampleSink"),
- mIsFlushing(false),
- mAtEOS(false)
-{
- MOZ_COUNT_CTOR(SampleSink);
-}
-
-SampleSink::~SampleSink()
-{
- MOZ_COUNT_DTOR(SampleSink);
-}
-
-void
-SampleSink::SetAudioFormat(const WAVEFORMATEX* aInFormat)
-{
- NS_ENSURE_TRUE(aInFormat, );
- ReentrantMonitorAutoEnter mon(mMonitor);
- memcpy(&mAudioFormat, aInFormat, sizeof(WAVEFORMATEX));
-}
-
-void
-SampleSink::GetAudioFormat(WAVEFORMATEX* aOutFormat)
-{
- MOZ_ASSERT(aOutFormat);
- ReentrantMonitorAutoEnter mon(mMonitor);
- memcpy(aOutFormat, &mAudioFormat, sizeof(WAVEFORMATEX));
-}
-
-HRESULT
-SampleSink::Receive(IMediaSample* aSample)
-{
- ReentrantMonitorAutoEnter mon(mMonitor);
-
- while (true) {
- if (mIsFlushing) {
- return S_FALSE;
- }
- if (!mSample) {
- break;
- }
- if (mAtEOS) {
- return E_UNEXPECTED;
- }
- // Wait until the consumer thread consumes the sample.
- mon.Wait();
- }
-
- if (MOZ_LOG_TEST(gDirectShowLog, LogLevel::Debug)) {
- REFERENCE_TIME start = 0, end = 0;
- HRESULT hr = aSample->GetMediaTime(&start, &end);
- LOG("SampleSink::Receive() [%4.2lf-%4.2lf]",
- (double)RefTimeToUsecs(start) / USECS_PER_S,
- (double)RefTimeToUsecs(end) / USECS_PER_S);
- }
-
- mSample = aSample;
- // Notify the signal, to awaken the consumer thread in WaitForSample()
- // if necessary.
- mon.NotifyAll();
- return S_OK;
-}
-
-HRESULT
-SampleSink::Extract(RefPtr<IMediaSample>& aOutSample)
-{
- ReentrantMonitorAutoEnter mon(mMonitor);
- // Loop until we have a sample, or we should abort.
- while (true) {
- if (mIsFlushing) {
- return S_FALSE;
- }
- if (mSample) {
- break;
- }
- if (mAtEOS) {
- // Order is important here, if we have a sample, we should return it
- // before reporting EOS.
- return E_UNEXPECTED;
- }
- // Wait until the producer thread gives us a sample.
- mon.Wait();
- }
- aOutSample = mSample;
-
- if (MOZ_LOG_TEST(gDirectShowLog, LogLevel::Debug)) {
- int64_t start = 0, end = 0;
- mSample->GetMediaTime(&start, &end);
- LOG("SampleSink::Extract() [%4.2lf-%4.2lf]",
- (double)RefTimeToUsecs(start) / USECS_PER_S,
- (double)RefTimeToUsecs(end) / USECS_PER_S);
- }
-
- mSample = nullptr;
- // Notify the signal, to awaken the producer thread in Receive()
- // if necessary.
- mon.NotifyAll();
- return S_OK;
-}
-
-void
-SampleSink::Flush()
-{
- LOG("SampleSink::Flush()");
- ReentrantMonitorAutoEnter mon(mMonitor);
- mIsFlushing = true;
- mSample = nullptr;
- mon.NotifyAll();
-}
-
-void
-SampleSink::Reset()
-{
- LOG("SampleSink::Reset()");
- ReentrantMonitorAutoEnter mon(mMonitor);
- mIsFlushing = false;
- mAtEOS = false;
-}
-
-void
-SampleSink::SetEOS()
-{
- LOG("SampleSink::SetEOS()");
- ReentrantMonitorAutoEnter mon(mMonitor);
- mAtEOS = true;
- // Notify to unblock any threads waiting for samples in
- // Extract() or Receive(). Now that we're at EOS, no more samples
- // will come!
- mon.NotifyAll();
-}
-
-bool
-SampleSink::AtEOS()
-{
- ReentrantMonitorAutoEnter mon(mMonitor);
- return mAtEOS && !mSample;
-}
-
-} // namespace mozilla
-
diff --git a/dom/media/directshow/SampleSink.h b/dom/media/directshow/SampleSink.h
deleted file mode 100644
index 6a1af9fee..000000000
--- a/dom/media/directshow/SampleSink.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#if !defined(SampleSink_h_)
-#define SampleSink_h_
-
-#include "BaseFilter.h"
-#include "DirectShowUtils.h"
-#include "mozilla/RefPtr.h"
-#include "mozilla/ReentrantMonitor.h"
-
-namespace mozilla {
-
-class SampleSink {
-public:
- SampleSink();
- virtual ~SampleSink();
-
- // Sets the audio format of the incoming samples. The upstream filter
- // calls this. This makes a copy.
- void SetAudioFormat(const WAVEFORMATEX* aInFormat);
-
- // Copies the format of incoming audio samples into *aOutFormat.
- void GetAudioFormat(WAVEFORMATEX* aOutFormat);
-
- // Called when a sample is delivered by the DirectShow graph to the sink.
- // The decode thread retrieves the sample by calling Extract().
- // Blocks if there's already a sample waiting to be consumed by the decode
- // thread.
- HRESULT Receive(IMediaSample* aSample);
-
- // Retrieves a sample from the sample queue, blocking until one becomes
- // available or until an error occurs. Returns S_FALSE if the sink is
- // flushing, and E_UNEXPECTED once the end of stream has been reached.
- HRESULT Extract(RefPtr<IMediaSample>& aOutSample);
-
- // Unblocks any threads waiting in Receive() or Extract().
- // Clears mSample, which unblocks the upstream producer.
- void Flush();
-
- // Opens up the sink to receive more samples in Receive().
- // Clears EOS flag.
- void Reset();
-
- // Marks that we've reached the end of the stream.
- void SetEOS();
-
- // Returns whether we're at end of stream.
- bool AtEOS();
-
-private:
- // All data in this class is synchronized by mMonitor.
- ReentrantMonitor mMonitor;
- RefPtr<IMediaSample> mSample;
-
- // Format of the audio stream we're receiving.
- WAVEFORMATEX mAudioFormat;
-
- bool mIsFlushing;
- bool mAtEOS;
-};
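-
-// Hand-off sketch: the graph's streaming thread produces and the decode
-// thread consumes, one sample at a time. The variable names below are
-// illustrative only.
-//
-//   // Producer (e.g. from AudioSinkInputPin):
-//   sink->Receive(sample);                 // Blocks while a sample is pending.
-//
-//   // Consumer (decode thread):
-//   RefPtr<IMediaSample> sample;
-//   HRESULT hr = sink->Extract(sample);    // Blocks until a sample arrives;
-//                                          // S_FALSE = flushing,
-//                                          // E_UNEXPECTED = end of stream.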
-
-} // namespace mozilla
-
-#endif // SampleSink_h_
diff --git a/dom/media/directshow/SourceFilter.cpp b/dom/media/directshow/SourceFilter.cpp
deleted file mode 100644
index 4c5a0882c..000000000
--- a/dom/media/directshow/SourceFilter.cpp
+++ /dev/null
@@ -1,683 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "SourceFilter.h"
-#include "MediaResource.h"
-#include "mozilla/RefPtr.h"
-#include "DirectShowUtils.h"
-#include "MP3FrameParser.h"
-#include "mozilla/Logging.h"
-#include <algorithm>
-
-using namespace mozilla::media;
-
-namespace mozilla {
-
-// Define to trace what's on...
-//#define DEBUG_SOURCE_TRACE 1
-
-#if defined (DEBUG_SOURCE_TRACE)
-static LazyLogModule gDirectShowLog("DirectShowDecoder");
-#define DIRECTSHOW_LOG(...) MOZ_LOG(gDirectShowLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
-#else
-#define DIRECTSHOW_LOG(...)
-#endif
-
-static HRESULT
-DoGetInterface(IUnknown* aUnknown, void** aInterface)
-{
- if (!aInterface)
- return E_POINTER;
- *aInterface = aUnknown;
- aUnknown->AddRef();
- return S_OK;
-}
-
-// Stores details of IAsyncReader::Request().
-class ReadRequest {
-public:
-
- ReadRequest(IMediaSample* aSample,
- DWORD_PTR aDwUser,
- uint32_t aOffset,
- uint32_t aCount)
- : mSample(aSample),
- mDwUser(aDwUser),
- mOffset(aOffset),
- mCount(aCount)
- {
- MOZ_COUNT_CTOR(ReadRequest);
- }
-
- ~ReadRequest() {
- MOZ_COUNT_DTOR(ReadRequest);
- }
-
- RefPtr<IMediaSample> mSample;
- DWORD_PTR mDwUser;
- uint32_t mOffset;
- uint32_t mCount;
-};
-
- // A wrapper around a media resource that exposes only a partition of the
- // underlying resource to the caller. The partition runs from an offset to
- // the end of the stream, and this object translates the offsets and lengths
- // seen by the caller into absolute offsets in the underlying stream.
-class MediaResourcePartition {
-public:
- MediaResourcePartition(MediaResource* aResource,
- int64_t aDataStart)
- : mResource(aResource),
- mDataOffset(aDataStart)
- {}
-
- int64_t GetLength() {
- int64_t len = mResource.GetLength();
- if (len == -1) {
- return len;
- }
- return std::max<int64_t>(0, len - mDataOffset);
- }
- nsresult ReadAt(int64_t aOffset, char* aBuffer,
- uint32_t aCount, uint32_t* aBytes)
- {
- return mResource.ReadAt(aOffset + mDataOffset,
- aBuffer,
- aCount,
- aBytes);
- }
- int64_t GetCachedDataEnd() {
- int64_t tell = mResource.GetResource()->Tell();
- int64_t dataEnd =
- mResource.GetResource()->GetCachedDataEnd(tell) - mDataOffset;
- return dataEnd;
- }
-private:
- // MediaResource from which we read data.
- MediaResourceIndex mResource;
- int64_t mDataOffset;
-};
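-
-// For example, with aDataStart == 4096 (say, a leading ID3v2 tag), a caller's
-// ReadAt(0, ...) becomes an absolute read at offset 4096, and GetLength()
-// shrinks by the same 4096 bytes.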
-
-
-// Output pin for SourceFilter, which implements IAsyncReader, to
-// allow downstream filters to pull/read data from it. Downstream pins
-// register to read data using Request(), and asynchronously wait for the
-// reads to complete using WaitForNext(). They may also synchronously read
-// using SyncRead(). This class is a delegate (tear off) of
-// SourceFilter.
-//
-// We can expose only a segment of the MediaResource to the filter graph.
-// This is used to strip off the ID3v2 tags from the stream, as DirectShow
-// has trouble parsing some headers.
-//
-// Implements:
-// * IAsyncReader
-// * IPin
-// * IQualityControl
-// * IUnknown
-//
-class DECLSPEC_UUID("18e5cfb2-1015-440c-a65c-e63853235894")
-OutputPin : public IAsyncReader,
- public BasePin
-{
-public:
-
- OutputPin(MediaResource* aMediaResource,
- SourceFilter* aParent,
- CriticalSection& aFilterLock,
- int64_t aMP3DataStart);
- virtual ~OutputPin();
-
- // IUnknown
- // Defer ref counting to BasePin, which defers to the owning BaseFilter.
- STDMETHODIMP_(ULONG) AddRef() override { return BasePin::AddRef(); }
- STDMETHODIMP_(ULONG) Release() override { return BasePin::Release(); }
- STDMETHODIMP QueryInterface(REFIID iid, void** ppv) override;
-
- // BasePin Overrides.
- // Determines if the pin accepts a specific media type.
- HRESULT CheckMediaType(const MediaType* aMediaType) override;
-
- // Retrieves a preferred media type, by index value.
- HRESULT GetMediaType(int aPosition, MediaType* aMediaType) override;
-
- // Releases the pin from a connection.
- HRESULT BreakConnect(void) override;
-
- // Determines whether a pin connection is suitable.
- HRESULT CheckConnect(IPin* aPin) override;
-
-
- // IAsyncReader overrides
-
- // The RequestAllocator method requests an allocator during the
- // pin connection.
- STDMETHODIMP RequestAllocator(IMemAllocator* aPreferred,
- ALLOCATOR_PROPERTIES* aProps,
- IMemAllocator** aActual) override;
-
- // The Request method queues an asynchronous request for data. Downstream
- // will call WaitForNext() when they want to retrieve the result.
- STDMETHODIMP Request(IMediaSample* aSample, DWORD_PTR aUserData) override;
-
- // The WaitForNext method waits for the next pending read request
- // to complete. This method fails if the graph is flushing.
- // Defers to SyncRead/5.
- STDMETHODIMP WaitForNext(DWORD aTimeout,
- IMediaSample** aSamples,
- DWORD_PTR* aUserData) override;
-
- // The SyncReadAligned method performs a synchronous read. The method
- // blocks until the request is completed. Defers to SyncRead/5. This
- // method does not fail if the graph is flushing.
- STDMETHODIMP SyncReadAligned(IMediaSample* aSample) override;
-
- // The SyncRead method performs a synchronous read. The method blocks
- // until the request is completed. Defers to SyncRead/5. This
- // method does not fail if the graph is flushing.
- STDMETHODIMP SyncRead(LONGLONG aPosition, LONG aLength, BYTE* aBuffer) override;
-
- // The Length method retrieves the total length of the stream.
- STDMETHODIMP Length(LONGLONG* aTotal, LONGLONG* aAvailable) override;
-
- // IPin Overrides
- STDMETHODIMP BeginFlush(void) override;
- STDMETHODIMP EndFlush(void) override;
-
- uint32_t GetAndResetBytesConsumedCount();
-
-private:
-
- // Protects thread-shared data/structures (mFlushCount, mPendingReads).
- // WaitForNext() also waits on this monitor
- CriticalSection& mPinLock;
-
- // Signal used with mPinLock to implement WaitForNext().
- Signal mSignal;
-
- // The filter that owns us. Weak reference, as we're a delegate (tear off).
- SourceFilter* mParentSource;
-
- MediaResourcePartition mResource;
-
- // Counter, inc'd in BeginFlush(), dec'd in EndFlush(). Calls to this can
- // come from multiple threads and can interleave, hence the counter.
- int32_t mFlushCount;
-
- // Number of bytes that have been read from the output pin since the last
- // time GetAndResetBytesConsumedCount() was called.
- uint32_t mBytesConsumed;
-
- // Deque of ReadRequest* for reads that are yet to be serviced.
- // ReadRequests are stored on the heap; whoever pops them must delete them.
- nsDeque mPendingReads;
-
- // Flags if the downstream pin has QI'd for IAsyncReader. We refuse
- // connection if they don't query, as it means they're assuming that we're
- // a push filter, and we're not.
- bool mQueriedForAsyncReader;
-
-};
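-
-// Pull-model sketch: how a downstream parser typically drives this pin via
-// IAsyncReader. The variable names below are illustrative only.
-//
-//   RefPtr<IAsyncReader> reader;                  // QI'd from the output pin.
-//   reader->Request(sample, userData);            // Queue an async read...
-//   IMediaSample* done = nullptr;
-//   DWORD_PTR user = 0;
-//   reader->WaitForNext(INFINITE, &done, &user);  // ...then collect it later.
-//   // Or read synchronously:
-//   reader->SyncRead(offset, length, buffer);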
-
-// For mingw __uuidof support
-#ifdef __CRT_UUID_DECL
-}
-__CRT_UUID_DECL(mozilla::OutputPin, 0x18e5cfb2,0x1015,0x440c,0xa6,0x5c,0xe6,0x38,0x53,0x23,0x58,0x94);
-namespace mozilla {
-#endif
-
-OutputPin::OutputPin(MediaResource* aResource,
- SourceFilter* aParent,
- CriticalSection& aFilterLock,
- int64_t aMP3DataStart)
- : BasePin(static_cast<BaseFilter*>(aParent),
- &aFilterLock,
- L"MozillaOutputPin",
- PINDIR_OUTPUT),
- mPinLock(aFilterLock),
- mSignal(&mPinLock),
- mParentSource(aParent),
- mResource(aResource, aMP3DataStart),
- mFlushCount(0),
- mBytesConsumed(0),
- mQueriedForAsyncReader(false)
-{
- MOZ_COUNT_CTOR(OutputPin);
- DIRECTSHOW_LOG("OutputPin::OutputPin()");
-}
-
-OutputPin::~OutputPin()
-{
- MOZ_COUNT_DTOR(OutputPin);
- DIRECTSHOW_LOG("OutputPin::~OutputPin()");
-}
-
-HRESULT
-OutputPin::BreakConnect()
-{
- mQueriedForAsyncReader = false;
- return BasePin::BreakConnect();
-}
-
-STDMETHODIMP
-OutputPin::QueryInterface(REFIID aIId, void** aInterface)
-{
- if (aIId == IID_IAsyncReader) {
- mQueriedForAsyncReader = true;
- return DoGetInterface(static_cast<IAsyncReader*>(this), aInterface);
- }
-
- if (aIId == __uuidof(OutputPin)) {
- AddRef();
- *aInterface = this;
- return S_OK;
- }
-
- return BasePin::QueryInterface(aIId, aInterface);
-}
-
-HRESULT
-OutputPin::CheckConnect(IPin* aPin)
-{
- // Our connection is only suitable if the downstream pin knows
- // that we're asynchronous (i.e. it queried for IAsyncReader).
- return mQueriedForAsyncReader ? S_OK : S_FALSE;
-}
-
-HRESULT
-OutputPin::CheckMediaType(const MediaType* aMediaType)
-{
- const MediaType *myMediaType = mParentSource->GetMediaType();
-
- if (IsEqualGUID(aMediaType->majortype, myMediaType->majortype) &&
- IsEqualGUID(aMediaType->subtype, myMediaType->subtype) &&
- IsEqualGUID(aMediaType->formattype, myMediaType->formattype))
- {
- DIRECTSHOW_LOG("OutputPin::CheckMediaType() Match: major=%s minor=%s TC=%d FSS=%d SS=%u",
- GetDirectShowGuidName(aMediaType->majortype),
- GetDirectShowGuidName(aMediaType->subtype),
- aMediaType->TemporalCompression(),
- aMediaType->bFixedSizeSamples,
- aMediaType->SampleSize());
- return S_OK;
- }
-
- DIRECTSHOW_LOG("OutputPin::CheckMediaType() Failed to match: major=%s minor=%s TC=%d FSS=%d SS=%u",
- GetDirectShowGuidName(aMediaType->majortype),
- GetDirectShowGuidName(aMediaType->subtype),
- aMediaType->TemporalCompression(),
- aMediaType->bFixedSizeSamples,
- aMediaType->SampleSize());
- return S_FALSE;
-}
-
-HRESULT
-OutputPin::GetMediaType(int aPosition, MediaType* aMediaType)
-{
- if (!aMediaType)
- return E_POINTER;
-
- if (aPosition == 0) {
- aMediaType->Assign(mParentSource->GetMediaType());
- return S_OK;
- }
- return VFW_S_NO_MORE_ITEMS;
-}
-
-static inline bool
-IsPowerOf2(int32_t x) {
- return ((-x & x) == x);
-}
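-
-// For x > 0, (x & -x) isolates the lowest set bit, so it equals x exactly
-// when x has a single bit set, i.e. when x is a power of two.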
-
-STDMETHODIMP
-OutputPin::RequestAllocator(IMemAllocator* aPreferred,
- ALLOCATOR_PROPERTIES* aProps,
- IMemAllocator** aActual)
-{
- // Require the downstream pin to suggest what they want...
- if (!aPreferred) return E_POINTER;
- if (!aProps) return E_POINTER;
- if (!aActual) return E_POINTER;
-
- // We only care about alignment - our allocator will reject anything
- // which isn't power-of-2 aligned, so try a 4-byte aligned allocator.
- ALLOCATOR_PROPERTIES props;
- memcpy(&props, aProps, sizeof(ALLOCATOR_PROPERTIES));
- if (aProps->cbAlign == 0 || !IsPowerOf2(aProps->cbAlign)) {
- props.cbAlign = 4;
- }
-
- // Limit allocator's number of buffers. We know that the media will most
- // likely be bound by network speed, not by decoding speed. We also
- // store the incoming data in a Gecko stream; if we don't limit buffers
- // here we'll end up duplicating a lot of storage. We must have enough
- // space for audio key frames to fit in the first batch of buffers however,
- // else pausing may fail for some downstream decoders.
- if (props.cBuffers > BaseFilter::sMaxNumBuffers) {
- props.cBuffers = BaseFilter::sMaxNumBuffers;
- }
-
- // The allocator properties that are actually used. We don't store
- // this, we need it for SetProperties() below to succeed.
- ALLOCATOR_PROPERTIES actualProps;
- HRESULT hr;
-
- if (aPreferred) {
- // Play nice and prefer the downstream pin's preferred allocator.
- hr = aPreferred->SetProperties(&props, &actualProps);
- if (SUCCEEDED(hr)) {
- aPreferred->AddRef();
- *aActual = aPreferred;
- return S_OK;
- }
- }
-
- // Else the downstream pin's preferred allocator rejected our properties, so create our own...
-
- // Just create a default allocator. It's highly unlikely that we'll use
- // this anyway, as most parsers insist on using their own allocators.
- RefPtr<IMemAllocator> allocator;
- hr = CoCreateInstance(CLSID_MemoryAllocator,
- 0,
- CLSCTX_INPROC_SERVER,
- IID_IMemAllocator,
- getter_AddRefs(allocator));
- if(FAILED(hr) || (allocator == nullptr)) {
- NS_WARNING("Can't create our own DirectShow allocator.");
- return hr;
- }
-
- // See if we can make it suitable
- hr = allocator->SetProperties(&props, &actualProps);
- if (SUCCEEDED(hr)) {
- // forget() hands our reference on |allocator| to the caller, so the
- // overall refcount is unchanged.
- allocator.forget(aActual);
- return S_OK;
- }
-
- NS_WARNING("Failed to pick an allocator");
- return hr;
-}
-
-STDMETHODIMP
-OutputPin::Request(IMediaSample* aSample, DWORD_PTR aDwUser)
-{
- if (!aSample) return E_FAIL;
-
- CriticalSectionAutoEnter lock(*mLock);
- NS_ASSERTION(!mFlushCount, "Request() while flushing");
-
- if (mFlushCount)
- return VFW_E_WRONG_STATE;
-
- REFERENCE_TIME refStart = 0, refEnd = 0;
- if (FAILED(aSample->GetTime(&refStart, &refEnd))) {
- NS_WARNING("Sample incorrectly timestamped");
- return VFW_E_SAMPLE_TIME_NOT_SET;
- }
-
- // Convert reference time to bytes.
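- // (Per the IAsyncReader contract, a sample's start/stop times carry byte
- // offsets multiplied by 10,000,000; dividing recovers plain byte positions.)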
- uint32_t start = (uint32_t)(refStart / 10000000);
- uint32_t end = (uint32_t)(refEnd / 10000000);
-
- uint32_t numBytes = end - start;
-
- ReadRequest* request = new ReadRequest(aSample,
- aDwUser,
- start,
- numBytes);
- // Memory for |request| is freed when it's popped from the pending
- // reads queue in WaitForNext().
-
- // Push this onto the queue of reads to be serviced.
- mPendingReads.Push(request);
-
- // Notify any threads blocked in WaitForNext() which are waiting for mPendingReads
- // to become non-empty.
- mSignal.Notify();
-
- return S_OK;
-}
-
-STDMETHODIMP
-OutputPin::WaitForNext(DWORD aTimeout,
- IMediaSample** aOutSample,
- DWORD_PTR* aOutDwUser)
-{
- NS_ASSERTION(aTimeout == 0 || aTimeout == INFINITE,
- "Oops, we don't handle this!");
-
- *aOutSample = nullptr;
- *aOutDwUser = 0;
-
- LONGLONG offset = 0;
- LONG count = 0;
- BYTE* buf = nullptr;
-
- {
- CriticalSectionAutoEnter lock(*mLock);
-
- // Wait until there's a pending read to service.
- while (aTimeout && mPendingReads.GetSize() == 0 && !mFlushCount) {
- // Note: No need to guard against shutdown-during-wait here, as
- // typically the thread doing the pull will have already called
- // Request(), so we won't Wait() here anyway. SyncRead() will fail
- // on shutdown.
- mSignal.Wait();
- }
-
- nsAutoPtr<ReadRequest> request(reinterpret_cast<ReadRequest*>(mPendingReads.PopFront()));
- if (!request)
- return VFW_E_WRONG_STATE;
-
- *aOutSample = request->mSample;
- *aOutDwUser = request->mDwUser;
-
- offset = request->mOffset;
- count = request->mCount;
- buf = nullptr;
- request->mSample->GetPointer(&buf);
- NS_ASSERTION(buf != nullptr, "Invalid buffer!");
-
- if (mFlushCount) {
- return VFW_E_TIMEOUT;
- }
- }
-
- return SyncRead(offset, count, buf);
-}
-
-STDMETHODIMP
-OutputPin::SyncReadAligned(IMediaSample* aSample)
-{
- {
- // Ignore reads while flushing.
- CriticalSectionAutoEnter lock(*mLock);
- if (mFlushCount) {
- return S_FALSE;
- }
- }
-
- if (!aSample)
- return E_FAIL;
-
- REFERENCE_TIME lStart = 0, lEnd = 0;
- if (FAILED(aSample->GetTime(&lStart, &lEnd))) {
- NS_WARNING("Sample incorrectly timestamped");
- return VFW_E_SAMPLE_TIME_NOT_SET;
- }
-
- // Convert reference time to bytes.
- int32_t start = (int32_t)(lStart / 10000000);
- int32_t end = (int32_t)(lEnd / 10000000);
-
- int32_t numBytes = end - start;
-
- // If the range extends off the end of stream, truncate to the end of stream
- // as per the IAsyncReader specification.
- int64_t streamLength = mResource.GetLength();
- if (streamLength != -1) {
- // We know the exact length of the stream, fail if the requested offset
- // is beyond it.
- if (start > streamLength) {
- return VFW_E_BADALIGN;
- }
-
- // If the end of the chunk to read is off the end of the stream,
- // truncate it to the end of the stream.
- if ((start + numBytes) > streamLength) {
- numBytes = (uint32_t)(streamLength - start);
- }
- }
-
- BYTE* buf=0;
- aSample->GetPointer(&buf);
-
- return SyncRead(start, numBytes, buf);
-}
-
-STDMETHODIMP
-OutputPin::SyncRead(LONGLONG aPosition,
- LONG aLength,
- BYTE* aBuffer)
-{
- MOZ_ASSERT(!NS_IsMainThread());
- NS_ENSURE_TRUE(aPosition >= 0, E_FAIL);
- NS_ENSURE_TRUE(aLength > 0, E_FAIL);
- NS_ENSURE_TRUE(aBuffer, E_POINTER);
-
- DIRECTSHOW_LOG("OutputPin::SyncRead(%lld, %d)", aPosition, aLength);
- {
- // Ignore reads while flushing.
- CriticalSectionAutoEnter lock(*mLock);
- if (mFlushCount) {
- return S_FALSE;
- }
- }
-
- uint32_t totalBytesRead = 0;
- nsresult rv = mResource.ReadAt(aPosition,
- reinterpret_cast<char*>(aBuffer),
- aLength,
- &totalBytesRead);
- if (NS_FAILED(rv)) {
- return E_FAIL;
- }
- if (totalBytesRead > 0) {
- CriticalSectionAutoEnter lock(*mLock);
- mBytesConsumed += totalBytesRead;
- }
- return (totalBytesRead == aLength) ? S_OK : S_FALSE;
-}
-
-STDMETHODIMP
-OutputPin::Length(LONGLONG* aTotal, LONGLONG* aAvailable)
-{
- HRESULT hr = S_OK;
- int64_t length = mResource.GetLength();
- if (length == -1) {
- hr = VFW_S_ESTIMATED;
- // Don't have a length. Just lie, it seems to work...
- *aTotal = INT32_MAX;
- } else {
- *aTotal = length;
- }
- if (aAvailable) {
- *aAvailable = mResource.GetCachedDataEnd();
- }
-
- DIRECTSHOW_LOG("OutputPin::Length() len=%lld avail=%lld", *aTotal, *aAvailable);
-
- return hr;
-}
-
-STDMETHODIMP
-OutputPin::BeginFlush()
-{
- CriticalSectionAutoEnter lock(*mLock);
- mFlushCount++;
- mSignal.Notify();
- return S_OK;
-}
-
-STDMETHODIMP
-OutputPin::EndFlush(void)
-{
- CriticalSectionAutoEnter lock(*mLock);
- mFlushCount--;
- return S_OK;
-}
-
-uint32_t
-OutputPin::GetAndResetBytesConsumedCount()
-{
- CriticalSectionAutoEnter lock(*mLock);
- uint32_t bytesConsumed = mBytesConsumed;
- mBytesConsumed = 0;
- return bytesConsumed;
-}
-
-SourceFilter::SourceFilter(const GUID& aMajorType,
- const GUID& aSubType)
- : BaseFilter(L"MozillaDirectShowSource", __uuidof(SourceFilter))
-{
- MOZ_COUNT_CTOR(SourceFilter);
- mMediaType.majortype = aMajorType;
- mMediaType.subtype = aSubType;
-
- DIRECTSHOW_LOG("SourceFilter Constructor(%s, %s)",
- GetDirectShowGuidName(aMajorType),
- GetDirectShowGuidName(aSubType));
-}
-
-SourceFilter::~SourceFilter()
-{
- MOZ_COUNT_DTOR(SourceFilter);
- DIRECTSHOW_LOG("SourceFilter Destructor()");
-}
-
-BasePin*
-SourceFilter::GetPin(int n)
-{
- if (n == 0) {
- NS_ASSERTION(mOutputPin != 0, "GetPin with no pin!");
- return static_cast<BasePin*>(mOutputPin);
- } else {
- return nullptr;
- }
-}
-
-// Gets the media type we're supplying.
-const MediaType*
-SourceFilter::GetMediaType() const
-{
- return &mMediaType;
-}
-
-nsresult
-SourceFilter::Init(MediaResource* aResource, int64_t aMP3Offset)
-{
- DIRECTSHOW_LOG("SourceFilter::Init()");
-
- mOutputPin = new OutputPin(aResource,
- this,
- mLock,
- aMP3Offset);
- NS_ENSURE_TRUE(mOutputPin != nullptr, NS_ERROR_FAILURE);
-
- return NS_OK;
-}
-
-uint32_t
-SourceFilter::GetAndResetBytesConsumedCount()
-{
- return mOutputPin->GetAndResetBytesConsumedCount();
-}
-
-
-} // namespace mozilla
diff --git a/dom/media/directshow/SourceFilter.h b/dom/media/directshow/SourceFilter.h
deleted file mode 100644
index d5ce2770e..000000000
--- a/dom/media/directshow/SourceFilter.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#if !defined(nsDirectShowSource_h___)
-#define nsDirectShowSource_h___
-
-#include "BaseFilter.h"
-#include "BasePin.h"
-#include "MediaType.h"
-
-#include "nsDeque.h"
-#include "nsAutoPtr.h"
-#include "DirectShowUtils.h"
-#include "mozilla/RefPtr.h"
-
-namespace mozilla {
-
-class MediaResource;
-class OutputPin;
-
-
-// SourceFilter is an asynchronous DirectShow source filter which
-// reads from a MediaResource and supplies data downstream via a pull model
-// using OutputPin. It is used to supply a generic byte stream into
-// DirectShow.
-//
-// Implements:
-// * IBaseFilter
-// * IMediaFilter
-// * IPersist
-// * IUnknown
-//
-class DECLSPEC_UUID("5c2a7ad0-ba82-4659-9178-c4719a2765d6")
-SourceFilter : public media::BaseFilter
-{
-public:
-
- // Constructs source filter to deliver given media type.
- SourceFilter(const GUID& aMajorType, const GUID& aSubType);
- ~SourceFilter();
-
- nsresult Init(MediaResource *aResource, int64_t aMP3Offset);
-
- // BaseFilter overrides.
- // Only one output - the byte stream.
- int GetPinCount() override { return 1; }
-
- media::BasePin* GetPin(int n) override;
-
- // Gets the media type we're supplying.
- const media::MediaType* GetMediaType() const;
-
- uint32_t GetAndResetBytesConsumedCount();
-
-protected:
-
- // Our async pull output pin.
- nsAutoPtr<OutputPin> mOutputPin;
-
- // Type of byte stream we output.
- media::MediaType mMediaType;
-
-};
-
-} // namespace mozilla
-
-// For mingw __uuidof support
-#ifdef __CRT_UUID_DECL
-__CRT_UUID_DECL(mozilla::SourceFilter, 0x5c2a7ad0,0xba82,0x4659,0x91,0x78,0xc4,0x71,0x9a,0x27,0x65,0xd6);
-#endif
-
-#endif
diff --git a/dom/media/directshow/moz.build b/dom/media/directshow/moz.build
deleted file mode 100644
index 8a9b76200..000000000
--- a/dom/media/directshow/moz.build
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-EXPORTS += [
- 'AudioSinkFilter.h',
- 'AudioSinkInputPin.h',
- 'DirectShowDecoder.h',
- 'DirectShowReader.h',
- 'DirectShowUtils.h',
-]
-
-UNIFIED_SOURCES += [
- 'DirectShowDecoder.cpp',
- 'DirectShowUtils.cpp',
- 'SourceFilter.cpp',
-]
-
-SOURCES += [
- 'AudioSinkFilter.cpp',
- 'AudioSinkInputPin.cpp',
- 'DirectShowReader.cpp',
- 'SampleSink.cpp',
-]
-
-# If WebRTC isn't being built, we need to compile the DirectShow base classes so that
-# they're available at link time.
-if not CONFIG['MOZ_WEBRTC']:
- SOURCES += [
- '/media/webrtc/trunk/webrtc/modules/video_capture/windows/BaseFilter.cpp',
- '/media/webrtc/trunk/webrtc/modules/video_capture/windows/BaseInputPin.cpp',
- '/media/webrtc/trunk/webrtc/modules/video_capture/windows/BasePin.cpp',
- '/media/webrtc/trunk/webrtc/modules/video_capture/windows/MediaType.cpp',
- ]
-
-FINAL_LIBRARY = 'xul'
-LOCAL_INCLUDES += [
- '/media/webrtc/trunk/webrtc/modules/video_capture/windows',
-]
diff --git a/dom/media/fmp4/MP4Decoder.cpp b/dom/media/fmp4/MP4Decoder.cpp
index fdd6f2c7e..6954e9757 100644
--- a/dom/media/fmp4/MP4Decoder.cpp
+++ b/dom/media/fmp4/MP4Decoder.cpp
@@ -83,10 +83,6 @@ MP4Decoder::CanHandleMediaType(const MediaContentType& aType,
const bool isMP4Audio = aType.GetMIMEType().EqualsASCII("audio/mp4") ||
aType.GetMIMEType().EqualsASCII("audio/x-m4a");
const bool isMP4Video =
- // On B2G, treat 3GPP as MP4 when Gonk PDM is available.
-#ifdef MOZ_GONK_MEDIACODEC
- aType.GetMIMEType().EqualsASCII(VIDEO_3GPP) ||
-#endif
aType.GetMIMEType().EqualsASCII("video/mp4") ||
aType.GetMIMEType().EqualsASCII("video/quicktime") ||
aType.GetMIMEType().EqualsASCII("video/x-m4v");
@@ -139,6 +135,14 @@ MP4Decoder::CanHandleMediaType(const MediaContentType& aType,
NS_LITERAL_CSTRING("audio/flac"), aType));
continue;
}
+#ifdef MOZ_AV1
+ if (IsAV1CodecString(codec)) {
+ trackInfos.AppendElement(
+ CreateTrackInfoWithMIMETypeAndContentTypeExtraParameters(
+ NS_LITERAL_CSTRING("video/av1"), aType));
+ continue;
+ }
+#endif
// Note: Only accept H.264 in a video content type, not in an audio
// content type.
if (IsWhitelistedH264Codec(codec) && isMP4Video) {
diff --git a/dom/media/fmp4/MP4Stream.cpp b/dom/media/fmp4/MP4Stream.cpp
index 615a7dc01..9a79cac7a 100644
--- a/dom/media/fmp4/MP4Stream.cpp
+++ b/dom/media/fmp4/MP4Stream.cpp
@@ -48,9 +48,6 @@ MP4Stream::BlockingReadIntoCache(int64_t aOffset, size_t aCount, Monitor* aToUnl
return true;
}
-// We surreptitiously reimplement the supposedly-blocking ReadAt as a non-
-// blocking CachedReadAt, and record when it fails. This allows MP4Reader
-// to retry the read as an actual blocking read without holding the lock.
bool
MP4Stream::ReadAt(int64_t aOffset, void* aBuffer, size_t aCount,
size_t* aBytesRead)
diff --git a/dom/media/fmp4/moz.build b/dom/media/fmp4/moz.build
index 6a249ae3e..a79fb0229 100644
--- a/dom/media/fmp4/moz.build
+++ b/dom/media/fmp4/moz.build
@@ -20,6 +20,3 @@ SOURCES += [
]
FINAL_LIBRARY = 'xul'
-
-if CONFIG['MOZ_GONK_MEDIACODEC']:
- DEFINES['MOZ_GONK_MEDIACODEC'] = True
diff --git a/dom/media/gmp/GMPParent.cpp b/dom/media/gmp/GMPParent.cpp
index 40c3e5141..418f14736 100644
--- a/dom/media/gmp/GMPParent.cpp
+++ b/dom/media/gmp/GMPParent.cpp
@@ -516,8 +516,7 @@ GMPCapability::Supports(const nsTArray<GMPCapability>& aCapabilities,
#ifdef XP_WIN
// Clearkey on Windows advertises that it can decode in its GMP info
// file, but uses Windows Media Foundation to decode. That's not present
- // on Windows XP, and on some Vista, Windows N, and KN variants without
- // certain services packs.
+ // on Windows N and KN variants without certain services packs.
if (tag.Equals(kEMEKeySystemClearkey)) {
if (capabilities.mAPIName.EqualsLiteral(GMP_API_VIDEO_DECODER)) {
if (!WMFDecoderModule::HasH264()) {
diff --git a/dom/media/gtest/Cargo.toml b/dom/media/gtest/Cargo.toml
deleted file mode 100644
index a55f8fb68..000000000
--- a/dom/media/gtest/Cargo.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-[package]
-name = "mp4parse-gtest"
-version = "0.1.0"
-authors = ["nobody@mozilla.org"]
-
-[lib]
-path = "hello.rs"
diff --git a/dom/media/gtest/TestMP3Demuxer.cpp b/dom/media/gtest/TestMP3Demuxer.cpp
index 8d2109f00..934acb60e 100644
--- a/dom/media/gtest/TestMP3Demuxer.cpp
+++ b/dom/media/gtest/TestMP3Demuxer.cpp
@@ -11,7 +11,6 @@
#include "MockMediaResource.h"
using namespace mozilla;
-using namespace mozilla::mp3;
using media::TimeUnit;
diff --git a/dom/media/gtest/TestMP4Reader.cpp b/dom/media/gtest/TestMP4Reader.cpp
deleted file mode 100644
index f08f7a40d..000000000
--- a/dom/media/gtest/TestMP4Reader.cpp
+++ /dev/null
@@ -1,217 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "gtest/gtest.h"
-#include "MP4Reader.h"
-#include "MP4Decoder.h"
-#include "mozilla/SharedThreadPool.h"
-#include "MockMediaResource.h"
-#include "MockMediaDecoderOwner.h"
-#include "mozilla/Preferences.h"
-#include "TimeUnits.h"
-
-using namespace mozilla;
-using namespace mozilla::dom;
-
-class TestBinding
-{
-public:
- NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TestBinding);
-
- RefPtr<MP4Decoder> decoder;
- RefPtr<MockMediaResource> resource;
- RefPtr<MP4Reader> reader;
-
- explicit TestBinding(const char* aFileName = "gizmo.mp4")
- : decoder(new MP4Decoder())
- , resource(new MockMediaResource(aFileName))
- , reader(new MP4Reader(decoder))
- {
- EXPECT_EQ(NS_OK, Preferences::SetBool(
- "media.use-blank-decoder", true));
-
- EXPECT_EQ(NS_OK, resource->Open(nullptr));
- decoder->SetResource(resource);
-
- reader->Init(nullptr);
- // This needs to be done before invoking GetBuffered. This is normally
- // done by MediaDecoderStateMachine.
- reader->DispatchSetStartTime(0);
- }
-
- void Init() {
- nsCOMPtr<nsIThread> thread;
- nsCOMPtr<nsIRunnable> r = NewRunnableMethod(this, &TestBinding::ReadMetadata);
- nsresult rv = NS_NewThread(getter_AddRefs(thread), r);
- EXPECT_EQ(NS_OK, rv);
- thread->Shutdown();
- }
-
-private:
- virtual ~TestBinding()
- {
- {
- RefPtr<TaskQueue> queue = reader->OwnerThread();
- nsCOMPtr<nsIRunnable> task = NewRunnableMethod(reader, &MP4Reader::Shutdown);
- // Hackily bypass the tail dispatcher so that we can AwaitShutdownAndIdle.
- // In production code we'd use BeginShutdown + promises.
- queue->Dispatch(task.forget(), AbstractThread::AssertDispatchSuccess,
- AbstractThread::TailDispatch);
- queue->AwaitShutdownAndIdle();
- }
- decoder = nullptr;
- resource = nullptr;
- reader = nullptr;
- SharedThreadPool::SpinUntilEmpty();
- }
-
- void ReadMetadata()
- {
- MediaInfo info;
- MetadataTags* tags;
- EXPECT_EQ(NS_OK, reader->ReadMetadata(&info, &tags));
- }
-};
-
-TEST(MP4Reader, BufferedRange)
-{
- RefPtr<TestBinding> b = new TestBinding();
- b->Init();
-
- // Video 3-4 sec, audio 2.986666-4.010666 sec
- b->resource->MockAddBufferedRange(248400, 327455);
-
- media::TimeIntervals ranges = b->reader->GetBuffered();
- EXPECT_EQ(1U, ranges.Length());
- EXPECT_NEAR(270000 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
- EXPECT_NEAR(360000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
-}
-
-TEST(MP4Reader, BufferedRangeMissingLastByte)
-{
- RefPtr<TestBinding> b = new TestBinding();
- b->Init();
-
- // Dropping the last byte of the video
- b->resource->MockClearBufferedRanges();
- b->resource->MockAddBufferedRange(248400, 324912);
- b->resource->MockAddBufferedRange(324913, 327455);
-
- media::TimeIntervals ranges = b->reader->GetBuffered();
- EXPECT_EQ(1U, ranges.Length());
- EXPECT_NEAR(270000.0 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
- EXPECT_NEAR(357000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
-}
-
-TEST(MP4Reader, BufferedRangeSyncFrame)
-{
- RefPtr<TestBinding> b = new TestBinding();
- b->Init();
-
- // Check that missing the first byte at 2 seconds skips right through to 3
- // seconds because of a missing sync frame
- b->resource->MockClearBufferedRanges();
- b->resource->MockAddBufferedRange(146336, 327455);
-
- media::TimeIntervals ranges = b->reader->GetBuffered();
- EXPECT_EQ(1U, ranges.Length());
- EXPECT_NEAR(270000.0 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
- EXPECT_NEAR(360000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
-}
-
-TEST(MP4Reader, CompositionOrder)
-{
- RefPtr<TestBinding> b = new TestBinding("mediasource_test.mp4");
- b->Init();
-
- // The first 5 video samples of this file are:
- // Video timescale=2500
- // Frame Start Size Time Duration Sync
- // 1 48 5455 166 83 Yes
- // 2 5503 145 249 83
- // 3 6228 575 581 83
- // 4 7383 235 415 83
- // 5 8779 183 332 83
- // 6 9543 191 498 83
- //
- // Audio timescale=44100
- // 1 5648 580 0 1024 Yes
- // 2 6803 580 1024 1058 Yes
- // 3 7618 581 2082 1014 Yes
- // 4 8199 580 3096 1015 Yes
- // 5 8962 581 4111 1014 Yes
- // 6 9734 580 5125 1014 Yes
- // 7 10314 581 6139 1059 Yes
- // 8 11207 580 7198 1014 Yes
- // 9 12035 581 8212 1014 Yes
- // 10 12616 580 9226 1015 Yes
- // 11 13220 581 10241 1014 Yes
-
- b->resource->MockClearBufferedRanges();
- // First two frames in decoding + first audio frame
- b->resource->MockAddBufferedRange(48, 5503); // Video 1
- b->resource->MockAddBufferedRange(5503, 5648); // Video 2
- b->resource->MockAddBufferedRange(6228, 6803); // Video 3
-
- // Audio - 5 frames; 0 - 139206 us
- b->resource->MockAddBufferedRange(5648, 6228);
- b->resource->MockAddBufferedRange(6803, 7383);
- b->resource->MockAddBufferedRange(7618, 8199);
- b->resource->MockAddBufferedRange(8199, 8779);
- b->resource->MockAddBufferedRange(8962, 9563);
- b->resource->MockAddBufferedRange(9734, 10314);
- b->resource->MockAddBufferedRange(10314, 10895);
- b->resource->MockAddBufferedRange(11207, 11787);
- b->resource->MockAddBufferedRange(12035, 12616);
- b->resource->MockAddBufferedRange(12616, 13196);
- b->resource->MockAddBufferedRange(13220, 13901);
-
- media::TimeIntervals ranges = b->reader->GetBuffered();
- EXPECT_EQ(2U, ranges.Length());
-
- EXPECT_NEAR(166.0 / 2500.0, ranges.Start(0).ToSeconds(), 0.000001);
- EXPECT_NEAR(332.0 / 2500.0, ranges.End(0).ToSeconds(), 0.000001);
-
- EXPECT_NEAR(581.0 / 2500.0, ranges.Start(1).ToSeconds(), 0.000001);
- EXPECT_NEAR(11255.0 / 44100.0, ranges.End(1).ToSeconds(), 0.000001);
-}
-
-TEST(MP4Reader, Normalised)
-{
- RefPtr<TestBinding> b = new TestBinding("mediasource_test.mp4");
- b->Init();
-
- // The first 5 video samples of this file are:
- // Video timescale=2500
- // Frame Start Size Time Duration Sync
- // 1 48 5455 166 83 Yes
- // 2 5503 145 249 83
- // 3 6228 575 581 83
- // 4 7383 235 415 83
- // 5 8779 183 332 83
- // 6 9543 191 498 83
- //
- // Audio timescale=44100
- // 1 5648 580 0 1024 Yes
- // 2 6803 580 1024 1058 Yes
- // 3 7618 581 2082 1014 Yes
- // 4 8199 580 3096 1015 Yes
- // 5 8962 581 4111 1014 Yes
- // 6 9734 580 5125 1014 Yes
- // 7 10314 581 6139 1059 Yes
- // 8 11207 580 7198 1014 Yes
- // 9 12035 581 8212 1014 Yes
- // 10 12616 580 9226 1015 Yes
- // 11 13220 581 10241 1014 Yes
-
- b->resource->MockClearBufferedRanges();
- b->resource->MockAddBufferedRange(48, 13901);
-
- media::TimeIntervals ranges = b->reader->GetBuffered();
- EXPECT_EQ(1U, ranges.Length());
-
- EXPECT_NEAR(166.0 / 2500.0, ranges.Start(0).ToSeconds(), 0.000001);
- EXPECT_NEAR(11255.0 / 44100.0, ranges.End(0).ToSeconds(), 0.000001);
-}
diff --git a/dom/media/gtest/TestRust.cpp b/dom/media/gtest/TestRust.cpp
deleted file mode 100644
index 86d0e99b8..000000000
--- a/dom/media/gtest/TestRust.cpp
+++ /dev/null
@@ -1,9 +0,0 @@
-#include <stdint.h>
-#include "gtest/gtest.h"
-
-extern "C" uint8_t* test_rust();
-
-TEST(rust, CallFromCpp) {
- auto greeting = test_rust();
- EXPECT_STREQ(reinterpret_cast<char*>(greeting), "hello from rust.");
-}
diff --git a/dom/media/gtest/hello.rs b/dom/media/gtest/hello.rs
deleted file mode 100644
index cd111882a..000000000
--- a/dom/media/gtest/hello.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-#[no_mangle]
-pub extern fn test_rust() -> *const u8 {
- // NB: rust &str aren't null terminated.
- let greeting = "hello from rust.\0";
- greeting.as_ptr()
-}
diff --git a/dom/media/gtest/moz.build b/dom/media/gtest/moz.build
index d5d02bced..a7ea73807 100644
--- a/dom/media/gtest/moz.build
+++ b/dom/media/gtest/moz.build
@@ -21,7 +21,6 @@ UNIFIED_SOURCES += [
'TestMozPromise.cpp',
'TestMP3Demuxer.cpp',
'TestMP4Demuxer.cpp',
- # 'TestMP4Reader.cpp', disabled so we can turn check tests back on (bug 1175752)
'TestTrackEncoder.cpp',
'TestVideoSegment.cpp',
'TestVideoUtils.cpp',
diff --git a/dom/media/mediasource/MediaSource.cpp b/dom/media/mediasource/MediaSource.cpp
index af541bbbb..152c0085a 100644
--- a/dom/media/mediasource/MediaSource.cpp
+++ b/dom/media/mediasource/MediaSource.cpp
@@ -62,8 +62,6 @@ namespace mozilla {
// Returns true if we should enable MSE webm regardless of preferences.
// 1. If MP4/H264 isn't supported:
-// * Windows XP
-// * Windows Vista and Server 2008 without the optional "Platform Update Supplement"
// * N/KN editions (Europe and Korea) of Windows 7/8/8.1/10 without the
// optional "Windows Media Feature Pack"
// 2. If H264 hardware acceleration is not available.
diff --git a/dom/media/mediasource/moz.build b/dom/media/mediasource/moz.build
index 6ded1875d..a1689c216 100644
--- a/dom/media/mediasource/moz.build
+++ b/dom/media/mediasource/moz.build
@@ -38,9 +38,6 @@ TEST_DIRS += [
'gtest',
]
-if CONFIG['MOZ_GONK_MEDIACODEC']:
- DEFINES['MOZ_GONK_MEDIACODEC'] = True
-
include('/ipc/chromium/chromium-config.mozbuild')
FINAL_LIBRARY = 'xul'
diff --git a/dom/media/moz.build b/dom/media/moz.build
index 4d036a5f6..df8cb619d 100644
--- a/dom/media/moz.build
+++ b/dom/media/moz.build
@@ -30,6 +30,7 @@ DIRS += [
'ipc',
'mediasink',
'mediasource',
+ 'mp3',
'ogg',
'platforms',
'systemservices',
@@ -42,12 +43,6 @@ DIRS += [
'standalone',
]
-if CONFIG['MOZ_DIRECTSHOW']:
- DIRS += ['directshow']
-
-if CONFIG['MOZ_ANDROID_OMX']:
- DIRS += ['android']
-
if CONFIG['MOZ_FMP4']:
DIRS += ['fmp4']
@@ -128,9 +123,6 @@ EXPORTS += [
'MediaTimer.h',
'MediaTrack.h',
'MediaTrackList.h',
- 'MP3Decoder.h',
- 'MP3Demuxer.h',
- 'MP3FrameParser.h',
'NextFrameSeekTask.h',
'nsIDocumentActivity.h',
'PrincipalChangeObserver.h',
@@ -237,9 +229,6 @@ UNIFIED_SOURCES += [
'MediaTimer.cpp',
'MediaTrack.cpp',
'MediaTrackList.cpp',
- 'MP3Decoder.cpp',
- 'MP3Demuxer.cpp',
- 'MP3FrameParser.cpp',
'NextFrameSeekTask.cpp',
'QueueObject.cpp',
'SeekJob.cpp',
@@ -294,11 +283,6 @@ LOCAL_INCLUDES += [
'/netwerk/base',
]
-if CONFIG['MOZ_DIRECTSHOW']:
- LOCAL_INCLUDES += [
- '/media/webrtc/trunk/webrtc/modules/video_capture/windows',
- ]
-
if CONFIG['MOZ_WEBRTC']:
LOCAL_INCLUDES += [
'/media/webrtc/signaling/src/common',
diff --git a/dom/media/MP3Decoder.cpp b/dom/media/mp3/MP3Decoder.cpp
index b71111e79..074a0866d 100644
--- a/dom/media/MP3Decoder.cpp
+++ b/dom/media/mp3/MP3Decoder.cpp
@@ -24,7 +24,7 @@ MP3Decoder::Clone(MediaDecoderOwner* aOwner) {
MediaDecoderStateMachine*
MP3Decoder::CreateStateMachine() {
RefPtr<MediaDecoderReader> reader =
- new MediaFormatReader(this, new mp3::MP3Demuxer(GetResource()));
+ new MediaFormatReader(this, new MP3Demuxer(GetResource()));
return new MediaDecoderStateMachine(this, reader);
}
diff --git a/dom/media/MP3Decoder.h b/dom/media/mp3/MP3Decoder.h
index 887251065..887251065 100644
--- a/dom/media/MP3Decoder.h
+++ b/dom/media/mp3/MP3Decoder.h
diff --git a/dom/media/MP3Demuxer.cpp b/dom/media/mp3/MP3Demuxer.cpp
index 7d478a41b..5a98cabfe 100644
--- a/dom/media/MP3Demuxer.cpp
+++ b/dom/media/mp3/MP3Demuxer.cpp
@@ -33,7 +33,6 @@ using mozilla::media::TimeIntervals;
using mp4_demuxer::ByteReader;
namespace mozilla {
-namespace mp3 {
// MP3Demuxer
@@ -1338,5 +1337,4 @@ ID3Parser::ID3Header::Update(uint8_t c) {
return IsValid(mPos++);
}
-} // namespace mp3
} // namespace mozilla
diff --git a/dom/media/MP3Demuxer.h b/dom/media/mp3/MP3Demuxer.h
index 03e67b0d9..5331c4d54 100644
--- a/dom/media/MP3Demuxer.h
+++ b/dom/media/mp3/MP3Demuxer.h
@@ -13,7 +13,6 @@
#include <vector>
namespace mozilla {
-namespace mp3 {
class MP3TrackDemuxer;
@@ -468,7 +467,6 @@ private:
UniquePtr<AudioInfo> mInfo;
};
-} // namespace mp3
} // namespace mozilla
#endif
diff --git a/dom/media/mp3/moz.build b/dom/media/mp3/moz.build
new file mode 100644
index 000000000..596d061f8
--- /dev/null
+++ b/dom/media/mp3/moz.build
@@ -0,0 +1,17 @@
+# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+EXPORTS += [
+ 'MP3Decoder.h',
+ 'MP3Demuxer.h',
+]
+
+UNIFIED_SOURCES += [
+ 'MP3Decoder.cpp',
+ 'MP3Demuxer.cpp',
+]
+
+FINAL_LIBRARY = 'xul'
diff --git a/dom/media/platforms/MediaTelemetryConstants.h b/dom/media/platforms/MediaTelemetryConstants.h
deleted file mode 100644
index 5024949a8..000000000
--- a/dom/media/platforms/MediaTelemetryConstants.h
+++ /dev/null
@@ -1,22 +0,0 @@
-/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
- *
- * This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-#ifndef dom_media_platforms_MediaTelemetryConstants_h___
-#define dom_media_platforms_MediaTelemetryConstants_h___
-
-namespace mozilla {
-namespace media {
-
-enum class MediaDecoderBackend : uint32_t
-{
- WMFSoftware = 0,
- WMFDXVA2D3D9 = 1,
- WMFDXVA2D3D11 = 2
-};
-
-} // namespace media
-} // namespace mozilla
-
-#endif // dom_media_platforms_MediaTelemetryConstants_h___
diff --git a/dom/media/platforms/PDMFactory.cpp b/dom/media/platforms/PDMFactory.cpp
index c1e58fdc2..5bfdcffb7 100644
--- a/dom/media/platforms/PDMFactory.cpp
+++ b/dom/media/platforms/PDMFactory.cpp
@@ -19,9 +19,6 @@
#ifdef MOZ_APPLEMEDIA
#include "AppleDecoderModule.h"
#endif
-#ifdef MOZ_GONK_MEDIACODEC
-#include "GonkDecoderModule.h"
-#endif
#ifdef MOZ_WIDGET_ANDROID
#include "AndroidDecoderModule.h"
#endif
@@ -390,12 +387,6 @@ PDMFactory::CreatePDMs()
m = new AppleDecoderModule();
StartupPDM(m);
#endif
-#ifdef MOZ_GONK_MEDIACODEC
- if (MediaPrefs::PDMGonkDecoderEnabled()) {
- m = new GonkDecoderModule();
- StartupPDM(m);
- }
-#endif
#ifdef MOZ_WIDGET_ANDROID
if(MediaPrefs::PDMAndroidMediaCodecEnabled()){
m = new AndroidDecoderModule();
diff --git a/dom/media/platforms/moz.build b/dom/media/platforms/moz.build
index be13d31c4..f5fb72c5d 100644
--- a/dom/media/platforms/moz.build
+++ b/dom/media/platforms/moz.build
@@ -10,7 +10,6 @@ EXPORTS += [
'agnostic/TheoraDecoder.h',
'agnostic/VorbisDecoder.h',
'agnostic/VPXDecoder.h',
- 'MediaTelemetryConstants.h',
'PDMFactory.h',
'PlatformDecoderModule.h',
'wrappers/FuzzingWrapper.h',
diff --git a/dom/media/platforms/omx/OmxPlatformLayer.cpp b/dom/media/platforms/omx/OmxPlatformLayer.cpp
index 039b4a22f..15b3062a4 100644
--- a/dom/media/platforms/omx/OmxPlatformLayer.cpp
+++ b/dom/media/platforms/omx/OmxPlatformLayer.cpp
@@ -282,26 +282,7 @@ OmxPlatformLayer::CompressionFormat()
}
}
-// Implementations for different platforms will be defined in their own files.
-#ifdef OMX_PLATFORM_GONK
-
-bool
-OmxPlatformLayer::SupportsMimeType(const nsACString& aMimeType)
-{
- return GonkOmxPlatformLayer::FindComponents(aMimeType);
-}
-
-OmxPlatformLayer*
-OmxPlatformLayer::Create(OmxDataDecoder* aDataDecoder,
- OmxPromiseLayer* aPromiseLayer,
- TaskQueue* aTaskQueue,
- layers::ImageContainer* aImageContainer)
-{
- return new GonkOmxPlatformLayer(aDataDecoder, aPromiseLayer, aTaskQueue, aImageContainer);
-}
-
-#else // For platforms without OMX IL support.
-
+// For platforms without OMX IL support.
bool
OmxPlatformLayer::SupportsMimeType(const nsACString& aMimeType)
{
@@ -317,6 +298,4 @@ OmxPlatformLayer::Create(OmxDataDecoder* aDataDecoder,
return nullptr;
}
-#endif
-
}
diff --git a/dom/media/platforms/wmf/DXVA2Manager.cpp b/dom/media/platforms/wmf/DXVA2Manager.cpp
index 1226ea621..69e002f7f 100644
--- a/dom/media/platforms/wmf/DXVA2Manager.cpp
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -14,7 +14,6 @@
#include "mozilla/layers/D3D11ShareHandleImage.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "mozilla/layers/TextureForwarder.h"
-#include "MediaTelemetryConstants.h"
#include "mfapi.h"
#include "gfxPrefs.h"
#include "MFTDecoder.h"
diff --git a/dom/media/platforms/wmf/WMFDecoderModule.h b/dom/media/platforms/wmf/WMFDecoderModule.h
index cd7b8c660..6582f8056 100644
--- a/dom/media/platforms/wmf/WMFDecoderModule.h
+++ b/dom/media/platforms/wmf/WMFDecoderModule.h
@@ -40,10 +40,8 @@ public:
static int GetNumDecoderThreads();
// Accessors that report whether we have the required MFTs available
- // on the system to play various codecs. Windows Vista doesn't have the
- // H.264/AAC decoders if the "Platform Update Supplement for Windows Vista"
- // is not installed, and Window N and KN variants also require a "Media
- // Feature Pack" to be installed. Windows XP doesn't have WMF.
+ // on the system to play various codecs. Windows N and KN variants
+ // require a "Media Feature Pack" to be installed.
static bool HasAAC();
static bool HasH264();
diff --git a/dom/media/platforms/wmf/WMFMediaDataDecoder.h b/dom/media/platforms/wmf/WMFMediaDataDecoder.h
index a4dd49f56..75571d61e 100644
--- a/dom/media/platforms/wmf/WMFMediaDataDecoder.h
+++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.h
@@ -33,7 +33,7 @@ public:
// Returns S_OK on success, or MF_E_TRANSFORM_NEED_MORE_INPUT if there's not
// enough data to produce more output. If this returns a failure code other
// than MF_E_TRANSFORM_NEED_MORE_INPUT, an error will be reported to the
- // MP4Reader.
+ // MP4Demuxer.
virtual HRESULT Output(int64_t aStreamOffset,
RefPtr<MediaData>& aOutput) = 0;
diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
index 8a51f817a..a7633a7de 100644
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -27,7 +27,6 @@
#include "mozilla/WindowsVersion.h"
#include "mozilla/Telemetry.h"
#include "nsPrintfCString.h"
-#include "MediaTelemetryConstants.h"
#include "GMPUtils.h" // For SplitAt. TODO: Move SplitAt to a central place.
#include "MP4Decoder.h"
#include "VPXDecoder.h"
diff --git a/dom/media/test/manifest.js b/dom/media/test/manifest.js
index 7e30cc97d..52e53a271 100644
--- a/dom/media/test/manifest.js
+++ b/dom/media/test/manifest.js
@@ -266,10 +266,10 @@ var gPlayTests = [
{ name:"small-shot.mp3", type:"audio/mpeg", duration:0.27 },
{ name:"owl.mp3", type:"audio/mpeg", duration:3.343 },
// owl.mp3 as above, but with something funny going on in the ID3v2 tag
- // that causes DirectShow to fail.
+ // that caused DirectShow to fail.
{ name:"owl-funny-id3.mp3", type:"audio/mpeg", duration:3.343 },
// owl.mp3 as above, but with something even funnier going on in the ID3v2 tag
- // that causes DirectShow to fail.
+ // that caused DirectShow to fail.
{ name:"owl-funnier-id3.mp3", type:"audio/mpeg", duration:3.343 },
// One second of silence with ~140KB of ID3 tags. Usually when the first MP3
// frame is at such a high offset into the file, MP3FrameParser will give up
diff --git a/dom/media/test/test_can_play_type_mpeg.html b/dom/media/test/test_can_play_type_mpeg.html
index 89e5fabef..514b5cc2f 100644
--- a/dom/media/test/test_can_play_type_mpeg.html
+++ b/dom/media/test/test_can_play_type_mpeg.html
@@ -151,8 +151,7 @@ var haveMp4 = (getPref("media.wmf.enabled") && IsWindowsVistaOrLater()) ||
check_mp4(document.getElementById('v'), haveMp4);
-var haveMp3 = getPref("media.directshow.enabled") ||
- (getPref("media.wmf.enabled") && IsWindowsVistaOrLater()) ||
+var haveMp3 = getPref("media.wmf.enabled") ||
(IsLinux() && getPref("media.ffmpeg.enabled")) ||
(IsSupportedAndroid() &&
((IsJellyBeanOrLater() && getPref("media.android-media-codec.enabled")) ||
diff --git a/dom/moz.build b/dom/moz.build
index 358fc6411..54dc0510e 100644
--- a/dom/moz.build
+++ b/dom/moz.build
@@ -94,7 +94,6 @@ DIRS += [
'xslt',
'xul',
'manifest',
- 'vr',
'u2f',
'console',
'performance',
diff --git a/dom/vr/VRDisplay.cpp b/dom/vr/VRDisplay.cpp
deleted file mode 100644
index 80922422f..000000000
--- a/dom/vr/VRDisplay.cpp
+++ /dev/null
@@ -1,802 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=8 sts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "nsWrapperCache.h"
-
-#include "mozilla/dom/Element.h"
-#include "mozilla/dom/ElementBinding.h"
-#include "mozilla/dom/Promise.h"
-#include "mozilla/dom/VRDisplay.h"
-#include "mozilla/HoldDropJSObjects.h"
-#include "mozilla/dom/VRDisplayBinding.h"
-#include "Navigator.h"
-#include "gfxVR.h"
-#include "VRDisplayClient.h"
-#include "VRManagerChild.h"
-#include "VRDisplayPresentation.h"
-#include "nsIObserverService.h"
-#include "nsIFrame.h"
-#include "nsISupportsPrimitives.h"
-
-using namespace mozilla::gfx;
-
-namespace mozilla {
-namespace dom {
-
-VRFieldOfView::VRFieldOfView(nsISupports* aParent,
- double aUpDegrees, double aRightDegrees,
- double aDownDegrees, double aLeftDegrees)
- : mParent(aParent)
- , mUpDegrees(aUpDegrees)
- , mRightDegrees(aRightDegrees)
- , mDownDegrees(aDownDegrees)
- , mLeftDegrees(aLeftDegrees)
-{
-}
-
-VRFieldOfView::VRFieldOfView(nsISupports* aParent, const gfx::VRFieldOfView& aSrc)
- : mParent(aParent)
- , mUpDegrees(aSrc.upDegrees)
- , mRightDegrees(aSrc.rightDegrees)
- , mDownDegrees(aSrc.downDegrees)
- , mLeftDegrees(aSrc.leftDegrees)
-{
-}
-
-bool
-VRDisplayCapabilities::HasPosition() const
-{
- return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_Position);
-}
-
-bool
-VRDisplayCapabilities::HasOrientation() const
-{
- return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_Orientation);
-}
-
-bool
-VRDisplayCapabilities::HasExternalDisplay() const
-{
- return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_External);
-}
-
-bool
-VRDisplayCapabilities::CanPresent() const
-{
- return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_Present);
-}
-
-uint32_t
-VRDisplayCapabilities::MaxLayers() const
-{
- return CanPresent() ? 1 : 0;
-}
-
-/*static*/ bool
-VRDisplay::RefreshVRDisplays(uint64_t aWindowId)
-{
- gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
- return vm && vm->RefreshVRDisplaysWithCallback(aWindowId);
-}
-
-/*static*/ void
-VRDisplay::UpdateVRDisplays(nsTArray<RefPtr<VRDisplay>>& aDisplays, nsPIDOMWindowInner* aWindow)
-{
- nsTArray<RefPtr<VRDisplay>> displays;
-
- gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
- nsTArray<RefPtr<gfx::VRDisplayClient>> updatedDisplays;
- if (vm && vm->GetVRDisplays(updatedDisplays)) {
- for (size_t i = 0; i < updatedDisplays.Length(); i++) {
- RefPtr<gfx::VRDisplayClient> display = updatedDisplays[i];
- bool isNewDisplay = true;
- for (size_t j = 0; j < aDisplays.Length(); j++) {
- if (aDisplays[j]->GetClient()->GetDisplayInfo() == display->GetDisplayInfo()) {
- displays.AppendElement(aDisplays[j]);
- isNewDisplay = false;
- }
- }
-
- if (isNewDisplay) {
- displays.AppendElement(new VRDisplay(aWindow, display));
- }
- }
- }
-
- aDisplays = displays;
-}
-
-NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VRFieldOfView, mParent)
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRFieldOfView, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRFieldOfView, Release)
-
-
-JSObject*
-VRFieldOfView::WrapObject(JSContext* aCx,
- JS::Handle<JSObject*> aGivenProto)
-{
- return VRFieldOfViewBinding::Wrap(aCx, this, aGivenProto);
-}
-
-NS_IMPL_CYCLE_COLLECTION_CLASS(VREyeParameters)
-
-NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(VREyeParameters)
- NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent, mFOV)
- NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
- tmp->mOffset = nullptr;
-NS_IMPL_CYCLE_COLLECTION_UNLINK_END
-
-NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(VREyeParameters)
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent, mFOV)
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
-NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
-
-NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(VREyeParameters)
- NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
- NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mOffset)
-NS_IMPL_CYCLE_COLLECTION_TRACE_END
-
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VREyeParameters, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VREyeParameters, Release)
-
-VREyeParameters::VREyeParameters(nsISupports* aParent,
- const gfx::Point3D& aEyeTranslation,
- const gfx::VRFieldOfView& aFOV,
- const gfx::IntSize& aRenderSize)
- : mParent(aParent)
- , mEyeTranslation(aEyeTranslation)
- , mRenderSize(aRenderSize)
-{
- mFOV = new VRFieldOfView(aParent, aFOV);
- mozilla::HoldJSObjects(this);
-}
-
-VREyeParameters::~VREyeParameters()
-{
- mozilla::DropJSObjects(this);
-}
-
-VRFieldOfView*
-VREyeParameters::FieldOfView()
-{
- return mFOV;
-}
-
-void
-VREyeParameters::GetOffset(JSContext* aCx, JS::MutableHandle<JSObject*> aRetval, ErrorResult& aRv)
-{
- if (!mOffset) {
- // Lazily create the Float32Array
- mOffset = dom::Float32Array::Create(aCx, this, 3, mEyeTranslation.components);
- if (!mOffset) {
- aRv.NoteJSContextException(aCx);
- return;
- }
- }
- aRetval.set(mOffset);
-}
-
-JSObject*
-VREyeParameters::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
-{
- return VREyeParametersBinding::Wrap(aCx, this, aGivenProto);
-}
-
-VRStageParameters::VRStageParameters(nsISupports* aParent,
- const gfx::Matrix4x4& aSittingToStandingTransform,
- const gfx::Size& aSize)
- : mParent(aParent)
- , mSittingToStandingTransform(aSittingToStandingTransform)
- , mSittingToStandingTransformArray(nullptr)
- , mSize(aSize)
-{
- mozilla::HoldJSObjects(this);
-}
-
-VRStageParameters::~VRStageParameters()
-{
- mozilla::DropJSObjects(this);
-}
-
-JSObject*
-VRStageParameters::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
-{
- return VRStageParametersBinding::Wrap(aCx, this, aGivenProto);
-}
-
-NS_IMPL_CYCLE_COLLECTION_CLASS(VRStageParameters)
-
-NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(VRStageParameters)
- NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent)
- NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
- tmp->mSittingToStandingTransformArray = nullptr;
-NS_IMPL_CYCLE_COLLECTION_UNLINK_END
-
-NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(VRStageParameters)
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent)
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
-NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
-
-NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(VRStageParameters)
- NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
- NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mSittingToStandingTransformArray)
-NS_IMPL_CYCLE_COLLECTION_TRACE_END
-
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRStageParameters, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRStageParameters, Release)
-
-void
-VRStageParameters::GetSittingToStandingTransform(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- if (!mSittingToStandingTransformArray) {
- // Lazily create the Float32Array
- mSittingToStandingTransformArray = dom::Float32Array::Create(aCx, this, 16,
- mSittingToStandingTransform.components);
- if (!mSittingToStandingTransformArray) {
- aRv.NoteJSContextException(aCx);
- return;
- }
- }
- aRetval.set(mSittingToStandingTransformArray);
-}
-
-NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VRDisplayCapabilities, mParent)
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRDisplayCapabilities, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRDisplayCapabilities, Release)
-
-JSObject*
-VRDisplayCapabilities::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
-{
- return VRDisplayCapabilitiesBinding::Wrap(aCx, this, aGivenProto);
-}
-
-VRPose::VRPose(nsISupports* aParent, const gfx::VRHMDSensorState& aState)
- : Pose(aParent)
- , mVRState(aState)
-{
- mFrameId = aState.inputFrameID;
- mozilla::HoldJSObjects(this);
-}
-
-VRPose::VRPose(nsISupports* aParent)
- : Pose(aParent)
-{
- mFrameId = 0;
- mVRState.Clear();
- mozilla::HoldJSObjects(this);
-}
-
-VRPose::~VRPose()
-{
- mozilla::DropJSObjects(this);
-}
-
-void
-VRPose::GetPosition(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- SetFloat32Array(aCx, aRetval, mPosition, mVRState.position, 3,
- !mPosition && bool(mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position),
- aRv);
-}
-
-void
-VRPose::GetLinearVelocity(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- SetFloat32Array(aCx, aRetval, mLinearVelocity, mVRState.linearVelocity, 3,
- !mLinearVelocity && bool(mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position),
- aRv);
-}
-
-void
-VRPose::GetLinearAcceleration(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- SetFloat32Array(aCx, aRetval, mLinearAcceleration, mVRState.linearAcceleration, 3,
- !mLinearAcceleration && bool(mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_LinearAcceleration),
- aRv);
-
-}
-
-void
-VRPose::GetOrientation(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- SetFloat32Array(aCx, aRetval, mOrientation, mVRState.orientation, 4,
- !mOrientation && bool(mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation),
- aRv);
-}
-
-void
-VRPose::GetAngularVelocity(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- SetFloat32Array(aCx, aRetval, mAngularVelocity, mVRState.angularVelocity, 3,
- !mAngularVelocity && bool(mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation),
- aRv);
-}
-
-void
-VRPose::GetAngularAcceleration(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- SetFloat32Array(aCx, aRetval, mAngularAcceleration, mVRState.angularAcceleration, 3,
- !mAngularAcceleration && bool(mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_AngularAcceleration),
- aRv);
-}
-
-JSObject*
-VRPose::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
-{
- return VRPoseBinding::Wrap(aCx, this, aGivenProto);
-}
-
-/* virtual */ JSObject*
-VRDisplay::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
-{
- return VRDisplayBinding::Wrap(aCx, this, aGivenProto);
-}
-
-VRDisplay::VRDisplay(nsPIDOMWindowInner* aWindow, gfx::VRDisplayClient* aClient)
- : DOMEventTargetHelper(aWindow)
- , mClient(aClient)
- , mDepthNear(0.01f) // Default value from WebVR Spec
- , mDepthFar(10000.0f) // Default value from WebVR Spec
-{
- const gfx::VRDisplayInfo& info = aClient->GetDisplayInfo();
- mDisplayId = info.GetDisplayID();
- mDisplayName = NS_ConvertASCIItoUTF16(info.GetDisplayName());
- mCapabilities = new VRDisplayCapabilities(aWindow, info.GetCapabilities());
- if (info.GetCapabilities() & gfx::VRDisplayCapabilityFlags::Cap_StageParameters) {
- mStageParameters = new VRStageParameters(aWindow,
- info.GetSittingToStandingTransform(),
- info.GetStageSize());
- }
- mozilla::HoldJSObjects(this);
-}
-
-VRDisplay::~VRDisplay()
-{
- ExitPresentInternal();
- mozilla::DropJSObjects(this);
-}
-
-void
-VRDisplay::LastRelease()
-{
- // We don't want to wait for the CC to free up the presentation
- // for use in other documents, so we do this in LastRelease().
- ExitPresentInternal();
-}
-
-already_AddRefed<VREyeParameters>
-VRDisplay::GetEyeParameters(VREye aEye)
-{
- gfx::VRDisplayInfo::Eye eye = aEye == VREye::Left ? gfx::VRDisplayInfo::Eye_Left : gfx::VRDisplayInfo::Eye_Right;
- RefPtr<VREyeParameters> params =
- new VREyeParameters(GetParentObject(),
- mClient->GetDisplayInfo().GetEyeTranslation(eye),
- mClient->GetDisplayInfo().GetEyeFOV(eye),
- mClient->GetDisplayInfo().SuggestedEyeResolution());
- return params.forget();
-}
-
-VRDisplayCapabilities*
-VRDisplay::Capabilities()
-{
- return mCapabilities;
-}
-
-VRStageParameters*
-VRDisplay::GetStageParameters()
-{
- return mStageParameters;
-}
-
-void
-VRDisplay::UpdateFrameInfo()
-{
- /**
- * The WebVR 1.1 spec Requires that VRDisplay.getPose and VRDisplay.getFrameData
- * must return the same values until the next VRDisplay.submitFrame.
- *
- * mFrameInfo is marked dirty at the end of the frame or start of a new
- * composition and lazily created here in order to receive mid-frame
- * pose-prediction updates while still ensuring conformance to the WebVR spec
- * requirements.
- *
- * If we are not presenting WebVR content, the frame will never end and we should
- * return the latest frame data always.
- */
- if (mFrameInfo.IsDirty() || !mPresentation) {
- gfx::VRHMDSensorState state = mClient->GetSensorState();
- const gfx::VRDisplayInfo& info = mClient->GetDisplayInfo();
- mFrameInfo.Update(info, state, mDepthNear, mDepthFar);
- }
-}
-
-bool
-VRDisplay::GetFrameData(VRFrameData& aFrameData)
-{
- UpdateFrameInfo();
- aFrameData.Update(mFrameInfo);
- return true;
-}
-
-already_AddRefed<VRPose>
-VRDisplay::GetPose()
-{
- UpdateFrameInfo();
- RefPtr<VRPose> obj = new VRPose(GetParentObject(), mFrameInfo.mVRState);
-
- return obj.forget();
-}
-
-void
-VRDisplay::ResetPose()
-{
- mClient->ZeroSensor();
-}
-
-already_AddRefed<Promise>
-VRDisplay::RequestPresent(const nsTArray<VRLayer>& aLayers, ErrorResult& aRv)
-{
- nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(GetParentObject());
- if (!global) {
- aRv.Throw(NS_ERROR_FAILURE);
- return nullptr;
- }
-
- RefPtr<Promise> promise = Promise::Create(global, aRv);
- NS_ENSURE_TRUE(!aRv.Failed(), nullptr);
-
- nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
- NS_ENSURE_TRUE(obs, nullptr);
-
- if (mClient->GetIsPresenting()) {
- // Only one presentation allowed per VRDisplay
- // on a first-come-first-serve basis.
- promise->MaybeRejectWithUndefined();
- } else {
- mPresentation = mClient->BeginPresentation(aLayers);
- mFrameInfo.Clear();
-
- nsresult rv = obs->AddObserver(this, "inner-window-destroyed", false);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- mPresentation = nullptr;
- promise->MaybeRejectWithUndefined();
- } else {
- promise->MaybeResolve(JS::UndefinedHandleValue);
- }
- }
- return promise.forget();
-}
-
-NS_IMETHODIMP
-VRDisplay::Observe(nsISupports* aSubject, const char* aTopic,
- const char16_t* aData)
-{
- MOZ_ASSERT(NS_IsMainThread());
-
- if (strcmp(aTopic, "inner-window-destroyed") == 0) {
- nsCOMPtr<nsISupportsPRUint64> wrapper = do_QueryInterface(aSubject);
- NS_ENSURE_TRUE(wrapper, NS_ERROR_FAILURE);
-
- uint64_t innerID;
- nsresult rv = wrapper->GetData(&innerID);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (!GetOwner() || GetOwner()->WindowID() == innerID) {
- ExitPresentInternal();
- }
-
- return NS_OK;
- }
-
- // This should not happen.
- return NS_ERROR_FAILURE;
-}
-
-already_AddRefed<Promise>
-VRDisplay::ExitPresent(ErrorResult& aRv)
-{
- nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(GetParentObject());
- if (!global) {
- aRv.Throw(NS_ERROR_FAILURE);
- return nullptr;
- }
-
-
- RefPtr<Promise> promise = Promise::Create(global, aRv);
- NS_ENSURE_TRUE(!aRv.Failed(), nullptr);
-
- if (!IsPresenting()) {
- // We can not exit a presentation outside of the context that
- // started the presentation.
- promise->MaybeRejectWithUndefined();
- } else {
- promise->MaybeResolve(JS::UndefinedHandleValue);
- ExitPresentInternal();
- }
-
- return promise.forget();
-}
-
-void
-VRDisplay::ExitPresentInternal()
-{
- mPresentation = nullptr;
-}
-
-void
-VRDisplay::GetLayers(nsTArray<VRLayer>& result)
-{
- if (mPresentation) {
- mPresentation->GetDOMLayers(result);
- } else {
- result = nsTArray<VRLayer>();
- }
-}
-
-void
-VRDisplay::SubmitFrame()
-{
- if (mPresentation) {
- mPresentation->SubmitFrame();
- }
- mFrameInfo.Clear();
-}
-
-int32_t
-VRDisplay::RequestAnimationFrame(FrameRequestCallback& aCallback,
-ErrorResult& aError)
-{
- gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
-
- int32_t handle;
- aError = vm->ScheduleFrameRequestCallback(aCallback, &handle);
- return handle;
-}
-
-void
-VRDisplay::CancelAnimationFrame(int32_t aHandle, ErrorResult& aError)
-{
- gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
- vm->CancelFrameRequestCallback(aHandle);
-}
-
-
-bool
-VRDisplay::IsPresenting() const
-{
- // IsPresenting returns true only if this Javascript context is presenting
- // and will return false if another context is presenting.
- return mPresentation != nullptr;
-}
-
-bool
-VRDisplay::IsConnected() const
-{
- return mClient->GetIsConnected();
-}
-
-NS_IMPL_CYCLE_COLLECTION_INHERITED(VRDisplay, DOMEventTargetHelper, mCapabilities, mStageParameters)
-
-NS_IMPL_ADDREF_INHERITED(VRDisplay, DOMEventTargetHelper)
-NS_IMPL_RELEASE_INHERITED(VRDisplay, DOMEventTargetHelper)
-
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(VRDisplay)
-NS_INTERFACE_MAP_ENTRY(nsIObserver)
-NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, DOMEventTargetHelper)
-NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
-
-NS_IMPL_CYCLE_COLLECTION_CLASS(VRFrameData)
-
-NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(VRFrameData)
- NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent, mPose)
- NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
- tmp->mLeftProjectionMatrix = nullptr;
- tmp->mLeftViewMatrix = nullptr;
- tmp->mRightProjectionMatrix = nullptr;
- tmp->mRightViewMatrix = nullptr;
-NS_IMPL_CYCLE_COLLECTION_UNLINK_END
-
-NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(VRFrameData)
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent, mPose)
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
-NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
-
-NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(VRFrameData)
- NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
- NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mLeftProjectionMatrix)
- NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mLeftViewMatrix)
- NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mRightProjectionMatrix)
- NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mRightViewMatrix)
-NS_IMPL_CYCLE_COLLECTION_TRACE_END
-
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRFrameData, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRFrameData, Release)
-
-VRFrameData::VRFrameData(nsISupports* aParent)
- : mParent(aParent)
- , mLeftProjectionMatrix(nullptr)
- , mLeftViewMatrix(nullptr)
- , mRightProjectionMatrix(nullptr)
- , mRightViewMatrix(nullptr)
-{
- mozilla::HoldJSObjects(this);
- mPose = new VRPose(aParent);
-}
-
-VRFrameData::~VRFrameData()
-{
- mozilla::DropJSObjects(this);
-}
-
-/* static */ already_AddRefed<VRFrameData>
-VRFrameData::Constructor(const GlobalObject& aGlobal, ErrorResult& aRv)
-{
- RefPtr<VRFrameData> obj = new VRFrameData(aGlobal.GetAsSupports());
- return obj.forget();
-}
-
-JSObject*
-VRFrameData::WrapObject(JSContext* aCx,
- JS::Handle<JSObject*> aGivenProto)
-{
- return VRFrameDataBinding::Wrap(aCx, this, aGivenProto);
-}
-
-VRPose*
-VRFrameData::Pose()
-{
- return mPose;
-}
-
-void
-VRFrameData::LazyCreateMatrix(JS::Heap<JSObject*>& aArray, gfx::Matrix4x4& aMat, JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval, ErrorResult& aRv)
-{
- if (!aArray) {
- // Lazily create the Float32Array
- aArray = dom::Float32Array::Create(aCx, this, 16, aMat.components);
- if (!aArray) {
- aRv.NoteJSContextException(aCx);
- return;
- }
- }
- if (aArray) {
- JS::ExposeObjectToActiveJS(aArray);
- }
- aRetval.set(aArray);
-}
-
-double
-VRFrameData::Timestamp() const
-{
- // Converting from seconds to milliseconds
- return mFrameInfo.mVRState.timestamp * 1000.0f;
-}
-
-void
-VRFrameData::GetLeftProjectionMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- LazyCreateMatrix(mLeftProjectionMatrix, mFrameInfo.mLeftProjection, aCx,
- aRetval, aRv);
-}
-
-void
-VRFrameData::GetLeftViewMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- LazyCreateMatrix(mLeftViewMatrix, mFrameInfo.mLeftView, aCx, aRetval, aRv);
-}
-
-void
-VRFrameData::GetRightProjectionMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- LazyCreateMatrix(mRightProjectionMatrix, mFrameInfo.mRightProjection, aCx,
- aRetval, aRv);
-}
-
-void
-VRFrameData::GetRightViewMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv)
-{
- LazyCreateMatrix(mRightViewMatrix, mFrameInfo.mRightView, aCx, aRetval, aRv);
-}
-
-void
-VRFrameData::Update(const VRFrameInfo& aFrameInfo)
-{
- mFrameInfo = aFrameInfo;
-
- mLeftProjectionMatrix = nullptr;
- mLeftViewMatrix = nullptr;
- mRightProjectionMatrix = nullptr;
- mRightViewMatrix = nullptr;
-
- mPose = new VRPose(GetParentObject(), mFrameInfo.mVRState);
-}
-
-void
-VRFrameInfo::Update(const gfx::VRDisplayInfo& aInfo,
- const gfx::VRHMDSensorState& aState,
- float aDepthNear,
- float aDepthFar)
-{
- mVRState = aState;
-
- gfx::Quaternion qt;
- if (mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation) {
- qt.x = mVRState.orientation[0];
- qt.y = mVRState.orientation[1];
- qt.z = mVRState.orientation[2];
- qt.w = mVRState.orientation[3];
- }
- gfx::Point3D pos;
- if (mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position) {
- pos.x = -mVRState.position[0];
- pos.y = -mVRState.position[1];
- pos.z = -mVRState.position[2];
- }
- gfx::Matrix4x4 matHead;
- matHead.SetRotationFromQuaternion(qt);
- matHead.PreTranslate(pos);
-
- mLeftView = matHead;
- mLeftView.PostTranslate(-aInfo.mEyeTranslation[gfx::VRDisplayInfo::Eye_Left]);
-
- mRightView = matHead;
- mRightView.PostTranslate(-aInfo.mEyeTranslation[gfx::VRDisplayInfo::Eye_Right]);
-
- // Avoid division by zero within ConstructProjectionMatrix
- const float kEpsilon = 0.00001f;
- if (fabs(aDepthFar - aDepthNear) < kEpsilon) {
- aDepthFar = aDepthNear + kEpsilon;
- }
-
- const gfx::VRFieldOfView leftFOV = aInfo.mEyeFOV[gfx::VRDisplayInfo::Eye_Left];
- mLeftProjection = leftFOV.ConstructProjectionMatrix(aDepthNear, aDepthFar, true);
- const gfx::VRFieldOfView rightFOV = aInfo.mEyeFOV[gfx::VRDisplayInfo::Eye_Right];
- mRightProjection = rightFOV.ConstructProjectionMatrix(aDepthNear, aDepthFar, true);
-}
-
-VRFrameInfo::VRFrameInfo()
-{
- mVRState.Clear();
-}
-
-bool
-VRFrameInfo::IsDirty()
-{
- return mVRState.timestamp == 0;
-}
-
-void
-VRFrameInfo::Clear()
-{
- mVRState.Clear();
-}
-
-} // namespace dom
-} // namespace mozilla
diff --git a/dom/vr/VRDisplay.h b/dom/vr/VRDisplay.h
deleted file mode 100644
index d40d3d8ac..000000000
--- a/dom/vr/VRDisplay.h
+++ /dev/null
@@ -1,362 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=8 sts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef mozilla_dom_VRDisplay_h_
-#define mozilla_dom_VRDisplay_h_
-
-#include <stdint.h>
-
-#include "mozilla/ErrorResult.h"
-#include "mozilla/dom/TypedArray.h"
-#include "mozilla/dom/VRDisplayBinding.h"
-#include "mozilla/DOMEventTargetHelper.h"
-#include "mozilla/dom/DOMPoint.h"
-#include "mozilla/dom/DOMRect.h"
-#include "mozilla/dom/Pose.h"
-
-#include "nsCOMPtr.h"
-#include "nsString.h"
-#include "nsTArray.h"
-
-#include "gfxVR.h"
-
-namespace mozilla {
-namespace gfx {
-class VRDisplayClient;
-class VRDisplayPresentation;
-struct VRFieldOfView;
-enum class VRDisplayCapabilityFlags : uint16_t;
-struct VRHMDSensorState;
-}
-namespace dom {
-class Navigator;
-
-class VRFieldOfView final : public nsWrapperCache
-{
-public:
- VRFieldOfView(nsISupports* aParent,
- double aUpDegrees, double aRightDegrees,
- double aDownDegrees, double aLeftDegrees);
- VRFieldOfView(nsISupports* aParent, const gfx::VRFieldOfView& aSrc);
-
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRFieldOfView)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRFieldOfView)
-
- double UpDegrees() const { return mUpDegrees; }
- double RightDegrees() const { return mRightDegrees; }
- double DownDegrees() const { return mDownDegrees; }
- double LeftDegrees() const { return mLeftDegrees; }
-
- nsISupports* GetParentObject() const { return mParent; }
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-
-protected:
- virtual ~VRFieldOfView() {}
-
- nsCOMPtr<nsISupports> mParent;
-
- double mUpDegrees;
- double mRightDegrees;
- double mDownDegrees;
- double mLeftDegrees;
-};
-
-class VRDisplayCapabilities final : public nsWrapperCache
-{
-public:
- VRDisplayCapabilities(nsISupports* aParent, const gfx::VRDisplayCapabilityFlags& aFlags)
- : mParent(aParent)
- , mFlags(aFlags)
- {
- }
-
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRDisplayCapabilities)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRDisplayCapabilities)
-
- nsISupports* GetParentObject() const
- {
- return mParent;
- }
-
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-
- bool HasPosition() const;
- bool HasOrientation() const;
- bool HasExternalDisplay() const;
- bool CanPresent() const;
- uint32_t MaxLayers() const;
-
-protected:
- ~VRDisplayCapabilities() {}
- nsCOMPtr<nsISupports> mParent;
- gfx::VRDisplayCapabilityFlags mFlags;
-};
-
-class VRPose final : public Pose
-{
-
-public:
- VRPose(nsISupports* aParent, const gfx::VRHMDSensorState& aState);
- explicit VRPose(nsISupports* aParent);
-
- uint32_t FrameID() const { return mFrameId; }
-
- virtual void GetPosition(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv) override;
- virtual void GetLinearVelocity(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv) override;
- virtual void GetLinearAcceleration(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv) override;
- virtual void GetOrientation(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv) override;
- virtual void GetAngularVelocity(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv) override;
- virtual void GetAngularAcceleration(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv) override;
-
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-
-protected:
- ~VRPose();
-
- uint32_t mFrameId;
- gfx::VRHMDSensorState mVRState;
-};
-
-struct VRFrameInfo
-{
- VRFrameInfo();
-
- void Update(const gfx::VRDisplayInfo& aInfo,
- const gfx::VRHMDSensorState& aState,
- float aDepthNear,
- float aDepthFar);
-
- void Clear();
- bool IsDirty();
-
- gfx::VRHMDSensorState mVRState;
- gfx::Matrix4x4 mLeftProjection;
- gfx::Matrix4x4 mLeftView;
- gfx::Matrix4x4 mRightProjection;
- gfx::Matrix4x4 mRightView;
-
-};
-
-class VRFrameData final : public nsWrapperCache
-{
-public:
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRFrameData)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRFrameData)
-
- explicit VRFrameData(nsISupports* aParent);
- static already_AddRefed<VRFrameData> Constructor(const GlobalObject& aGlobal,
- ErrorResult& aRv);
-
- void Update(const VRFrameInfo& aFrameInfo);
-
- // WebIDL Members
- double Timestamp() const;
- void GetLeftProjectionMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv);
- void GetLeftViewMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv);
- void GetRightProjectionMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv);
- void GetRightViewMatrix(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv);
-
- VRPose* Pose();
-
- // WebIDL Boilerplate
- nsISupports* GetParentObject() const { return mParent; }
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-
-protected:
- ~VRFrameData();
- nsCOMPtr<nsISupports> mParent;
-
- VRFrameInfo mFrameInfo;
- RefPtr<VRPose> mPose;
- JS::Heap<JSObject*> mLeftProjectionMatrix;
- JS::Heap<JSObject*> mLeftViewMatrix;
- JS::Heap<JSObject*> mRightProjectionMatrix;
- JS::Heap<JSObject*> mRightViewMatrix;
-
- void LazyCreateMatrix(JS::Heap<JSObject*>& aArray, gfx::Matrix4x4& aMat,
- JSContext* aCx, JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv);
-};
-
-class VRStageParameters final : public nsWrapperCache
-{
-public:
- VRStageParameters(nsISupports* aParent,
- const gfx::Matrix4x4& aSittingToStandingTransform,
- const gfx::Size& aSize);
-
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRStageParameters)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRStageParameters)
-
- void GetSittingToStandingTransform(JSContext* aCx,
- JS::MutableHandle<JSObject*> aRetval,
- ErrorResult& aRv);
- float SizeX() const { return mSize.width; }
- float SizeZ() const { return mSize.height; }
-
- nsISupports* GetParentObject() const { return mParent; }
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-
-protected:
- ~VRStageParameters();
-
- nsCOMPtr<nsISupports> mParent;
-
- gfx::Matrix4x4 mSittingToStandingTransform;
- JS::Heap<JSObject*> mSittingToStandingTransformArray;
- gfx::Size mSize;
-};
-
-class VREyeParameters final : public nsWrapperCache
-{
-public:
- VREyeParameters(nsISupports* aParent,
- const gfx::Point3D& aEyeTranslation,
- const gfx::VRFieldOfView& aFOV,
- const gfx::IntSize& aRenderSize);
-
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VREyeParameters)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VREyeParameters)
-
- void GetOffset(JSContext* aCx, JS::MutableHandle<JSObject*> aRetVal,
- ErrorResult& aRv);
-
- VRFieldOfView* FieldOfView();
-
- uint32_t RenderWidth() const { return mRenderSize.width; }
- uint32_t RenderHeight() const { return mRenderSize.height; }
-
- nsISupports* GetParentObject() const { return mParent; }
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-protected:
- ~VREyeParameters();
-
- nsCOMPtr<nsISupports> mParent;
-
-
- gfx::Point3D mEyeTranslation;
- gfx::IntSize mRenderSize;
- JS::Heap<JSObject*> mOffset;
- RefPtr<VRFieldOfView> mFOV;
-};
-
-class VRDisplay final : public DOMEventTargetHelper
- , public nsIObserver
-{
-public:
- NS_DECL_ISUPPORTS_INHERITED
- NS_DECL_NSIOBSERVER
- NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(VRDisplay, DOMEventTargetHelper)
-
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-
- bool IsPresenting() const;
- bool IsConnected() const;
-
- VRDisplayCapabilities* Capabilities();
- VRStageParameters* GetStageParameters();
-
- uint32_t DisplayId() const { return mDisplayId; }
- void GetDisplayName(nsAString& aDisplayName) const { aDisplayName = mDisplayName; }
-
- static bool RefreshVRDisplays(uint64_t aWindowId);
- static void UpdateVRDisplays(nsTArray<RefPtr<VRDisplay> >& aDisplays,
- nsPIDOMWindowInner* aWindow);
-
- gfx::VRDisplayClient *GetClient() {
- return mClient;
- }
-
- virtual already_AddRefed<VREyeParameters> GetEyeParameters(VREye aEye);
-
- bool GetFrameData(VRFrameData& aFrameData);
- already_AddRefed<VRPose> GetPose();
- void ResetPose();
-
- double DepthNear() {
- return mDepthNear;
- }
-
- double DepthFar() {
- return mDepthFar;
- }
-
- void SetDepthNear(double aDepthNear) {
- // XXX When we start sending depth buffers to VRLayer's we will want
- // to communicate this with the VRDisplayHost
- mDepthNear = aDepthNear;
- }
-
- void SetDepthFar(double aDepthFar) {
- // XXX When we start sending depth buffers to VRLayer's we will want
- // to communicate this with the VRDisplayHost
- mDepthFar = aDepthFar;
- }
-
- already_AddRefed<Promise> RequestPresent(const nsTArray<VRLayer>& aLayers, ErrorResult& aRv);
- already_AddRefed<Promise> ExitPresent(ErrorResult& aRv);
- void GetLayers(nsTArray<VRLayer>& result);
- void SubmitFrame();
-
- int32_t RequestAnimationFrame(mozilla::dom::FrameRequestCallback& aCallback,
- mozilla::ErrorResult& aError);
- void CancelAnimationFrame(int32_t aHandle, mozilla::ErrorResult& aError);
-
-protected:
- VRDisplay(nsPIDOMWindowInner* aWindow, gfx::VRDisplayClient* aClient);
- virtual ~VRDisplay();
- virtual void LastRelease() override;
-
- void ExitPresentInternal();
- void UpdateFrameInfo();
-
- RefPtr<gfx::VRDisplayClient> mClient;
-
- uint32_t mDisplayId;
- nsString mDisplayName;
-
- RefPtr<VRDisplayCapabilities> mCapabilities;
- RefPtr<VRStageParameters> mStageParameters;
-
- double mDepthNear;
- double mDepthFar;
-
- RefPtr<gfx::VRDisplayPresentation> mPresentation;
-
- /**
- * The WebVR 1.1 spec Requires that VRDisplay.getPose and VRDisplay.getFrameData
- * must return the same values until the next VRDisplay.submitFrame.
- * mFrameInfo is updated only on the first call to either function within one
- * frame. Subsequent calls before the next SubmitFrame or ExitPresent call
- * will use these cached values.
- */
- VRFrameInfo mFrameInfo;
-};
-
-} // namespace dom
-} // namespace mozilla
-
-#endif
diff --git a/dom/vr/VREventObserver.cpp b/dom/vr/VREventObserver.cpp
deleted file mode 100644
index 1b6d1b978..000000000
--- a/dom/vr/VREventObserver.cpp
+++ /dev/null
@@ -1,79 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=8 sts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
-* License, v. 2.0. If a copy of the MPL was not distributed with this
-* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "VREventObserver.h"
-
-#include "nsContentUtils.h"
-#include "nsGlobalWindow.h"
-#include "VRManagerChild.h"
-
-namespace mozilla {
-namespace dom {
-
-using namespace gfx;
-
-/**
- * This class is used by nsGlobalWindow to implement window.onvrdisplayconnected,
- * window.onvrdisplaydisconnected, and window.onvrdisplaypresentchange.
- */
-VREventObserver::VREventObserver(nsGlobalWindow* aGlobalWindow)
- : mWindow(aGlobalWindow)
-{
- MOZ_ASSERT(aGlobalWindow && aGlobalWindow->IsInnerWindow());
-
- VRManagerChild* vmc = VRManagerChild::Get();
- if (vmc) {
- vmc->AddListener(this);
- }
-}
-
-VREventObserver::~VREventObserver()
-{
- VRManagerChild* vmc = VRManagerChild::Get();
- if (vmc) {
- vmc->RemoveListener(this);
- }
-}
-
-void
-VREventObserver::NotifyVRDisplayConnect()
-{
- /**
- * We do not call nsGlobalWindow::NotifyActiveVRDisplaysChanged here, as we
- * can assume that a newly enumerated display is not presenting WebVR
- * content.
- */
- if (mWindow->AsInner()->IsCurrentInnerWindow()) {
- MOZ_ASSERT(nsContentUtils::IsSafeToRunScript());
- mWindow->GetOuterWindow()->DispatchCustomEvent(
- NS_LITERAL_STRING("vrdisplayconnected"));
- }
-}
-
-void
-VREventObserver::NotifyVRDisplayDisconnect()
-{
- if (mWindow->AsInner()->IsCurrentInnerWindow()) {
- mWindow->NotifyActiveVRDisplaysChanged();
- MOZ_ASSERT(nsContentUtils::IsSafeToRunScript());
- mWindow->GetOuterWindow()->DispatchCustomEvent(
- NS_LITERAL_STRING("vrdisplaydisconnected"));
- }
-}
-
-void
-VREventObserver::NotifyVRDisplayPresentChange()
-{
- if (mWindow->AsInner()->IsCurrentInnerWindow()) {
- mWindow->NotifyActiveVRDisplaysChanged();
- MOZ_ASSERT(nsContentUtils::IsSafeToRunScript());
- mWindow->GetOuterWindow()->DispatchCustomEvent(
- NS_LITERAL_STRING("vrdisplaypresentchange"));
- }
-}
-
-} // namespace dom
-} // namespace mozilla
diff --git a/dom/vr/VREventObserver.h b/dom/vr/VREventObserver.h
deleted file mode 100644
index a30bb5960..000000000
--- a/dom/vr/VREventObserver.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim: set ts=8 sts=2 et sw=2 tw=80: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
-* License, v. 2.0. If a copy of the MPL was not distributed with this file,
-* You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef mozilla_dom_VREventObserver_h
-#define mozilla_dom_VREventObserver_h
-
-class nsGlobalWindow;
-
-namespace mozilla {
-namespace dom {
-
-class VREventObserver final
-{
-public:
- ~VREventObserver();
- explicit VREventObserver(nsGlobalWindow* aGlobalWindow);
-
- void NotifyVRDisplayConnect();
- void NotifyVRDisplayDisconnect();
- void NotifyVRDisplayPresentChange();
-
-private:
- // Weak pointer, instance is owned by mWindow.
- nsGlobalWindow* MOZ_NON_OWNING_REF mWindow;
-};
-
-} // namespace dom
-} // namespace mozilla
-
-#endif // mozilla_dom_VREventObserver_h
diff --git a/dom/vr/moz.build b/dom/vr/moz.build
deleted file mode 100644
index a4aa8d69b..000000000
--- a/dom/vr/moz.build
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-EXPORTS.mozilla.dom += [
- 'VRDisplay.h',
- 'VREventObserver.h',
- ]
-
-UNIFIED_SOURCES = [
- 'VRDisplay.cpp',
- 'VREventObserver.cpp',
- ]
-
-include('/ipc/chromium/chromium-config.mozbuild')
-
-FINAL_LIBRARY = 'xul'
-LOCAL_INCLUDES += [
- '/dom/base'
-]
diff --git a/dom/webidl/Navigator.webidl b/dom/webidl/Navigator.webidl
index 5452f3247..c353e8be7 100644
--- a/dom/webidl/Navigator.webidl
+++ b/dom/webidl/Navigator.webidl
@@ -268,14 +268,6 @@ partial interface Navigator {
};
#endif // MOZ_GAMEPAD

-partial interface Navigator {
- [Throws, Pref="dom.vr.enabled"]
- Promise<sequence<VRDisplay>> getVRDisplays();
- // TODO: Use FrozenArray once available. (Bug 1236777)
- [Frozen, Cached, Pure, Pref="dom.vr.enabled"]
- readonly attribute sequence<VRDisplay> activeVRDisplays;
-};
-
#ifdef MOZ_TIME_MANAGER
// nsIDOMMozNavigatorTime
partial interface Navigator {
diff --git a/dom/webidl/VRDisplay.webidl b/dom/webidl/VRDisplay.webidl
deleted file mode 100644
index 63ebd1205..000000000
--- a/dom/webidl/VRDisplay.webidl
+++ /dev/null
@@ -1,286 +0,0 @@
-/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-enum VREye {
- "left",
- "right"
-};
-
-[Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRFieldOfView {
- readonly attribute double upDegrees;
- readonly attribute double rightDegrees;
- readonly attribute double downDegrees;
- readonly attribute double leftDegrees;
-};
-
-typedef (HTMLCanvasElement or OffscreenCanvas) VRSource;
-
-dictionary VRLayer {
- /**
- * XXX - When WebVR in WebWorkers is implemented, HTMLCanvasElement below
- * should be replaced with VRSource.
- */
- HTMLCanvasElement? source = null;
-
- /**
- * The left and right viewports contain 4 values defining the viewport
- * rectangles within the canvas to present to the eye in UV space.
- * [0] left offset of the viewport (0.0 - 1.0)
- * [1] top offset of the viewport (0.0 - 1.0)
- * [2] width of the viewport (0.0 - 1.0)
- * [3] height of the viewport (0.0 - 1.0)
- *
- * When no values are passed, they will be processed as though the left
- * and right sides of the viewport were passed:
- *
- * leftBounds: [0.0, 0.0, 0.5, 1.0]
- * rightBounds: [0.5, 0.0, 0.5, 1.0]
- */
- sequence<float> leftBounds = [];
- sequence<float> rightBounds = [];
-};
-
-/**
- * Values describing the capabilities of a VRDisplay.
- * These are expected to be static per-device/per-user.
- */
-[Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRDisplayCapabilities {
- /**
- * hasPosition is true if the VRDisplay is capable of tracking its position.
- */
- readonly attribute boolean hasPosition;
-
- /**
- * hasOrientation is true if the VRDisplay is capable of tracking its orientation.
- */
- readonly attribute boolean hasOrientation;
-
- /**
- * Whether the VRDisplay is separate from the device’s
- * primary display. If presenting VR content will obscure
- * other content on the device, this should be false. When
- * false, the application should not attempt to mirror VR content
- * or update non-VR UI because that content will not be visible.
- */
- readonly attribute boolean hasExternalDisplay;
-
- /**
- * Whether the VRDisplay is capable of presenting content to an HMD or similar device.
- * Can be used to indicate “magic window” devices that are capable of 6DoF tracking but for
- * which requestPresent is not meaningful. If false then calls to requestPresent should
- * always fail, and getEyeParameters should return null.
- */
- readonly attribute boolean canPresent;
-
- /**
- * Indicates the maximum length of the array that requestPresent() will accept. MUST be 1 if
- canPresent is true, 0 otherwise.
- */
- readonly attribute unsigned long maxLayers;
-};
-
-/**
- * Values describing the the stage / play area for devices
- * that support room-scale experiences.
- */
-[Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRStageParameters {
- /**
- * A 16-element array containing the components of a column-major 4x4
- * affine transform matrix. This matrix transforms the sitting-space position
- * returned by get{Immediate}Pose() to a standing-space position.
- */
- [Throws] readonly attribute Float32Array sittingToStandingTransform;
-
- /**
- * Dimensions of the play-area bounds. The bounds are defined
- * as an axis-aligned rectangle on the floor.
- * The center of the rectangle is at (0,0,0) in standing-space
- * coordinates.
- * These bounds are defined for safety purposes.
- * Content should not require the user to move beyond these
- * bounds; however, it is possible for the user to ignore
- * the bounds resulting in position values outside of
- * this rectangle.
- */
- readonly attribute float sizeX;
- readonly attribute float sizeZ;
-};
-
-[Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRPose
-{
- /**
- * position, linearVelocity, and linearAcceleration are 3-component vectors.
- * position is relative to a sitting space. Transforming this point with
- * VRStageParameters.sittingToStandingTransform converts this to standing space.
- */
- [Constant, Throws] readonly attribute Float32Array? position;
- [Constant, Throws] readonly attribute Float32Array? linearVelocity;
- [Constant, Throws] readonly attribute Float32Array? linearAcceleration;
-
- /* orientation is a 4-entry array representing the components of a quaternion. */
- [Constant, Throws] readonly attribute Float32Array? orientation;
- /* angularVelocity and angularAcceleration are the components of 3-dimensional vectors. */
- [Constant, Throws] readonly attribute Float32Array? angularVelocity;
- [Constant, Throws] readonly attribute Float32Array? angularAcceleration;
-};
-
-[Constructor,
- Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRFrameData {
- readonly attribute DOMHighResTimeStamp timestamp;
-
- [Throws, Pure] readonly attribute Float32Array leftProjectionMatrix;
- [Throws, Pure] readonly attribute Float32Array leftViewMatrix;
-
- [Throws, Pure] readonly attribute Float32Array rightProjectionMatrix;
- [Throws, Pure] readonly attribute Float32Array rightViewMatrix;
-
- [Pure] readonly attribute VRPose pose;
-};
-
-[Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VREyeParameters {
- /**
- * offset is a 3-component vector representing an offset to
- * translate the eye. This value may vary from frame
- * to frame if the user adjusts their headset ipd.
- */
- [Constant, Throws] readonly attribute Float32Array offset;
-
- /* These values may vary as the user adjusts their headset ipd. */
- [Constant] readonly attribute VRFieldOfView fieldOfView;
-
- /**
- * renderWidth and renderHeight specify the recommended render target
- * size of each eye viewport, in pixels. If multiple eyes are rendered
- * in a single render target, then the render target should be made large
- * enough to fit both viewports.
- */
- [Constant] readonly attribute unsigned long renderWidth;
- [Constant] readonly attribute unsigned long renderHeight;
-};
-
-[Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRDisplay : EventTarget {
- readonly attribute boolean isConnected;
- readonly attribute boolean isPresenting;
-
- /**
- * Dictionary of capabilities describing the VRDisplay.
- */
- [Constant] readonly attribute VRDisplayCapabilities capabilities;
-
- /**
- * If this VRDisplay supports room-scale experiences, the optional
- * stage attribute contains details on the room-scale parameters.
- */
- readonly attribute VRStageParameters? stageParameters;
-
- /* Return the current VREyeParameters for the given eye. */
- VREyeParameters getEyeParameters(VREye whichEye);
-
- /**
- * An identifier for this distinct VRDisplay. Used as an
- * association point in the Gamepad API.
- */
- [Constant] readonly attribute unsigned long displayId;
-
- /**
- * A display name, a user-readable name identifying it.
- */
- [Constant] readonly attribute DOMString displayName;
-
- /**
- * Populates the passed VRFrameData with the information required to render
- * the current frame.
- */
- boolean getFrameData(VRFrameData frameData);
-
- /**
- * Return a VRPose containing the future predicted pose of the VRDisplay
- * when the current frame will be presented. Subsequent calls to getPose()
- * MUST return a VRPose with the same values until the next call to
- * submitFrame().
- *
- * The VRPose will contain the position, orientation, velocity,
- * and acceleration of each of these properties.
- */
- [NewObject] VRPose getPose();
-
- /**
- * Reset the pose for this display, treating its current position and
- * orientation as the "origin/zero" values. VRPose.position,
- * VRPose.orientation, and VRStageParameters.sittingToStandingTransform may be
- * updated when calling resetPose(). This should be called in only
- * sitting-space experiences.
- */
- void resetPose();
-
- /**
- * z-depth defining the near plane of the eye view frustum
- * enables mapping of values in the render target depth
- * attachment to scene coordinates. Initially set to 0.01.
- */
- attribute double depthNear;
-
- /**
- * z-depth defining the far plane of the eye view frustum
- * enables mapping of values in the render target depth
- * attachment to scene coordinates. Initially set to 10000.0.
- */
- attribute double depthFar;
-
- /**
- * The callback passed to `requestAnimationFrame` will be called
- * any time a new frame should be rendered. When the VRDisplay is
- * presenting the callback will be called at the native refresh
- * rate of the HMD. When not presenting this function acts
- * identically to how window.requestAnimationFrame acts. Content should
- * make no assumptions of frame rate or vsync behavior as the HMD runs
- * asynchronously from other displays and at differing refresh rates.
- */
- [Throws] long requestAnimationFrame(FrameRequestCallback callback);
-
- /**
- * Passing the value returned by `requestAnimationFrame` to
- * `cancelAnimationFrame` will unregister the callback.
- */
- [Throws] void cancelAnimationFrame(long handle);
-
- /**
- * Begin presenting to the VRDisplay. Must be called in response to a user gesture.
- * Repeat calls while already presenting will update the VRLayers being displayed.
- */
- [Throws] Promise<void> requestPresent(sequence<VRLayer> layers);
-
- /**
- * Stops presenting to the VRDisplay.
- */
- [Throws] Promise<void> exitPresent();
-
- /**
- * Get the layers currently being presented.
- */
- sequence<VRLayer> getLayers();
-
- /**
- * The VRLayer provided to the VRDisplay will be captured and presented
- * in the HMD. Calling this function has the same effect on the source
- * canvas as any other operation that uses its source image, and canvases
- * created without preserveDrawingBuffer set to true will be cleared.
- */
- void submitFrame();
-};
diff --git a/dom/webidl/moz.build b/dom/webidl/moz.build
index 4c2567a1c..06fea2f20 100644
--- a/dom/webidl/moz.build
+++ b/dom/webidl/moz.build
@@ -555,7 +555,6 @@ WEBIDL_FILES = [
'VideoStreamTrack.webidl',
'VideoTrack.webidl',
'VideoTrackList.webidl',
- 'VRDisplay.webidl',
'VTTCue.webidl',
'VTTRegion.webidl',
'WaveShaperNode.webidl',