Diffstat (limited to 'media/webrtc')
8 files changed, 20 insertions, 80 deletions
diff --git a/media/webrtc/moz.build b/media/webrtc/moz.build
index 986655c53..4361273b4 100644
--- a/media/webrtc/moz.build
+++ b/media/webrtc/moz.build
@@ -101,22 +101,21 @@ if CONFIG['MOZ_WEBRTC_SIGNALING']:
 else:
     CXXFLAGS += ['-validate-charset-']
 
-if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'gonk':
-    GYP_DIRS += ['trunk/testing']
-    GYP_DIRS['trunk/testing'].input = 'trunk/testing/gtest.gyp'
-    GYP_DIRS['trunk/testing'].variables = gyp_vars
-    # We allow warnings for third-party code that can be updated from upstream.
-    GYP_DIRS['trunk/testing'].sandbox_vars['ALLOW_COMPILER_WARNINGS'] = True
-    GYP_DIRS['trunk/testing'].non_unified_sources += webrtc_non_unified_sources
+GYP_DIRS += ['trunk/testing']
+GYP_DIRS['trunk/testing'].input = 'trunk/testing/gtest.gyp'
+GYP_DIRS['trunk/testing'].variables = gyp_vars
+# We allow warnings for third-party code that can be updated from upstream.
+GYP_DIRS['trunk/testing'].sandbox_vars['ALLOW_COMPILER_WARNINGS'] = True
+GYP_DIRS['trunk/testing'].non_unified_sources += webrtc_non_unified_sources
 
-    if CONFIG['MOZ_WEBRTC_SIGNALING']:
-        GYP_DIRS += ['signalingtest']
-        GYP_DIRS['signalingtest'].input = 'signaling/signaling.gyp'
-        GYP_DIRS['signalingtest'].variables = gyp_vars.copy()
-        GYP_DIRS['signalingtest'].variables.update(
-            build_for_test=1,
-            moz_webrtc_mediacodec=0,
-            build_for_standalone=0
-        )
-        GYP_DIRS['signalingtest'].sandbox_vars['ALLOW_COMPILER_WARNINGS'] = True
-        GYP_DIRS['signalingtest'].non_unified_sources += signaling_non_unified_sources
+if CONFIG['MOZ_WEBRTC_SIGNALING']:
+    GYP_DIRS += ['signalingtest']
+    GYP_DIRS['signalingtest'].input = 'signaling/signaling.gyp'
+    GYP_DIRS['signalingtest'].variables = gyp_vars.copy()
+    GYP_DIRS['signalingtest'].variables.update(
+        build_for_test=1,
+        moz_webrtc_mediacodec=0,
+        build_for_standalone=0
+    )
+    GYP_DIRS['signalingtest'].sandbox_vars['ALLOW_COMPILER_WARNINGS'] = True
+    GYP_DIRS['signalingtest'].non_unified_sources += signaling_non_unified_sources
diff --git a/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp b/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
index dc052f4e0..888b87857 100644
--- a/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
@@ -26,9 +26,6 @@ using namespace android;
 #include "runnable_utils.h"
 
 // Gecko
-#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 21
-#include "GonkBufferQueueProducer.h"
-#endif
 #include "GonkNativeWindow.h"
 #include "GrallocImages.h"
 #include "mozilla/Atomics.h"
@@ -326,30 +323,16 @@ public:
     mHeight = aHeight;
 
     sp<Surface> surface = nullptr;
-#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 21
-    sp<IGraphicBufferProducer> producer;
-    sp<IGonkGraphicBufferConsumer> consumer;
-    GonkBufferQueue::createBufferQueue(&producer, &consumer);
-    mNativeWindow = new GonkNativeWindow(consumer);
-#else
     mNativeWindow = new GonkNativeWindow();
-#endif
     if (mNativeWindow.get()) {
       // listen to buffers queued by MediaCodec::RenderOutputBufferAndRelease().
      mNativeWindow->setNewFrameCallback(this);
       // XXX remove buffer changes after a better solution lands - bug 1009420
-#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 21
-      static_cast<GonkBufferQueueProducer*>(producer.get())->setSynchronousMode(false);
-      // More spare buffers to avoid OMX decoder waiting for native window
-      consumer->setMaxAcquiredBufferCount(WEBRTC_OMX_H264_MIN_DECODE_BUFFERS);
-      surface = new Surface(producer);
-#else
       sp<GonkBufferQueue> bq = mNativeWindow->getBufferQueue();
       bq->setSynchronousMode(false);
       // More spare buffers to avoid OMX decoder waiting for native window
       bq->setMaxAcquiredBufferCount(WEBRTC_OMX_H264_MIN_DECODE_BUFFERS);
       surface = new Surface(bq);
-#endif
     }
 
     status_t result = mCodec->configure(config, surface, nullptr, 0);
     if (result == OK) {
diff --git a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
index 586876406..d47a10312 100644
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -2217,11 +2217,7 @@ public:
 #if defined(MOZILLA_INTERNAL_API)
     if (buffer) {
       // Create a video frame using |buffer|.
-#ifdef MOZ_WIDGET_GONK
-      RefPtr<PlanarYCbCrImage> yuvImage = new GrallocImage();
-#else
       RefPtr<PlanarYCbCrImage> yuvImage = image_container_->CreatePlanarYCbCrImage();
-#endif
       uint8_t* frame = const_cast<uint8_t*>(static_cast<const uint8_t*>
                                             (buffer));
       PlanarYCbCrData yuvData;
diff --git a/media/webrtc/signaling/test/moz.build b/media/webrtc/signaling/test/moz.build
index 4d8704de4..aac331b1b 100644
--- a/media/webrtc/signaling/test/moz.build
+++ b/media/webrtc/signaling/test/moz.build
@@ -5,7 +5,7 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # TODO: bug 1172551 - get these tests working on iOS
-if CONFIG['OS_TARGET'] != 'WINNT' and CONFIG['MOZ_WIDGET_TOOLKIT'] != 'gonk' and CONFIG['MOZ_WIDGET_TOOLKIT'] != 'uikit':
+if CONFIG['OS_TARGET'] != 'WINNT' and CONFIG['MOZ_WIDGET_TOOLKIT'] != 'uikit':
     GeckoCppUnitTests([
         'jsep_session_unittest',
         'jsep_track_unittest',
diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_utility_android.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_utility_android.h
index fd05a61ad..1c1ce1ca6 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_utility_android.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_utility_android.h
@@ -15,9 +15,7 @@
 #ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
 #define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
 
-#if !defined(MOZ_WIDGET_GONK)
 #include <jni.h>
-#endif
 
 #include "webrtc/base/checks.h"
 #include "webrtc/modules/audio_device/audio_device_utility.h"
diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.cc b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.cc
index d0a46afde..5424ba3de 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.cc
@@ -9,18 +9,14 @@
  */
 
 #include "webrtc/modules/audio_device/android/audio_manager.h"
 
-#if !defined(MOZ_WIDGET_GONK)
 #include "AndroidJNIWrapper.h"
-#endif
 #include <android/log.h>
 
 #include "webrtc/base/arraysize.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/modules/audio_device/android/audio_common.h"
-#if !defined(MOZ_WIDGET_GONK)
 #include "webrtc/modules/utility/interface/helpers_android.h"
-#endif
 
 #define TAG "AudioManager"
 #define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
@@ -31,14 +27,11 @@
 
 namespace webrtc {
 
-#if !defined(MOZ_WIDGET_GONK)
 static JavaVM* g_jvm = NULL;
 static jobject g_context = NULL;
 static jclass g_audio_manager_class = NULL;
-#endif
 
 void AudioManager::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
-#if !defined(MOZ_WIDGET_GONK)
   ALOGD("SetAndroidAudioDeviceObjects%s", GetThreadInfo().c_str());
 
   CHECK(jvm);
@@ -65,11 +58,9 @@ void AudioManager::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
   jni->RegisterNatives(g_audio_manager_class,
                        native_methods, arraysize(native_methods));
   CHECK_EXCEPTION(jni) << "Error during RegisterNatives";
-#endif
 }
 
 void AudioManager::ClearAndroidAudioDeviceObjects() {
-#if !defined(MOZ_WIDGET_GONK)
   ALOGD("ClearAndroidAudioDeviceObjects%s", GetThreadInfo().c_str());
   JNIEnv* jni = GetEnv(g_jvm);
   CHECK(jni) << "AttachCurrentThread must be called on this tread";
@@ -80,36 +71,28 @@ void AudioManager::ClearAndroidAudioDeviceObjects() {
   DeleteGlobalRef(jni, g_context);
   g_context = NULL;
   g_jvm = NULL;
-#endif
 }
 
 AudioManager::AudioManager()
     : initialized_(false) {
-#if !defined(MOZ_WIDGET_GONK)
   j_audio_manager_ = NULL;
   ALOGD("ctor%s", GetThreadInfo().c_str());
-#endif
   CHECK(HasDeviceObjects());
   CreateJavaInstance();
 }
 
 AudioManager::~AudioManager() {
-#if !defined(MOZ_WIDGET_GONK)
   ALOGD("~dtor%s", GetThreadInfo().c_str());
-#endif
   DCHECK(thread_checker_.CalledOnValidThread());
   Close();
-#if !defined(MOZ_WIDGET_GONK)
   AttachThreadScoped ats(g_jvm);
   JNIEnv* jni = ats.env();
   jni->DeleteGlobalRef(j_audio_manager_);
   j_audio_manager_ = NULL;
-#endif
   DCHECK(!initialized_);
 }
 
 bool AudioManager::Init() {
-#if !defined(MOZ_WIDGET_GONK)
   ALOGD("Init%s", GetThreadInfo().c_str());
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK(!initialized_);
@@ -122,13 +105,11 @@ bool AudioManager::Init() {
     ALOGE("init failed!");
     return false;
   }
-#endif
   initialized_ = true;
   return true;
 }
 
 bool AudioManager::Close() {
-#if !defined(MOZ_WIDGET_GONK)
   ALOGD("Close%s", GetThreadInfo().c_str());
   DCHECK(thread_checker_.CalledOnValidThread());
   if (!initialized_)
@@ -139,12 +120,10 @@ bool AudioManager::Close() {
       jni, g_audio_manager_class, "dispose", "()V");
   jni->CallVoidMethod(j_audio_manager_, disposeID);
   CHECK_EXCEPTION(jni);
-#endif
   initialized_ = false;
   return true;
 }
 
-#if !defined(MOZ_WIDGET_GONK)
 void JNICALL AudioManager::CacheAudioParameters(JNIEnv* env, jobject obj,
     jint sample_rate, jint channels, jlong nativeAudioManager) {
   webrtc::AudioManager* this_object =
@@ -162,7 +141,6 @@ void AudioManager::OnCacheAudioParameters(
   playout_parameters_.reset(sample_rate, channels);
   record_parameters_.reset(sample_rate, channels);
 }
-#endif
 
 AudioParameters AudioManager::GetPlayoutAudioParameters() const {
   CHECK(playout_parameters_.is_valid());
@@ -175,15 +153,10 @@ AudioParameters AudioManager::GetRecordAudioParameters() const {
 }
 
 bool AudioManager::HasDeviceObjects() {
-#if !defined(MOZ_WIDGET_GONK)
   return (g_jvm && g_context && g_audio_manager_class);
-#else
-  return true;
-#endif
 }
 
 void AudioManager::CreateJavaInstance() {
-#if !defined(MOZ_WIDGET_GONK)
   ALOGD("CreateJavaInstance");
   AttachThreadScoped ats(g_jvm);
   JNIEnv* jni = ats.env();
@@ -198,7 +171,6 @@ void AudioManager::CreateJavaInstance() {
   j_audio_manager_ = jni->NewGlobalRef(j_audio_manager_);
   CHECK_EXCEPTION(jni) << "Error during NewGlobalRef";
   CHECK(j_audio_manager_);
-#endif
 }
 
 }  // namespace webrtc
diff --git a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.h b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.h
index a85acb9bb..a6c712e11 100644
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_manager.h
@@ -11,17 +11,13 @@
 #ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
 #define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
 
-#if !defined(MOZ_WIDGET_GONK)
 #include <jni.h>
-#endif
 
 #include "webrtc/base/thread_checker.h"
 #include "webrtc/modules/audio_device/android/audio_common.h"
 #include "webrtc/modules/audio_device/include/audio_device_defines.h"
 #include "webrtc/modules/audio_device/audio_device_generic.h"
-#if !defined(MOZ_WIDGET_GONK)
 #include "webrtc/modules/utility/interface/helpers_android.h"
-#endif
 
 namespace webrtc {
 
@@ -100,14 +96,12 @@ class AudioManager {
   bool initialized() const { return initialized_; }
 
  private:
-#if !defined(MOZ_WIDGET_GONK)
   // Called from Java side so we can cache the native audio parameters.
   // This method will be called by the WebRtcAudioManager constructor, i.e.
   // on the same thread that this object is created on.
   static void JNICALL CacheAudioParameters(JNIEnv* env, jobject obj,
       jint sample_rate, jint channels, jlong nativeAudioManager);
   void OnCacheAudioParameters(JNIEnv* env, jint sample_rate, jint channels);
-#endif
 
   // Returns true if SetAndroidAudioDeviceObjects() has been called
   // successfully.
@@ -121,10 +115,8 @@ class AudioManager {
   // other methods are called from the same thread.
   rtc::ThreadChecker thread_checker_;
 
-#if !defined(MOZ_WIDGET_GONK)
   // The Java WebRtcAudioManager instance.
   jobject j_audio_manager_;
-#endif
 
   // Set to true by Init() and false by Close().
   bool initialized_;
diff --git a/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h b/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h
index 4e0b15355..578622411 100644
--- a/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h
+++ b/media/webrtc/trunk/webrtc/video_engine/include/vie_base.h
@@ -21,7 +21,7 @@
 
 #include "webrtc/common_types.h"
 
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) && !defined(MOZ_WIDGET_GONK)
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
 #include <jni.h>
 #endif
 
@@ -156,7 +156,7 @@ class WEBRTC_DLLEXPORT VideoEngine {
   // user receives callbacks for generated trace messages.
   static int SetTraceCallback(TraceCallback* callback);
 
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) && !defined(MOZ_WIDGET_GONK)
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
   // Android specific.
   static int SetAndroidObjects(JavaVM* java_vm);
 #endif