Diffstat (limited to 'js/src/devtools')
-rw-r--r--  js/src/devtools/Instruments.cpp | 235
-rw-r--r--  js/src/devtools/Instruments.h | 23
-rw-r--r--  js/src/devtools/automation/arm64-jstests-slow.txt | 51
-rwxr-xr-x  js/src/devtools/automation/autospider.py | 406
-rwxr-xr-x  js/src/devtools/automation/autospider.sh | 3
-rw-r--r--  js/src/devtools/automation/cgc-jittest-timeouts.txt | 47
-rw-r--r--  js/src/devtools/automation/cgc-jstests-slow.txt | 62
-rw-r--r--  js/src/devtools/automation/macbuildenv.sh | 14
-rw-r--r--  js/src/devtools/automation/variants/arm-sim | 6
-rw-r--r--  js/src/devtools/automation/variants/arm-sim-osx | 6
-rw-r--r--  js/src/devtools/automation/variants/arm64-sim | 10
-rw-r--r--  js/src/devtools/automation/variants/asan | 9
-rw-r--r--  js/src/devtools/automation/variants/compacting | 14
-rw-r--r--  js/src/devtools/automation/variants/dtrace | 5
-rw-r--r--  js/src/devtools/automation/variants/msan | 13
-rw-r--r--  js/src/devtools/automation/variants/nonunified | 10
-rw-r--r--  js/src/devtools/automation/variants/plain | 7
-rw-r--r--  js/src/devtools/automation/variants/plaindebug | 7
-rw-r--r--  js/src/devtools/automation/variants/rootanalysis | 9
-rw-r--r--  js/src/devtools/automation/variants/tsan | 13
-rw-r--r--  js/src/devtools/automation/variants/warnaserr | 4
-rw-r--r--  js/src/devtools/automation/variants/warnaserrdebug | 4
-rw-r--r--  js/src/devtools/automation/winbuildenv.sh | 36
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/bigTextNodes.js | 25
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/events.js | 25
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/expandoEvents.js | 26
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayArrayLiteral.js | 16
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayBuffer.js | 23
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayFgFinalized.js | 23
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayLargeArray.js | 18
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayLargeObject.js | 23
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayNewObject.js | 16
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayObjectLiteral.js | 16
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/globalArrayReallocArray.js | 18
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/largeArrayPropertyAndElements.js | 34
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/noAllocation.js | 6
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/pairCyclicWeakMap.js | 33
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/propertyTreeSplitting.js | 25
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/selfCyclicWeakMap.js | 29
-rw-r--r--  js/src/devtools/gc-ubench/benchmarks/textNodes.js | 23
-rw-r--r--  js/src/devtools/gc-ubench/harness.js | 661
-rw-r--r--  js/src/devtools/gc-ubench/index.html | 84
-rw-r--r--  js/src/devtools/gc/README.txt | 6
-rw-r--r--  js/src/devtools/gc/gc-test.py | 166
-rw-r--r--  js/src/devtools/gc/tests/clock.js | 35
-rw-r--r--  js/src/devtools/gc/tests/dslots.js | 26
-rw-r--r--  js/src/devtools/gc/tests/loops.js | 55
-rw-r--r--  js/src/devtools/gc/tests/objGraph.js | 37
-rw-r--r--  js/src/devtools/gctrace/Makefile | 6
-rw-r--r--  js/src/devtools/gctrace/gcstats.cpp | 873
-rw-r--r--  js/src/devtools/gnuplot/gcTimer.gnu | 24
-rw-r--r--  js/src/devtools/javascript-trace.d | 34
-rwxr-xr-x  js/src/devtools/octane-csv.sh | 76
-rwxr-xr-x  js/src/devtools/plot-octane.R | 38
-rwxr-xr-x  js/src/devtools/release/release-notes | 195
-rw-r--r--  js/src/devtools/rootAnalysis/CFG.js | 159
-rw-r--r--  js/src/devtools/rootAnalysis/Makefile.in | 79
-rw-r--r--  js/src/devtools/rootAnalysis/README.md | 64
-rwxr-xr-x  js/src/devtools/rootAnalysis/analyze.py | 298
-rw-r--r--  js/src/devtools/rootAnalysis/analyzeRoots.js | 871
-rw-r--r--  js/src/devtools/rootAnalysis/annotations.js | 404
-rwxr-xr-x  js/src/devtools/rootAnalysis/build.js | 11
-rw-r--r--  js/src/devtools/rootAnalysis/build/gcc-b2g.manifest | 11
-rw-r--r--  js/src/devtools/rootAnalysis/build/gcc.manifest | 19
-rw-r--r--  js/src/devtools/rootAnalysis/build/sixgill-b2g.manifest | 10
-rw-r--r--  js/src/devtools/rootAnalysis/build/sixgill.manifest | 10
-rw-r--r--  js/src/devtools/rootAnalysis/computeCallgraph.js | 435
-rw-r--r--  js/src/devtools/rootAnalysis/computeGCFunctions.js | 69
-rw-r--r--  js/src/devtools/rootAnalysis/computeGCTypes.js | 299
-rw-r--r--  js/src/devtools/rootAnalysis/expect.b2g.json | 3
-rw-r--r--  js/src/devtools/rootAnalysis/expect.browser.json | 3
-rw-r--r--  js/src/devtools/rootAnalysis/expect.shell.json | 3
-rwxr-xr-x  js/src/devtools/rootAnalysis/explain.py | 103
-rwxr-xr-x  js/src/devtools/rootAnalysis/gen-hazards.sh | 15
-rw-r--r--  js/src/devtools/rootAnalysis/loadCallgraph.js | 203
-rwxr-xr-x  js/src/devtools/rootAnalysis/run-analysis.sh | 4
-rw-r--r--  js/src/devtools/rootAnalysis/run-test.py | 89
-rwxr-xr-x  js/src/devtools/rootAnalysis/run_complete | 380
-rw-r--r--  js/src/devtools/rootAnalysis/t/exceptions/source.cpp | 42
-rw-r--r--  js/src/devtools/rootAnalysis/t/exceptions/test.py | 19
-rw-r--r--  js/src/devtools/rootAnalysis/t/hazards/source.cpp | 186
-rw-r--r--  js/src/devtools/rootAnalysis/t/hazards/test.py | 47
-rw-r--r--  js/src/devtools/rootAnalysis/t/sixgill-tree/source.cpp | 70
-rw-r--r--  js/src/devtools/rootAnalysis/t/sixgill-tree/test.py | 60
-rw-r--r--  js/src/devtools/rootAnalysis/t/sixgill.py | 63
-rw-r--r--  js/src/devtools/rootAnalysis/t/suppression/source.cpp | 64
-rw-r--r--  js/src/devtools/rootAnalysis/t/suppression/test.py | 23
-rw-r--r--  js/src/devtools/rootAnalysis/t/testlib.py | 120
-rw-r--r--  js/src/devtools/rootAnalysis/utility.js | 211
-rw-r--r--  js/src/devtools/sharkctl.cpp | 207
-rw-r--r--  js/src/devtools/sharkctl.h | 24
-rw-r--r--  js/src/devtools/vprof/manifest.mk | 7
-rw-r--r--  js/src/devtools/vprof/readme.txt | 97
-rw-r--r--  js/src/devtools/vprof/testVprofMT.c | 92
-rw-r--r--  js/src/devtools/vprof/vprof.cpp | 354
-rw-r--r--  js/src/devtools/vprof/vprof.h | 275
96 files changed, 9192 insertions, 0 deletions
diff --git a/js/src/devtools/Instruments.cpp b/js/src/devtools/Instruments.cpp
new file mode 100644
index 000000000..7a69cac07
--- /dev/null
+++ b/js/src/devtools/Instruments.cpp
@@ -0,0 +1,235 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "Instruments.h"
+#include "mozilla/Attributes.h"
+
+#ifdef __APPLE__
+
+#include <dlfcn.h>
+#include <CoreFoundation/CoreFoundation.h>
+#include <unistd.h>
+
+// There are now 2 paths to the DTPerformanceSession framework. We try to load
+// the one contained in /Applications/Xcode.app first, falling back to the one
+// contained in /Library/Developer/4.0/Instruments.
+#define DTPerformanceLibraryPath "/Applications/Xcode.app/Contents/Developer/Library/Frameworks/DTPerformanceSession.framework/Versions/Current/DTPerformanceSession"
+#define OldDTPerformanceLibraryPath "/Library/Developer/4.0/Instruments/Frameworks/DTPerformanceSession.framework/Versions/Current/DTPerformanceSession"
+
+extern "C" {
+
+typedef CFTypeRef DTPerformanceSessionRef;
+
+#define DTPerformanceSession_TimeProfiler "com.apple.instruments.dtps.timeprofiler"
+// DTPerformanceSession_Option_SamplingInterval is measured in microseconds
+#define DTPerformanceSession_Option_SamplingInterval "com.apple.instruments.dtps.option.samplinginterval"
+
+typedef void (*dtps_errorcallback_t)(CFStringRef, CFErrorRef);
+typedef DTPerformanceSessionRef (*DTPerformanceSessionCreateFunction)(CFStringRef, CFStringRef, CFDictionaryRef, CFErrorRef*);
+typedef bool (*DTPerformanceSessionAddInstrumentFunction)(DTPerformanceSessionRef, CFStringRef, CFDictionaryRef, dtps_errorcallback_t, CFErrorRef*);
+typedef bool (*DTPerformanceSessionIsRecordingFunction)(DTPerformanceSessionRef);
+typedef bool (*DTPerformanceSessionStartFunction)(DTPerformanceSessionRef, CFArrayRef, CFErrorRef*);
+typedef bool (*DTPerformanceSessionStopFunction)(DTPerformanceSessionRef, CFArrayRef, CFErrorRef*);
+typedef bool (*DTPerformanceSessionSaveFunction)(DTPerformanceSessionRef, CFStringRef, CFErrorRef*);
+
+} // extern "C"
+
+namespace Instruments {
+
+static const int kSamplingInterval = 20; // microseconds
+
+template<typename T>
+class AutoReleased
+{
+public:
+ MOZ_IMPLICIT AutoReleased(T aTypeRef) : mTypeRef(aTypeRef)
+ {
+ }
+ ~AutoReleased()
+ {
+ if (mTypeRef) {
+ CFRelease(mTypeRef);
+ }
+ }
+
+ operator T()
+ {
+ return mTypeRef;
+ }
+
+private:
+ T mTypeRef;
+};
+
+#define DTPERFORMANCE_SYMBOLS \
+ SYMBOL(DTPerformanceSessionCreate) \
+ SYMBOL(DTPerformanceSessionAddInstrument) \
+ SYMBOL(DTPerformanceSessionIsRecording) \
+ SYMBOL(DTPerformanceSessionStart) \
+ SYMBOL(DTPerformanceSessionStop) \
+ SYMBOL(DTPerformanceSessionSave)
+
+#define SYMBOL(_sym) \
+ _sym##Function _sym = nullptr;
+
+DTPERFORMANCE_SYMBOLS
+
+#undef SYMBOL
+
+void*
+LoadDTPerformanceLibraries(bool dontLoad)
+{
+ int flags = RTLD_LAZY | RTLD_LOCAL | RTLD_NODELETE;
+ if (dontLoad) {
+ flags |= RTLD_NOLOAD;
+ }
+
+ void* DTPerformanceLibrary = dlopen(DTPerformanceLibraryPath, flags);
+ if (!DTPerformanceLibrary) {
+ DTPerformanceLibrary = dlopen(OldDTPerformanceLibraryPath, flags);
+ }
+ return DTPerformanceLibrary;
+}
+
+bool
+LoadDTPerformanceLibrary()
+{
+ void* DTPerformanceLibrary = LoadDTPerformanceLibraries(true);
+ if (!DTPerformanceLibrary) {
+ DTPerformanceLibrary = LoadDTPerformanceLibraries(false);
+ if (!DTPerformanceLibrary) {
+ return false;
+ }
+ }
+
+#define SYMBOL(_sym) \
+ _sym = reinterpret_cast<_sym##Function>(dlsym(DTPerformanceLibrary, #_sym)); \
+ if (!_sym) { \
+ dlclose(DTPerformanceLibrary); \
+ DTPerformanceLibrary = nullptr; \
+ return false; \
+ }
+
+ DTPERFORMANCE_SYMBOLS
+
+#undef SYMBOL
+
+ dlclose(DTPerformanceLibrary);
+
+ return true;
+}
+
+static DTPerformanceSessionRef gSession;
+
+bool
+Error(CFErrorRef error)
+{
+ if (gSession) {
+ CFErrorRef unused = nullptr;
+ DTPerformanceSessionStop(gSession, nullptr, &unused);
+ CFRelease(gSession);
+ gSession = nullptr;
+ }
+#ifdef DEBUG
+ AutoReleased<CFDataRef> data =
+ CFStringCreateExternalRepresentation(nullptr,
+ CFErrorCopyDescription(error),
+ kCFStringEncodingUTF8, '?');
+ if (data != nullptr) {
+ printf("%.*s\n\n", (int)CFDataGetLength(data), CFDataGetBytePtr(data));
+ }
+#endif
+ return false;
+}
+
+bool
+Start(pid_t pid)
+{
+ if (gSession) {
+ return false;
+ }
+
+ if (!LoadDTPerformanceLibrary()) {
+ return false;
+ }
+
+ AutoReleased<CFStringRef> process =
+ CFStringCreateWithFormat(kCFAllocatorDefault, nullptr, CFSTR("%d"), pid);
+ if (!process) {
+ return false;
+ }
+ CFErrorRef error = nullptr;
+ gSession = DTPerformanceSessionCreate(nullptr, process, nullptr, &error);
+ if (!gSession) {
+ return Error(error);
+ }
+
+ AutoReleased<CFNumberRef> interval =
+ CFNumberCreate(0, kCFNumberIntType, &kSamplingInterval);
+ if (!interval) {
+ return false;
+ }
+ CFStringRef keys[1] = { CFSTR(DTPerformanceSession_Option_SamplingInterval) };
+ CFNumberRef values[1] = { interval };
+ AutoReleased<CFDictionaryRef> options =
+ CFDictionaryCreate(kCFAllocatorDefault, (const void**)keys,
+ (const void**)values, 1, &kCFTypeDictionaryKeyCallBacks,
+ &kCFTypeDictionaryValueCallBacks);
+ if (!options) {
+ return false;
+ }
+
+ if (!DTPerformanceSessionAddInstrument(gSession,
+ CFSTR(DTPerformanceSession_TimeProfiler),
+ options, nullptr, &error)) {
+ return Error(error);
+ }
+
+ return Resume();
+}
+
+void
+Pause()
+{
+ if (gSession && DTPerformanceSessionIsRecording(gSession)) {
+ CFErrorRef error = nullptr;
+ if (!DTPerformanceSessionStop(gSession, nullptr, &error)) {
+ Error(error);
+ }
+ }
+}
+
+bool
+Resume()
+{
+ if (!gSession) {
+ return false;
+ }
+
+ CFErrorRef error = nullptr;
+ return DTPerformanceSessionStart(gSession, nullptr, &error) ||
+ Error(error);
+}
+
+void
+Stop(const char* profileName)
+{
+ Pause();
+
+ CFErrorRef error = nullptr;
+ AutoReleased<CFStringRef> name =
+ CFStringCreateWithFormat(kCFAllocatorDefault, nullptr, CFSTR("%s%s"),
+ "/tmp/", profileName ? profileName : "mozilla");
+ if (!DTPerformanceSessionSave(gSession, name, &error)) {
+ Error(error);
+ return;
+ }
+
+ CFRelease(gSession);
+ gSession = nullptr;
+}
+
+} // namespace Instruments
+
+#endif /* __APPLE__ */
diff --git a/js/src/devtools/Instruments.h b/js/src/devtools/Instruments.h
new file mode 100644
index 000000000..c80d8ebbc
--- /dev/null
+++ b/js/src/devtools/Instruments.h
@@ -0,0 +1,23 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef devtools_Instruments_h
+#define devtools_Instruments_h
+
+#ifdef __APPLE__
+
+#include <unistd.h>
+
+namespace Instruments {
+
+bool Start(pid_t pid);
+void Pause();
+bool Resume();
+void Stop(const char* profileName);
+
+}
+
+#endif /* __APPLE__ */
+
+#endif /* devtools_Instruments_h */
diff --git a/js/src/devtools/automation/arm64-jstests-slow.txt b/js/src/devtools/automation/arm64-jstests-slow.txt
new file mode 100644
index 000000000..118097cdb
--- /dev/null
+++ b/js/src/devtools/automation/arm64-jstests-slow.txt
@@ -0,0 +1,51 @@
+ecma/Date/15.9.5.10-2.js
+ecma/Date/15.9.5.11-2.js
+ecma/Date/15.9.5.12-2.js
+ecma/Date/15.9.5.8.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-01-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-02-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-03-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-04-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-05-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-06-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-07-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-08-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-09-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-10-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-11-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-12-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-13-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-14-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-15-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-16-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-17-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-18-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-19-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-20-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-21-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-22-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-23-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-24-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-25-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-26-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-27-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-30-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-31-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-32-of-32.js
+ecma_5/Object/15.2.3.6-middle-redefinition-1-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-2-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-3-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-4-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-5-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-6-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-7-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-8-of-8.js
+ecma_5/Object/15.2.3.6-redefinition-1-of-4.js
+ecma_5/Object/15.2.3.6-redefinition-2-of-4.js
+ecma_5/Object/15.2.3.6-redefinition-3-of-4.js
+ecma_5/Object/15.2.3.6-redefinition-4-of-4.js
+ecma_6/Comprehensions/sudoku.js
+js1_8_5/extensions/clone-complex-object.js
+js1_8_5/reflect-parse/classes.js
+js1_8_5/reflect-parse/destructuring-variable-declarations.js
+js1_8_5/regress/no-array-comprehension-length-limit.js
diff --git a/js/src/devtools/automation/autospider.py b/js/src/devtools/automation/autospider.py
new file mode 100755
index 000000000..030d8c98d
--- /dev/null
+++ b/js/src/devtools/automation/autospider.py
@@ -0,0 +1,406 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import logging
+import re
+import os
+import platform
+import posixpath
+import shutil
+import subprocess
+import sys
+
+from collections import Counter, namedtuple
+from os import environ as env
+from subprocess import Popen
+from threading import Timer
+
+Dirs = namedtuple('Dirs', ['scripts', 'js_src', 'source', 'tooltool'])
+
+
+def directories(pathmodule, cwd, fixup=lambda s: s):
+ scripts = pathmodule.join(fixup(cwd), fixup(pathmodule.dirname(__file__)))
+ js_src = pathmodule.abspath(pathmodule.join(scripts, "..", ".."))
+ source = pathmodule.abspath(pathmodule.join(js_src, "..", ".."))
+ tooltool = pathmodule.abspath(env.get('TOOLTOOL_CHECKOUT',
+ pathmodule.join(source, "..")))
+ return Dirs(scripts, js_src, source, tooltool)
+
+# Some scripts will be called with sh, which cannot use backslashed
+# paths. So for direct subprocess.* invocation, use normal paths from
+# DIR, but when running under the shell, use POSIX style paths.
+DIR = directories(os.path, os.getcwd())
+PDIR = directories(posixpath, os.environ["PWD"],
+ fixup=lambda s: re.sub(r'^(\w):', r'/\1', s))
+
+parser = argparse.ArgumentParser(
+ description='Run a spidermonkey shell build job')
+parser.add_argument('--dep', action='store_true',
+ help='do not clobber the objdir before building')
+parser.add_argument('--platform', '-p', type=str, metavar='PLATFORM',
+ default='', help='build platform, including a suffix ("-debug" or "") used by buildbot to override the variant\'s "debug" setting. The platform can be used to specify 32 vs 64 bits.')
+parser.add_argument('--timeout', '-t', type=int, metavar='TIMEOUT',
+ default=10800,
+ help='kill job after TIMEOUT seconds')
+parser.add_argument('--objdir', type=str, metavar='DIR',
+ default=env.get('OBJDIR', 'obj-spider'),
+ help='object directory')
+group = parser.add_mutually_exclusive_group()
+group.add_argument('--optimize', action='store_true',
+ help='generate an optimized build. Overrides variant setting.')
+group.add_argument('--no-optimize', action='store_false',
+ dest='optimize',
+ help='generate a non-optimized build. Overrides variant setting.')
+group.set_defaults(optimize=None)
+group = parser.add_mutually_exclusive_group()
+group.add_argument('--debug', action='store_true',
+ help='generate a debug build. Overrides variant setting.')
+group.add_argument('--no-debug', action='store_false',
+ dest='debug',
+ help='generate a non-debug build. Overrides variant setting.')
+group.set_defaults(debug=None)
+parser.add_argument('--run-tests', '--tests', type=str, metavar='TESTSUITE',
+ default='',
+ help="comma-separated set of test suites to add to the variant's default set")
+parser.add_argument('--skip-tests', '--skip', type=str, metavar='TESTSUITE',
+ default='',
+ help="comma-separated set of test suites to remove from the variant's default set")
+parser.add_argument('--build-only', '--build',
+ dest='skip_tests', action='store_const', const='all',
+ help="only do a build, do not run any tests")
+parser.add_argument('--nobuild', action='store_true',
+ help='Do not do a build. Rerun tests on existing build.')
+parser.add_argument('variant', type=str,
+ help='type of job requested, see variants/ subdir')
+args = parser.parse_args()
+
+
+def set_vars_from_script(script, vars):
+ '''Run a shell script, then dump out chosen environment variables. The build
+ system uses shell scripts to do some configuration that we need to
+ borrow. On Windows, the script itself must output the variable settings
+ (in the form "export FOO=<value>"), since otherwise there will be
+ problems with mismatched Windows/POSIX formats.
+ '''
+ script_text = 'source %s' % script
+ if platform.system() == 'Windows':
+ parse_state = 'parsing exports'
+ else:
+ script_text += '; echo VAR SETTINGS:; '
+ script_text += '; '.join('echo $' + var for var in vars)
+ parse_state = 'scanning'
+ stdout = subprocess.check_output(['sh', '-x', '-c', script_text])
+ tograb = vars[:]
+ originals = {}
+ for line in stdout.splitlines():
+ if parse_state == 'scanning':
+ if line == 'VAR SETTINGS:':
+ parse_state = 'grabbing'
+ elif parse_state == 'grabbing':
+ var = tograb.pop(0)
+ env[var] = line
+ elif parse_state == 'parsing exports':
+ m = re.match(r'export (\w+)=(.*)', line)
+ if m:
+ var, value = m.groups()
+ if var in tograb:
+ env[var] = value
+ print("Setting %s = %s" % (var, value))
+ if var.startswith("ORIGINAL_"):
+ originals[var[9:]] = value
+
+ # An added wrinkle: on Windows developer systems, the sourced script will
+ # blow away current settings for eg LIBS, to point to the ones that would
+ # be installed via automation. So we will append the original settings. (On
+ # an automation system, the original settings will be empty or point to
+ # nonexistent stuff.)
+ if platform.system() == 'Windows':
+ for var in vars:
+ if var in originals and len(originals[var]) > 0:
+ env[var] = "%s;%s" % (env[var], originals[var])
+
+
+def ensure_dir_exists(name, clobber=True):
+ if clobber:
+ shutil.rmtree(name, ignore_errors=True)
+ try:
+ os.mkdir(name)
+ except OSError:
+ if clobber:
+ raise
+
+with open(os.path.join(DIR.scripts, "variants", args.variant)) as fh:
+ variant = json.load(fh)
+
+if args.variant == 'nonunified':
+ # Rewrite js/src/**/moz.build to replace UNIFIED_SOURCES to SOURCES.
+ # Note that this modifies the current checkout.
+ for dirpath, dirnames, filenames in os.walk(DIR.js_src):
+ if 'moz.build' in filenames:
+ subprocess.check_call(['sed', '-i', 's/UNIFIED_SOURCES/SOURCES/',
+ os.path.join(dirpath, 'moz.build')])
+
+OBJDIR = os.path.join(DIR.source, args.objdir)
+OUTDIR = os.path.join(OBJDIR, "out")
+POBJDIR = posixpath.join(PDIR.source, args.objdir)
+AUTOMATION = env.get('AUTOMATION', False)
+MAKE = env.get('MAKE', 'make')
+MAKEFLAGS = env.get('MAKEFLAGS', '-j6')
+UNAME_M = subprocess.check_output(['uname', '-m']).strip()
+
+CONFIGURE_ARGS = variant['configure-args']
+
+opt = args.optimize
+if opt is None:
+ opt = variant.get('optimize')
+if opt is not None:
+ CONFIGURE_ARGS += (" --enable-optimize" if opt else " --disable-optimize")
+
+opt = args.debug
+if opt is None and args.platform:
+ # Override variant['debug'].
+ opt = ('-debug' in args.platform)
+if opt is None:
+ opt = variant.get('debug')
+if opt is not None:
+ CONFIGURE_ARGS += (" --enable-debug" if opt else " --disable-debug")
+
+# Any jobs that wish to produce additional output can save them into the upload
+# directory if there is such a thing, falling back to OBJDIR.
+env.setdefault('MOZ_UPLOAD_DIR', OBJDIR)
+ensure_dir_exists(env['MOZ_UPLOAD_DIR'], clobber=False)
+
+# Some of the variants request a particular word size (eg ARM simulators).
+word_bits = variant.get('bits')
+
+# On Linux and Windows, we build 32- and 64-bit versions on a 64 bit
+# host, so the caller has to specify what is desired.
+if word_bits is None and args.platform:
+ platform_arch = args.platform.split('-')[0]
+ if platform_arch in ('win32', 'linux'):
+ word_bits = 32
+ elif platform_arch in ('win64', 'linux64'):
+ word_bits = 64
+
+# Fall back to the word size of the host.
+if word_bits is None:
+ word_bits = 64 if UNAME_M == 'x86_64' else 32
+
+if 'compiler' in variant:
+ compiler = variant['compiler']
+elif platform.system() == 'Darwin':
+ compiler = 'clang'
+elif platform.system() == 'Windows':
+ compiler = 'cl'
+else:
+ compiler = 'gcc'
+
+cxx = {'clang': 'clang++', 'gcc': 'g++', 'cl': 'cl'}.get(compiler)
+
+compiler_dir = env.get('GCCDIR', os.path.join(DIR.tooltool, compiler))
+if os.path.exists(os.path.join(compiler_dir, 'bin', compiler)):
+ env.setdefault('CC', os.path.join(compiler_dir, 'bin', compiler))
+ env.setdefault('CXX', os.path.join(compiler_dir, 'bin', cxx))
+ platlib = 'lib64' if word_bits == 64 else 'lib'
+ env.setdefault('LD_LIBRARY_PATH', os.path.join(compiler_dir, platlib))
+else:
+ env.setdefault('CC', compiler)
+ env.setdefault('CXX', cxx)
+
+if platform.system() == 'Darwin':
+ os.environ['SOURCE'] = DIR.source
+ set_vars_from_script(os.path.join(DIR.scripts, 'macbuildenv.sh'),
+ ['CC', 'CXX'])
+elif platform.system() == 'Windows':
+ MAKE = env.get('MAKE', 'mozmake')
+ os.environ['SOURCE'] = DIR.source
+ if word_bits == 64:
+ os.environ['USE_64BIT'] = '1'
+ set_vars_from_script(posixpath.join(PDIR.scripts, 'winbuildenv.sh'),
+ ['PATH', 'INCLUDE', 'LIB', 'LIBPATH', 'CC', 'CXX',
+ 'WINDOWSSDKDIR'])
+
+# Compiler flags, based on word length
+if word_bits == 32:
+ if compiler == 'clang':
+ env['CC'] = '{CC} -arch i386'.format(**env)
+ env['CXX'] = '{CXX} -arch i386'.format(**env)
+ elif compiler == 'gcc':
+ env['CC'] = '{CC} -m32'.format(**env)
+ env['CXX'] = '{CXX} -m32'.format(**env)
+ env['AR'] = 'ar'
+
+# Configure flags, based on word length and cross-compilation
+if word_bits == 32:
+ if platform.system() == 'Windows':
+ CONFIGURE_ARGS += ' --target=i686-pc-mingw32 --host=i686-pc-mingw32'
+ elif platform.system() == 'Linux':
+ if UNAME_M != 'arm':
+ CONFIGURE_ARGS += ' --target=i686-pc-linux --host=i686-pc-linux'
+else:
+ if platform.system() == 'Windows':
+ CONFIGURE_ARGS += ' --target=x86_64-pc-mingw32 --host=x86_64-pc-mingw32'
+
+# Timeouts.
+ACTIVE_PROCESSES = set()
+
+
+def killall():
+ for proc in ACTIVE_PROCESSES:
+ proc.kill()
+ ACTIVE_PROCESSES.clear()
+
+timer = Timer(args.timeout, killall)
+timer.daemon = True
+timer.start()
+
+ensure_dir_exists(OBJDIR, clobber=not args.dep and not args.nobuild)
+ensure_dir_exists(OUTDIR)
+
+
+def run_command(command, check=False, **kwargs):
+ proc = Popen(command, cwd=OBJDIR, **kwargs)
+ ACTIVE_PROCESSES.add(proc)
+ stdout, stderr = None, None
+ try:
+ stdout, stderr = proc.communicate()
+ finally:
+ ACTIVE_PROCESSES.discard(proc)
+ status = proc.wait()
+ if check and status != 0:
+ raise subprocess.CalledProcessError(status, command, output=stderr)
+ return stdout, stderr, status
+
+# Add in environment variable settings for this variant. Normally used to
+# modify the flags passed to the shell or to set the GC zeal mode.
+for k, v in variant.get('env', {}).items():
+ env[k] = v.format(
+ DIR=DIR.scripts,
+ TOOLTOOL_CHECKOUT=DIR.tooltool,
+ MOZ_UPLOAD_DIR=env['MOZ_UPLOAD_DIR'],
+ OUTDIR=OUTDIR,
+ )
+
+if not args.nobuild:
+ CONFIGURE_ARGS += ' --enable-nspr-build'
+ CONFIGURE_ARGS += ' --prefix={OBJDIR}/dist'.format(OBJDIR=POBJDIR)
+
+ # Generate a configure script from configure.in.
+ configure = os.path.join(DIR.js_src, 'configure')
+ if not os.path.exists(configure):
+ shutil.copyfile(configure + ".in", configure)
+ os.chmod(configure, 0755)
+
+ # Run configure; make
+ run_command(['sh', '-c', posixpath.join(PDIR.js_src, 'configure') + ' ' + CONFIGURE_ARGS], check=True)
+ run_command('%s -s -w %s' % (MAKE, MAKEFLAGS), shell=True, check=True)
+
+COMMAND_PREFIX = []
+# On Linux, disable ASLR to make shell builds a bit more reproducible.
+if subprocess.call("type setarch >/dev/null 2>&1", shell=True) == 0:
+ COMMAND_PREFIX.extend(['setarch', UNAME_M, '-R'])
+
+
+def run_test_command(command, **kwargs):
+ _, _, status = run_command(COMMAND_PREFIX + command, check=False, **kwargs)
+ return status
+
+test_suites = set(['jstests', 'jittest', 'jsapitests', 'checks'])
+
+
+def normalize_tests(tests):
+ if 'all' in tests:
+ return test_suites
+ return tests
+
+# Need a platform name to use as a key in variant files.
+if args.platform:
+ variant_platform = args.platform.split("-")[0]
+elif platform.system() == 'Windows':
+ variant_platform = 'win64' if word_bits == 64 else 'win32'
+elif platform.system() == 'Linux':
+ variant_platform = 'linux64' if word_bits == 64 else 'linux'
+elif platform.system() == 'Darwin':
+ variant_platform = 'macosx64'
+else:
+ variant_platform = 'other'
+
+# Skip any tests that are not run on this platform (or the 'all' platform).
+test_suites -= set(normalize_tests(variant.get('skip-tests', {}).get(variant_platform, [])))
+test_suites -= set(normalize_tests(variant.get('skip-tests', {}).get('all', [])))
+
+# Add in additional tests for this platform (or the 'all' platform).
+test_suites |= set(normalize_tests(variant.get('extra-tests', {}).get(variant_platform, [])))
+test_suites |= set(normalize_tests(variant.get('extra-tests', {}).get('all', [])))
+
+# Now adjust the variant's default test list with command-line arguments.
+test_suites |= set(normalize_tests(args.run_tests.split(",")))
+test_suites -= set(normalize_tests(args.skip_tests.split(",")))
+
+# Always run all enabled tests, even if earlier ones failed. But return the
+# first failed status.
+results = []
+
+# 'checks' is a superset of 'check-style'.
+if 'checks' in test_suites:
+ results.append(run_test_command([MAKE, 'check']))
+elif 'check-style' in test_suites:
+ results.append(run_test_command([MAKE, 'check-style']))
+
+if 'jittest' in test_suites:
+ results.append(run_test_command([MAKE, 'check-jit-test']))
+if 'jsapitests' in test_suites:
+ jsapi_test_binary = os.path.join(OBJDIR, 'dist', 'bin', 'jsapi-tests')
+ results.append(run_test_command([jsapi_test_binary]))
+if 'jstests' in test_suites:
+ results.append(run_test_command([MAKE, 'check-jstests']))
+
+# FIXME bug 1291449: This would be unnecessary if we could run msan with -mllvm
+# -msan-keep-going, but in clang 3.8 it causes a hang during compilation.
+if variant.get('ignore-test-failures'):
+ print("Ignoring test results %s" % (results,))
+ results = [0]
+
+if args.variant in ('tsan', 'msan'):
+ files = filter(lambda f: f.startswith("sanitize_log."), os.listdir(OUTDIR))
+ fullfiles = [os.path.join(OUTDIR, f) for f in files]
+
+ # Summarize results
+ sites = Counter()
+ for filename in fullfiles:
+ with open(os.path.join(OUTDIR, filename), 'rb') as fh:
+ for line in fh:
+ m = re.match(r'^SUMMARY: \w+Sanitizer: (?:data race|use-of-uninitialized-value) (.*)',
+ line.strip())
+ if m:
+ # Some reports include file:line:column, some just
+ # file:line. Just in case it's nondeterministic, we will
+ # canonicalize to just the line number.
+ site = re.sub(r'^(\S+?:\d+)(:\d+)* ', r'\1 ', m.group(1))
+ sites[site] += 1
+
+ # Write a summary file and display it to stdout.
+ summary_filename = os.path.join(env['MOZ_UPLOAD_DIR'], "%s_summary.txt" % args.variant)
+ with open(summary_filename, 'wb') as outfh:
+ for location, count in sites.most_common():
+ print >> outfh, "%d %s" % (count, location)
+ print(open(summary_filename, 'rb').read())
+
+ if 'max-errors' in variant:
+ print("Found %d errors out of %d allowed" % (len(sites), variant['max-errors']))
+ if len(sites) > variant['max-errors']:
+ results.append(1)
+
+ # Gather individual results into a tarball. Note that these are
+ # distinguished only by pid of the JS process running within each test, so
+ # given the 16-bit limitation of pids, it's totally possible that some of
+ # these files will be lost due to being overwritten.
+ command = ['tar', '-C', OUTDIR, '-zcf',
+ os.path.join(env['MOZ_UPLOAD_DIR'], '%s.tar.gz' % args.variant)]
+ command += files
+ subprocess.call(command)
+
+for st in results:
+ if st != 0:
+ sys.exit(st)
diff --git a/js/src/devtools/automation/autospider.sh b/js/src/devtools/automation/autospider.sh
new file mode 100755
index 000000000..f6e302bf6
--- /dev/null
+++ b/js/src/devtools/automation/autospider.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+exec python2.7 "$(dirname $0)/autospider.py" "$@"
diff --git a/js/src/devtools/automation/cgc-jittest-timeouts.txt b/js/src/devtools/automation/cgc-jittest-timeouts.txt
new file mode 100644
index 000000000..84e29f893
--- /dev/null
+++ b/js/src/devtools/automation/cgc-jittest-timeouts.txt
@@ -0,0 +1,47 @@
+SIMD/nursery-overflow.js
+asm.js/testParallelCompile.js
+auto-regress/bug653395.js
+auto-regress/bug654392.js
+auto-regress/bug675251.js
+auto-regress/bug729797.js
+baseline/bug847446.js
+baseline/bug852175.js
+basic/bug632964-regexp.js
+basic/bug656261.js
+basic/bug677957-2.js
+basic/bug753283.js
+basic/bug867946.js
+basic/testAtomize.js
+basic/testBug614653.js
+basic/testBug686274.js
+basic/testManyVars.js
+basic/testTypedArrayInit.js
+debug/DebuggeeWouldRun-01.js
+debug/DebuggeeWouldRun-02.js
+gc/bug-1014972.js
+gc/bug-1246593.js
+gc/bug-906236.js
+gc/bug-906241.js
+ion/bug1197769.js
+ion/bug779245.js
+ion/bug787921.js
+ion/bug977966.js
+ion/close-iterators-1.js
+parallel/alloc-many-objs.js
+parallel/alloc-too-many-objs.js
+parser/bug-1263881-1.js
+parser/bug-1263881-2.js
+parser/bug-1263881-3.js
+parser/modifier-yield-without-operand-2.js
+saved-stacks/bug-1006876-too-much-recursion.js
+self-test/assertDeepEq.js
+sunspider/check-string-unpack-code.js
+v8-v5/check-earley-boyer.js
+v8-v5/check-raytrace.js
+v8-v5/check-regexp.js
+v8-v5/check-splay.js
+wasm/spec/f32.wast.js
+wasm/spec/f32_cmp.wast.js
+wasm/spec/f64.wast.js
+wasm/spec/f64_cmp.wast.js
+wasm/spec/float_exprs.wast.js
diff --git a/js/src/devtools/automation/cgc-jstests-slow.txt b/js/src/devtools/automation/cgc-jstests-slow.txt
new file mode 100644
index 000000000..814ad338e
--- /dev/null
+++ b/js/src/devtools/automation/cgc-jstests-slow.txt
@@ -0,0 +1,62 @@
+ecma_5/Object/15.2.3.6-dictionary-redefinition-01-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-02-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-03-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-04-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-05-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-06-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-07-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-08-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-09-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-10-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-11-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-12-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-13-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-14-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-15-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-16-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-17-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-18-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-19-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-20-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-21-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-22-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-23-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-24-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-25-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-26-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-27-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-28-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-29-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-30-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-31-of-32.js
+ecma_5/Object/15.2.3.6-dictionary-redefinition-32-of-32.js
+ecma_5/Object/15.2.3.6-middle-redefinition-1-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-2-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-3-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-4-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-5-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-6-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-7-of-8.js
+ecma_5/Object/15.2.3.6-middle-redefinition-8-of-8.js
+ecma_5/Object/15.2.3.6-redefinition-1-of-4.js
+ecma_5/Object/15.2.3.6-redefinition-2-of-4.js
+ecma_5/Object/15.2.3.6-redefinition-3-of-4.js
+ecma_5/Object/15.2.3.6-redefinition-4-of-4.js
+ecma_6/String/normalize-generateddata-part0.js
+ecma_6/String/normalize-generateddata-part1-not-listed.js
+ecma_6/String/normalize-generateddata-part1.js
+ecma_6/String/normalize-generateddata-part2.js
+ecma_6/String/normalize-generateddata-part3.js
+js1_5/GC/regress-203278-2.js
+js1_5/GC/regress-203278-3.js
+js1_5/GC/regress-278725.js
+js1_5/Regress/regress-312588.js
+js1_5/Regress/regress-321971.js
+js1_5/Regress/regress-360969-01.js
+js1_5/Regress/regress-360969-02.js
+js1_5/Regress/regress-360969-03.js
+js1_5/Regress/regress-360969-04.js
+js1_5/Regress/regress-360969-05.js
+js1_5/Regress/regress-360969-06.js
+js1_8_5/extensions/clone-complex-object.js
+js1_8_5/extensions/clone-object-deep.js
diff --git a/js/src/devtools/automation/macbuildenv.sh b/js/src/devtools/automation/macbuildenv.sh
new file mode 100644
index 000000000..df0f57fdb
--- /dev/null
+++ b/js/src/devtools/automation/macbuildenv.sh
@@ -0,0 +1,14 @@
+# We will be sourcing mozconfig files, which end up calling mk_add_options and
+# ac_add_options with various settings. We only need the variable settings they
+# create along the way.
+mk_add_options() {
+ : do nothing
+}
+ac_add_options() {
+ : do nothing
+}
+
+topsrcdir="$SOURCE"
+
+# Setup CC and CXX variables
+. $topsrcdir/build/macosx/mozconfig.common
diff --git a/js/src/devtools/automation/variants/arm-sim b/js/src/devtools/automation/variants/arm-sim
new file mode 100644
index 000000000..20963b235
--- /dev/null
+++ b/js/src/devtools/automation/variants/arm-sim
@@ -0,0 +1,6 @@
+{
+ "configure-args": "--enable-stdcxx-compat --enable-simulator=arm --target=i686-pc-linux --host=i686-pc-linux",
+ "optimize": true,
+ "debug": true,
+ "bits": 32
+}
diff --git a/js/src/devtools/automation/variants/arm-sim-osx b/js/src/devtools/automation/variants/arm-sim-osx
new file mode 100644
index 000000000..af193fc16
--- /dev/null
+++ b/js/src/devtools/automation/variants/arm-sim-osx
@@ -0,0 +1,6 @@
+{
+ "configure-args": "--enable-stdcxx-compat --enable-simulator=arm --target=i686-apple-darwin10.0.0 --host=i686-apple-darwin10.0.0",
+ "optimize": true,
+ "debug": true,
+ "bits": 32
+}
diff --git a/js/src/devtools/automation/variants/arm64-sim b/js/src/devtools/automation/variants/arm64-sim
new file mode 100644
index 000000000..6874f441c
--- /dev/null
+++ b/js/src/devtools/automation/variants/arm64-sim
@@ -0,0 +1,10 @@
+{
+ "configure-args": "--enable-stdcxx-compat --enable-simulator=arm64",
+ "optimize": true,
+ "debug": true,
+ "env": {
+ "JSTESTS_EXTRA_ARGS": "--exclude-file={DIR}/arm64-jstests-slow.txt",
+ "JITTEST_EXTRA_ARGS": "--jitflags=none --args=--baseline-eager -x ion/ -x asm.js/"
+ },
+ "bits": 64
+}
diff --git a/js/src/devtools/automation/variants/asan b/js/src/devtools/automation/variants/asan
new file mode 100644
index 000000000..1c791eb50
--- /dev/null
+++ b/js/src/devtools/automation/variants/asan
@@ -0,0 +1,9 @@
+{
+ "configure-args": "--enable-debug-symbols='-gline-tables-only' --disable-jemalloc --enable-address-sanitizer",
+ "optimize": true,
+ "debug": false,
+ "compiler": "clang",
+ "env": {
+ "LLVM_SYMBOLIZER": "{TOOLTOOL_CHECKOUT}/clang/bin/llvm-symbolizer"
+ }
+}
diff --git a/js/src/devtools/automation/variants/compacting b/js/src/devtools/automation/variants/compacting
new file mode 100644
index 000000000..cd1891bfe
--- /dev/null
+++ b/js/src/devtools/automation/variants/compacting
@@ -0,0 +1,14 @@
+{
+ "configure-args": "--enable-stdcxx-compat --enable-ctypes",
+ "optimize": true,
+ "debug": true,
+ "env": {
+ "JS_GC_ZEAL": "Compact",
+ "JITTEST_EXTRA_ARGS": "--jitflags=debug --ignore-timeouts={DIR}/cgc-jittest-timeouts.txt",
+ "JSTESTS_EXTRA_ARGS": "--exclude-file={DIR}/cgc-jstests-slow.txt"
+ },
+ "skip-tests": {
+ "win32": ["jstests"],
+ "win64": ["jstests"]
+ }
+}
diff --git a/js/src/devtools/automation/variants/dtrace b/js/src/devtools/automation/variants/dtrace
new file mode 100644
index 000000000..4633de2e2
--- /dev/null
+++ b/js/src/devtools/automation/variants/dtrace
@@ -0,0 +1,5 @@
+{
+ "configure-args": "--enable-dtrace --enable-debug-symbols",
+ "optimize": true,
+ "debug": true
+}
diff --git a/js/src/devtools/automation/variants/msan b/js/src/devtools/automation/variants/msan
new file mode 100644
index 000000000..ccf2c75f2
--- /dev/null
+++ b/js/src/devtools/automation/variants/msan
@@ -0,0 +1,13 @@
+{
+ "configure-args": "--enable-debug-symbols='-gline-tables-only' --disable-jemalloc --enable-memory-sanitizer",
+ "optimize": true,
+ "debug": false,
+ "compiler": "clang",
+ "env": {
+ "JITTEST_EXTRA_ARGS": "--jitflags=interp --ignore-timeouts={DIR}/cgc-jittest-timeouts.txt",
+ "JSTESTS_EXTRA_ARGS": "--jitflags=interp --exclude-file={DIR}/cgc-jstests-slow.txt",
+ "MSAN_OPTIONS": "external_symbolizer_path={TOOLTOOL_CHECKOUT}/clang/bin/llvm-symbolizer:log_path={OUTDIR}/sanitize_log"
+ },
+ "ignore-test-failures": "true",
+ "max-errors": 2
+}
diff --git a/js/src/devtools/automation/variants/nonunified b/js/src/devtools/automation/variants/nonunified
new file mode 100644
index 000000000..306c00ea8
--- /dev/null
+++ b/js/src/devtools/automation/variants/nonunified
@@ -0,0 +1,10 @@
+{
+ "configure-args": "--enable-warnings-as-errors",
+ "debug": true,
+ "skip-tests": {
+ "all": ["jstests", "jittest", "checks"]
+ },
+ "extra-tests": {
+ "all": ["check-style"]
+ }
+}
diff --git a/js/src/devtools/automation/variants/plain b/js/src/devtools/automation/variants/plain
new file mode 100644
index 000000000..ab954074d
--- /dev/null
+++ b/js/src/devtools/automation/variants/plain
@@ -0,0 +1,7 @@
+{
+ "configure-args": "",
+ "optimize": true,
+ "env": {
+ "JSTESTS_EXTRA_ARGS": "--jitflags=all"
+ }
+}
diff --git a/js/src/devtools/automation/variants/plaindebug b/js/src/devtools/automation/variants/plaindebug
new file mode 100644
index 000000000..e98a3cdbd
--- /dev/null
+++ b/js/src/devtools/automation/variants/plaindebug
@@ -0,0 +1,7 @@
+{
+ "configure-args": "",
+ "debug": true,
+ "env": {
+ "JSTESTS_EXTRA_ARGS": "--jitflags=debug"
+ }
+}
diff --git a/js/src/devtools/automation/variants/rootanalysis b/js/src/devtools/automation/variants/rootanalysis
new file mode 100644
index 000000000..c5ed4dfcd
--- /dev/null
+++ b/js/src/devtools/automation/variants/rootanalysis
@@ -0,0 +1,9 @@
+{
+ "configure-args": "--enable-stdcxx-compat --enable-ctypes",
+ "optimize": true,
+ "debug": true,
+ "env": {
+ "JS_GC_ZEAL": "GenerationalGC",
+ "JSTESTS_EXTRA_ARGS": "--jitflags=debug"
+ }
+}
diff --git a/js/src/devtools/automation/variants/tsan b/js/src/devtools/automation/variants/tsan
new file mode 100644
index 000000000..f831a5b04
--- /dev/null
+++ b/js/src/devtools/automation/variants/tsan
@@ -0,0 +1,13 @@
+{
+ "configure-args": "--enable-debug-symbols='-gline-tables-only' --disable-jemalloc --enable-thread-sanitizer",
+ "optimize": true,
+ "debug": false,
+ "compiler": "clang",
+ "env": {
+ "LLVM_SYMBOLIZER": "{TOOLTOOL_CHECKOUT}/clang/bin/llvm-symbolizer",
+ "JITTEST_EXTRA_ARGS": "--jitflags=debug --ignore-timeouts={DIR}/cgc-jittest-timeouts.txt",
+ "JSTESTS_EXTRA_ARGS": "--exclude-file={DIR}/cgc-jstests-slow.txt",
+ "TSAN_OPTIONS": "log_path={OUTDIR}/sanitize_log"
+ },
+ "max-errors": 14
+}
diff --git a/js/src/devtools/automation/variants/warnaserr b/js/src/devtools/automation/variants/warnaserr
new file mode 100644
index 000000000..a5f4e0e7c
--- /dev/null
+++ b/js/src/devtools/automation/variants/warnaserr
@@ -0,0 +1,4 @@
+{
+ "configure-args": "--enable-warnings-as-errors",
+ "optimize": true
+}
diff --git a/js/src/devtools/automation/variants/warnaserrdebug b/js/src/devtools/automation/variants/warnaserrdebug
new file mode 100644
index 000000000..ca1f14fef
--- /dev/null
+++ b/js/src/devtools/automation/variants/warnaserrdebug
@@ -0,0 +1,4 @@
+{
+ "configure-args": "--enable-warnings-as-errors",
+ "debug": true
+}
diff --git a/js/src/devtools/automation/winbuildenv.sh b/js/src/devtools/automation/winbuildenv.sh
new file mode 100644
index 000000000..f9d862ac4
--- /dev/null
+++ b/js/src/devtools/automation/winbuildenv.sh
@@ -0,0 +1,36 @@
+# We will be sourcing mozconfig files, which end up calling mk_add_options with
+# various settings. We only need the variable settings they create along the
+# way. Print them out, to be sucked up when running this file.
+mk_add_options() {
+ echo "$@"
+}
+
+topsrcdir="$SOURCE"
+
+# Tooltool installs in parent of topsrcdir for spidermonkey builds.
+# Resolve that path since the mozconfigs assume tooltool installs in
+# topsrcdir.
+VSPATH="$(cd ${topsrcdir}/.. && pwd)/vs2015u3"
+
+# When running on a developer machine, several variables will already
+# have the right settings and we will need to keep them since the
+# Windows mozconfigs overwrite them.
+echo "export ORIGINAL_INCLUDE=$INCLUDE"
+echo "export ORIGINAL_LIB=$LIB"
+echo "export ORIGINAL_LIBPATH=$LIBPATH"
+
+if [ -n "$USE_64BIT" ]; then
+ . $topsrcdir/build/win64/mozconfig.vs-latest
+else
+ . $topsrcdir/build/win32/mozconfig.vs-latest
+fi
+
+# PATH also needs to point to mozmake.exe, which can come from either
+# newer mozilla-build or tooltool.
+if ! which mozmake 2>/dev/null; then
+ export PATH="$PATH:$SOURCE/.."
+ if ! which mozmake 2>/dev/null; then
+ TT_SERVER=${TT_SERVER:-https://api.pub.build.mozilla.org/tooltool/}
+ ( cd $SOURCE/..; ./scripts/scripts/tooltool/tooltool_wrapper.sh $SOURCE/browser/config/tooltool-manifests/${platform:-win32}/releng.manifest $TT_SERVER setup.sh c:/mozilla-build/python27/python.exe C:/mozilla-build/tooltool.py )
+ fi
+fi
diff --git a/js/src/devtools/gc-ubench/benchmarks/bigTextNodes.js b/js/src/devtools/gc-ubench/benchmarks/bigTextNodes.js
new file mode 100644
index 000000000..1a6119f56
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/bigTextNodes.js
@@ -0,0 +1,25 @@
+window.tests.set('bigTextNodes', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [ textNode, textNode, ... ]",
+
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ defaultGarbagePerFrame: "8",
+ defaultGarbageTotal: "8",
+
+ makeGarbage: (N) => {
+ var a = [];
+ var s = "x";
+ for (var i = 0; i < 16; i++)
+ s = s + s;
+ for (var i = 0; i < N; i++)
+ a.push(document.createTextNode(s));
+ garbage[garbageIndex++] = a;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/events.js b/js/src/devtools/gc-ubench/benchmarks/events.js
new file mode 100644
index 000000000..37f91f754
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/events.js
@@ -0,0 +1,25 @@
+window.tests.set('events', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [ textNode, textNode, ... ]",
+
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ defaultGarbagePerFrame: "100K",
+ defaultGarbageTotal: "8",
+
+ makeGarbage: (N) => {
+ var a = [];
+ for (var i = 0; i < N; i++) {
+ var e = document.createEvent("Events");
+ e.initEvent("TestEvent", true, true);
+ a.push(e);
+ }
+ garbage[garbageIndex++] = a;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/expandoEvents.js b/js/src/devtools/gc-ubench/benchmarks/expandoEvents.js
new file mode 100644
index 000000000..70ff780c4
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/expandoEvents.js
@@ -0,0 +1,26 @@
+window.tests.set('expandoEvents', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [ textNode, textNode, ... ]",
+
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ defaultGarbagePerFrame: "100K",
+ defaultGarbageTotal: "8",
+
+ makeGarbage: (N) => {
+ var a = [];
+ for (var i = 0; i < N; i++) {
+ var e = document.createEvent("Events");
+ e.initEvent("TestEvent", true, true);
+ e.color = ["tuna"];
+ a.push(e);
+ }
+ garbage[garbageIndex++] = a;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayArrayLiteral.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayArrayLiteral.js
new file mode 100644
index 000000000..9982090d8
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayArrayLiteral.js
@@ -0,0 +1,16 @@
+window.tests.set('globalArrayArrayLiteral', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [[], ....]",
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+ makeGarbage: (N) => {
+ for (var i = 0; i < N; i++) {
+ garbage[garbageIndex++] = ['foo', 'bar', 'baz', 'baa'];
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayBuffer.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayBuffer.js
new file mode 100644
index 000000000..4aee22ee7
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayBuffer.js
@@ -0,0 +1,23 @@
+window.tests.set('globalArrayBuffer', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = ArrayBuffer(N); # (large malloc data)",
+
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ defaultGarbageTotal: "8K",
+ defaultGarbagePerFrame: "4M",
+
+ makeGarbage: (N) => {
+ var ab = new ArrayBuffer(N);
+ var view = new Uint8Array(ab);
+ view[0] = 1;
+ view[N - 1] = 2;
+ garbage[garbageIndex++] = ab;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayFgFinalized.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayFgFinalized.js
new file mode 100644
index 000000000..f2f914376
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayFgFinalized.js
@@ -0,0 +1,23 @@
+window.tests.set('globalArrayFgFinalized', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [ new Map, new Map, ... ]; # (foreground finalized)",
+
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ defaultGarbageTotal: "8K",
+ defaultGarbagePerFrame: "1M",
+
+ makeGarbage: (N) => {
+ var arr = [];
+ for (var i = 0; i < N; i++) {
+ arr.push(new Map);
+ }
+ garbage[garbageIndex++] = arr;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayLargeArray.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayLargeArray.js
new file mode 100644
index 000000000..2c0ff3564
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayLargeArray.js
@@ -0,0 +1,18 @@
+window.tests.set('globalArrayLargeArray', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [[...], ....]",
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+ makeGarbage: (N) => {
+ var a = new Array(N);
+ for (var i = 0; i < N; i++) {
+ a[i] = N - i;
+ }
+ garbage[garbageIndex++] = a;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayLargeObject.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayLargeObject.js
new file mode 100644
index 000000000..d676c255e
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayLargeObject.js
@@ -0,0 +1,23 @@
+window.tests.set('globalArrayLargeObject', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = { LARGE }; # (large slots)",
+
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ defaultGarbageTotal: "8K",
+ defaultGarbagePerFrame: "200K",
+
+ makeGarbage: (N) => {
+ var obj = {};
+ for (var i = 0; i < N; i++) {
+ obj["key" + i] = i;
+ }
+ garbage[garbageIndex++] = obj;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayNewObject.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayNewObject.js
new file mode 100644
index 000000000..2ab04dff0
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayNewObject.js
@@ -0,0 +1,16 @@
+window.tests.set('globalArrayNewObject', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [new Object(), ....]",
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+ makeGarbage: (N) => {
+ for (var i = 0; i < N; i++) {
+ garbage[garbageIndex++] = new Object();
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayObjectLiteral.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayObjectLiteral.js
new file mode 100644
index 000000000..7a8c1b2d6
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayObjectLiteral.js
@@ -0,0 +1,16 @@
+window.tests.set('globalArrayObjectLiteral', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [{}, ....]",
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+ makeGarbage: (N) => {
+ for (var i = 0; i < N; i++) {
+ garbage[garbageIndex++] = {a: 'foo', b: 'bar', 0: 'foo', 1: 'bar'};
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/globalArrayReallocArray.js b/js/src/devtools/gc-ubench/benchmarks/globalArrayReallocArray.js
new file mode 100644
index 000000000..6a7b16011
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/globalArrayReallocArray.js
@@ -0,0 +1,18 @@
+window.tests.set('globalArrayReallocArray', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [[,,,], ....]",
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+ makeGarbage: (N) => {
+ var a = [];
+ for (var i = 0; i < N; i++) {
+ a[i] = N - i;
+ }
+ garbage[garbageIndex++] = a;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/largeArrayPropertyAndElements.js b/js/src/devtools/gc-ubench/benchmarks/largeArrayPropertyAndElements.js
new file mode 100644
index 000000000..36ae04971
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/largeArrayPropertyAndElements.js
@@ -0,0 +1,34 @@
+window.tests.set('largeArrayPropertyAndElements', (function() {
+ var garbage;
+ var index;
+
+ return {
+ description: "Large array with both properties and elements",
+
+ load: n => {
+ garbage = new Array(n);
+ garbage.fill(null);
+ index = 0;
+ },
+
+ unload: () => {
+ garbage = null;
+ index = 0;
+ },
+
+ defaultGarbageTotal: "100K",
+ defaultGarbagePerFrame: "30K",
+
+ makeGarbage: n => {
+ for (var i = 0; i < n; i++) {
+ index++;
+ index %= garbage.length;
+
+ var obj = {};
+ garbage[index] = obj;
+ garbage["key-" + index] = obj;
+ }
+ }
+ };
+
+}()));
diff --git a/js/src/devtools/gc-ubench/benchmarks/noAllocation.js b/js/src/devtools/gc-ubench/benchmarks/noAllocation.js
new file mode 100644
index 000000000..5e01c309b
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/noAllocation.js
@@ -0,0 +1,6 @@
+window.tests.set('noAllocation', {
+ description: "Do not generate any garbage.",
+ load: (N) => {},
+ unload: () => {},
+ makeGarbage: (N) => {}
+});
diff --git a/js/src/devtools/gc-ubench/benchmarks/pairCyclicWeakMap.js b/js/src/devtools/gc-ubench/benchmarks/pairCyclicWeakMap.js
new file mode 100644
index 000000000..4dcb4d57e
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/pairCyclicWeakMap.js
@@ -0,0 +1,33 @@
+window.tests.set('pairCyclicWeakMap', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "wm1[k1] = k2; wm2[k2] = k3; wm1[k3] = k4; wm2[k4] = ...",
+
+ defaultGarbagePerFrame: "1K",
+ defaultGarbageTotal: "1K",
+
+ load: (N) => { garbage = new Array(N); },
+
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ makeGarbage: (M) => {
+ var wm1 = new WeakMap();
+ var wm2 = new WeakMap();
+ var initialKey = {};
+ var key = initialKey;
+ var value = {};
+ for (var i = 0; i < M/2; i++) {
+ wm1.set(key, value);
+ key = value;
+ value = {};
+ wm2.set(key, value);
+ key = value;
+ value = {};
+ }
+ garbage[garbageIndex++] = [ initialKey, wm1, wm2 ];
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/propertyTreeSplitting.js b/js/src/devtools/gc-ubench/benchmarks/propertyTreeSplitting.js
new file mode 100644
index 000000000..47001030f
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/propertyTreeSplitting.js
@@ -0,0 +1,25 @@
+window.tests.set('propertyTreeSplitting', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "use delete to generate Shape garbage",
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+ makeGarbage: (N) => {
+ function f()
+ {
+ var a1 = eval;
+ delete eval;
+ eval = a1;
+ var a3 = toString;
+ delete toString;
+ toString = a3;
+ }
+ for (var a = 0; a < N; ++a) {
+ garbage[garbageIndex++] = new f();
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/selfCyclicWeakMap.js b/js/src/devtools/gc-ubench/benchmarks/selfCyclicWeakMap.js
new file mode 100644
index 000000000..0a5ffb084
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/selfCyclicWeakMap.js
@@ -0,0 +1,29 @@
+window.tests.set('selfCyclicWeakMap', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var wm = new WeakMap(); wm[k1] = k2; wm[k2] = k3; ...",
+
+ defaultGarbagePerFrame: "1K",
+ defaultGarbageTotal: "1K",
+
+ load: (N) => { garbage = new Array(N); },
+
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ makeGarbage: (M) => {
+ var wm = new WeakMap();
+ var initialKey = {};
+ var key = initialKey;
+ var value = {};
+ for (var i = 0; i < M; i++) {
+ wm.set(key, value);
+ key = value;
+ value = {};
+ }
+ garbage[garbageIndex++] = [ initialKey, wm ];
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/benchmarks/textNodes.js b/js/src/devtools/gc-ubench/benchmarks/textNodes.js
new file mode 100644
index 000000000..257e5f726
--- /dev/null
+++ b/js/src/devtools/gc-ubench/benchmarks/textNodes.js
@@ -0,0 +1,23 @@
+window.tests.set('textNodes', (function() {
+var garbage = [];
+var garbageIndex = 0;
+return {
+ description: "var foo = [ textNode, textNode, ... ]",
+
+ load: (N) => { garbage = new Array(N); },
+ unload: () => { garbage = []; garbageIndex = 0; },
+
+ defaultGarbagePerFrame: "100K",
+ defaultGarbageTotal: "8",
+
+ makeGarbage: (N) => {
+ var a = [];
+ for (var i = 0; i < N; i++) {
+ a.push(document.createTextNode("t" + i));
+ }
+ garbage[garbageIndex++] = a;
+ if (garbageIndex == garbage.length)
+ garbageIndex = 0;
+ }
+};
+})());
diff --git a/js/src/devtools/gc-ubench/harness.js b/js/src/devtools/gc-ubench/harness.js
new file mode 100644
index 000000000..f217ff1cb
--- /dev/null
+++ b/js/src/devtools/gc-ubench/harness.js
@@ -0,0 +1,661 @@
+// Per-frame time sampling infra. Also GC'd: hopefully will not perturb things too badly.
+var numSamples = 500;
+var delays = new Array(numSamples);
+var gcs = new Array(numSamples);
+var minorGCs = new Array(numSamples);
+var gcBytes = new Array(numSamples);
+var mallocBytes = new Array(numSamples);
+var sampleIndex = 0;
+var sampleTime = 16; // ms
+var gHistogram = new Map(); // {delay in 10us units: count}
+
+var features = {
+ trackingSizes: ('mozMemory' in performance),
+ showingGCs: ('mozMemory' in performance),
+};
+
+// Draw state.
+var stopped = 0;
+var start;
+var prev;
+var latencyGraph;
+var memoryGraph;
+var ctx;
+var memoryCtx;
+
+// Current test state.
+var activeTest = undefined;
+var testDuration = undefined; // ms
+var testState = 'idle'; // One of 'idle' or 'running'.
+var testStart = undefined; // ms
+var testQueue = [];
+
+// Global defaults
+var globalDefaultGarbageTotal = "8M";
+var globalDefaultGarbagePerFrame = "8K";
+
+function Graph(ctx) {
+ this.ctx = ctx;
+
+ var { width, height } = ctx.canvas;
+ this.layout = {
+ xAxisLabel_Y: height - 20,
+ };
+}
+
+Graph.prototype.xpos = index => index * 2;
+
+Graph.prototype.clear = function () {
+ var { width, height } = this.ctx.canvas;
+ this.ctx.clearRect(0, 0, width, height);
+};
+
+Graph.prototype.drawScale = function (delay)
+{
+ this.drawHBar(delay, `${delay}ms`, 'rgb(150,150,150)');
+}
+
+Graph.prototype.draw60fps = function () {
+ this.drawHBar(1000/60, '60fps', '#00cf61', 25);
+}
+
+Graph.prototype.draw30fps = function () {
+ this.drawHBar(1000/30, '30fps', '#cf0061', 25);
+}
+
+Graph.prototype.drawAxisLabels = function (x_label, y_label)
+{
+ var ctx = this.ctx;
+ var { width, height } = ctx.canvas;
+
+ ctx.fillText(x_label, width / 2, this.layout.xAxisLabel_Y);
+
+ ctx.save();
+ ctx.rotate(Math.PI/2);
+ var start = height / 2 - ctx.measureText(y_label).width / 2;
+ ctx.fillText(y_label, start, -width+20);
+ ctx.restore();
+}
+
+Graph.prototype.drawFrame = function () {
+ var ctx = this.ctx;
+ var { width, height } = ctx.canvas;
+
+ // Draw frame to show size
+ ctx.strokeStyle = 'rgb(0,0,0)';
+ ctx.fillStyle = 'rgb(0,0,0)';
+ ctx.beginPath();
+ ctx.moveTo(0, 0);
+ ctx.lineTo(width, 0);
+ ctx.lineTo(width, height);
+ ctx.lineTo(0, height);
+ ctx.closePath();
+ ctx.stroke();
+}
+
+function LatencyGraph(ctx) {
+ Graph.call(this, ctx);
+ console.log(this.ctx);
+}
+
+LatencyGraph.prototype = Object.create(Graph.prototype);
+
+Object.defineProperty(LatencyGraph.prototype, 'constructor', {
+ enumerable: false,
+ value: LatencyGraph });
+
+LatencyGraph.prototype.ypos = function (delay) {
+ var { height } = this.ctx.canvas;
+
+ var r = height + 100 - Math.log(delay) * 64;
+ if (r < 5) return 5;
+ return r;
+}
+
+LatencyGraph.prototype.drawHBar = function (delay, label, color='rgb(0,0,0)', label_offset=0)
+{
+ var ctx = this.ctx;
+
+ ctx.fillStyle = color;
+ ctx.strokeStyle = color;
+ ctx.fillText(label, this.xpos(numSamples) + 4 + label_offset, this.ypos(delay) + 3);
+
+ ctx.beginPath();
+ ctx.moveTo(this.xpos(0), this.ypos(delay));
+ ctx.lineTo(this.xpos(numSamples) + label_offset, this.ypos(delay));
+ ctx.stroke();
+ ctx.strokeStyle = 'rgb(0,0,0)';
+ ctx.fillStyle = 'rgb(0,0,0)';
+}
+
+LatencyGraph.prototype.draw = function () {
+ var ctx = this.ctx;
+
+ this.clear();
+ this.drawFrame();
+
+ for (var delay of [ 10, 20, 30, 50, 100, 200, 400, 800 ])
+ this.drawScale(delay);
+ this.draw60fps();
+ this.draw30fps();
+
+ var worst = 0, worstpos = 0;
+ ctx.beginPath();
+ for (var i = 0; i < numSamples; i++) {
+ ctx.lineTo(this.xpos(i), this.ypos(delays[i]));
+ if (delays[i] >= worst) {
+ worst = delays[i];
+ worstpos = i;
+ }
+ }
+ ctx.stroke();
+
+ // Draw vertical lines marking minor and major GCs
+ if (features.showingGCs) {
+ var { width, height } = ctx.canvas;
+
+ ctx.strokeStyle = 'rgb(255,100,0)';
+ var idx = sampleIndex % numSamples;
+ var gcCount = gcs[idx];
+ for (var i = 0; i < numSamples; i++) {
+ idx = (sampleIndex + i) % numSamples;
+ if (gcCount < gcs[idx]) {
+ ctx.beginPath();
+ ctx.moveTo(this.xpos(idx), 0);
+ ctx.lineTo(this.xpos(idx), this.layout.xAxisLabel_Y);
+ ctx.stroke();
+ }
+ gcCount = gcs[idx];
+ }
+
+ ctx.strokeStyle = 'rgb(0,255,100)';
+ idx = sampleIndex % numSamples;
+        gcCount = minorGCs[idx];
+ for (var i = 0; i < numSamples; i++) {
+ idx = (sampleIndex + i) % numSamples;
+ if (gcCount < minorGCs[idx]) {
+ ctx.beginPath();
+ ctx.moveTo(this.xpos(idx), 0);
+ ctx.lineTo(this.xpos(idx), 20);
+ ctx.stroke();
+ }
+ gcCount = minorGCs[idx];
+ }
+ }
+
+ ctx.fillStyle = 'rgb(255,0,0)';
+ if (worst)
+ ctx.fillText(`${worst.toFixed(2)}ms`, this.xpos(worstpos) - 10, this.ypos(worst) - 14);
+
+    // Mark the current sample position with a dot.
+ ctx.beginPath();
+ var where = sampleIndex % numSamples;
+ ctx.arc(this.xpos(where), this.ypos(delays[where]), 5, 0, Math.PI*2, true);
+ ctx.fill();
+ ctx.fillStyle = 'rgb(0,0,0)';
+
+ this.drawAxisLabels('Time', 'Pause between frames (log scale)');
+}
+
+function MemoryGraph(ctx) {
+ Graph.call(this, ctx);
+ this.worstEver = this.bestEver = performance.mozMemory.zone.gcBytes;
+ this.limit = Math.max(this.worstEver, performance.mozMemory.zone.gcAllocTrigger);
+}
+
+MemoryGraph.prototype = Object.create(Graph.prototype);
+
+Object.defineProperty(MemoryGraph.prototype, 'constructor', {
+ enumerable: false,
+ value: MemoryGraph });
+
+MemoryGraph.prototype.ypos = function (size) {
+ var { height } = this.ctx.canvas;
+
+ var range = this.limit - this.bestEver;
+ var percent = (size - this.bestEver) / range;
+
+ return (1 - percent) * height * 0.9 + 20;
+}
+
+MemoryGraph.prototype.drawHBar = function (size, label, color='rgb(150,150,150)')
+{
+ var ctx = this.ctx;
+
+ var y = this.ypos(size);
+
+ ctx.fillStyle = color;
+ ctx.strokeStyle = color;
+ ctx.fillText(label, this.xpos(numSamples) + 4, y + 3);
+
+ ctx.beginPath();
+ ctx.moveTo(this.xpos(0), y);
+ ctx.lineTo(this.xpos(numSamples), y);
+ ctx.stroke();
+ ctx.strokeStyle = 'rgb(0,0,0)';
+ ctx.fillStyle = 'rgb(0,0,0)';
+}
+
+function format_gcBytes(bytes) {
+ if (bytes < 4000)
+ return `${bytes} bytes`;
+ else if (bytes < 4e6)
+ return `${(bytes / 1024).toFixed(2)} KB`;
+ else if (bytes < 4e9)
+ return `${(bytes / 1024 / 1024).toFixed(2)} MB`;
+ else
+ return `${(bytes / 1024 / 1024 / 1024).toFixed(2)} GB`;
+}
+
+MemoryGraph.prototype.draw = function () {
+ var ctx = this.ctx;
+
+ this.clear();
+ this.drawFrame();
+
+ var worst = 0, worstpos = 0;
+ for (var i = 0; i < numSamples; i++) {
+ if (gcBytes[i] >= worst) {
+ worst = gcBytes[i];
+ worstpos = i;
+ }
+ if (gcBytes[i] < this.bestEver) {
+ this.bestEver = gcBytes[i];
+ }
+ }
+
+ if (this.worstEver < worst) {
+ this.worstEver = worst;
+ this.limit = Math.max(this.worstEver, performance.mozMemory.zone.gcAllocTrigger);
+ }
+
+ this.drawHBar(this.bestEver, `${format_gcBytes(this.bestEver)} min`, '#00cf61');
+ this.drawHBar(this.worstEver, `${format_gcBytes(this.worstEver)} max`, '#cc1111');
+ this.drawHBar(performance.mozMemory.zone.gcAllocTrigger, `${format_gcBytes(performance.mozMemory.zone.gcAllocTrigger)} trigger`, '#cc11cc');
+
+ ctx.fillStyle = 'rgb(255,0,0)';
+ if (worst)
+ ctx.fillText(format_gcBytes(worst), this.xpos(worstpos) - 10, this.ypos(worst) - 14);
+
+ ctx.beginPath();
+ var where = sampleIndex % numSamples;
+ ctx.arc(this.xpos(where), this.ypos(gcBytes[where]), 5, 0, Math.PI*2, true);
+ ctx.fill();
+
+ ctx.beginPath();
+ for (var i = 0; i < numSamples; i++) {
+ if (i == (sampleIndex + 1) % numSamples)
+ ctx.moveTo(this.xpos(i), this.ypos(gcBytes[i]));
+ else
+ ctx.lineTo(this.xpos(i), this.ypos(gcBytes[i]));
+ if (i == where)
+ ctx.stroke();
+ }
+ ctx.stroke();
+
+ this.drawAxisLabels('Time', 'Heap Memory Usage');
+}
+
+function stopstart()
+{
+ if (stopped) {
+ window.requestAnimationFrame(handler);
+ prev = performance.now();
+ start += prev - stopped;
+ document.getElementById('stop').value = 'Pause';
+ stopped = 0;
+ } else {
+ document.getElementById('stop').value = 'Resume';
+ stopped = performance.now();
+ }
+}
+
+var previous = 0;
+function handler(timestamp)
+{
+ if (stopped)
+ return;
+
+ if (testState === 'running' && (timestamp - testStart) > testDuration)
+ end_test(timestamp);
+
+ if (testState == 'running')
+ document.getElementById("test-progress").textContent = ((testDuration - (timestamp - testStart))/1000).toFixed(1) + " sec";
+
+ activeTest.makeGarbage(activeTest.garbagePerFrame);
+
+ var elt = document.getElementById('data');
+ var delay = timestamp - prev;
+ prev = timestamp;
+
+    // Take the histogram at 10us intervals so that we have enough resolution to
+    // capture a 16.66[666]ms target with adequate accuracy.
+ update_histogram(gHistogram, Math.round(delay * 100));
+
+ var t = timestamp - start;
+ var newIndex = Math.round(t / sampleTime);
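+    // Backfill every sample slot that has elapsed since the last frame with
+    // this frame's delay and GC counters.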
+ while (sampleIndex < newIndex) {
+ sampleIndex++;
+ var idx = sampleIndex % numSamples;
+ delays[idx] = delay;
+ if (features.trackingSizes)
+ gcBytes[idx] = performance.mozMemory.gcBytes;
+ if (features.showingGCs) {
+ gcs[idx] = performance.mozMemory.gcNumber;
+ minorGCs[idx] = performance.mozMemory.minorGCCount;
+ }
+ }
+
+ latencyGraph.draw();
+ if (memoryGraph)
+ memoryGraph.draw();
+ window.requestAnimationFrame(handler);
+}
+
+function summarize(arr) {
+ if (arr.length == 0)
+ return [];
+
+ var result = [];
+ var run_start = 0;
+ var prev = arr[0];
+ for (var i = 1; i <= arr.length; i++) {
+ if (i == arr.length || arr[i] != prev) {
+ if (i == run_start + 1) {
+                result.push(prev);
+ } else {
+ result.push(prev + " x " + (i - run_start));
+ }
+ run_start = i;
+ }
+ if (i != arr.length)
+ prev = arr[i];
+ }
+
+ return result;
+}
+
+function update_histogram(histogram, delay)
+{
+ var current = histogram.has(delay) ? histogram.get(delay) : 0;
+ histogram.set(delay, ++current);
+}
+
+function reset_draw_state()
+{
+ for (var i = 0; i < numSamples; i++)
+ delays[i] = 0;
+ start = prev = performance.now();
+ sampleIndex = 0;
+}
+
+function onunload()
+{
+ if (activeTest)
+ activeTest.unload();
+ activeTest = undefined;
+}
+
+function onload()
+{
+ // Load initial test duration.
+ duration_changed();
+
+ // Load initial garbage size.
+ garbage_total_changed();
+ garbage_per_frame_changed();
+
+ // Populate the test selection dropdown.
+ var select = document.getElementById("test-selection");
+ for (var [name, test] of tests) {
+ test.name = name;
+ var option = document.createElement("option");
+ option.id = name;
+ option.text = name;
+ option.title = test.description;
+ select.add(option);
+ }
+
+ // Load the initial test.
+ change_active_test('noAllocation');
+
+ // Polyfill rAF.
+ var requestAnimationFrame =
+ window.requestAnimationFrame || window.mozRequestAnimationFrame ||
+ window.webkitRequestAnimationFrame || window.msRequestAnimationFrame;
+ window.requestAnimationFrame = requestAnimationFrame;
+
+ // Acquire our canvas.
+ var canvas = document.getElementById('graph');
+ latencyGraph = new LatencyGraph(canvas.getContext('2d'));
+
+ if (!performance.mozMemory) {
+ document.getElementById('memgraph-disabled').style.display = 'block';
+ document.getElementById('track-sizes-div').style.display = 'none';
+ }
+
+ trackHeapSizes(document.getElementById('track-sizes').checked);
+
+ // Start drawing.
+ reset_draw_state();
+ window.requestAnimationFrame(handler);
+}
+
+function run_one_test()
+{
+ start_test_cycle([activeTest.name]);
+}
+
+function run_all_tests()
+{
+ start_test_cycle(tests.keys());
+}
+
+function start_test_cycle(tests_to_run)
+{
+ // Convert from an iterable to an array for pop.
+ testQueue = [];
+ for (var key of tests_to_run)
+ testQueue.push(key);
+ testState = 'running';
+ testStart = performance.now();
+ gHistogram.clear();
+
+ start_test(testQueue.shift());
+ reset_draw_state();
+}
+
+function start_test(testName)
+{
+ change_active_test(testName);
+ console.log(`Running test: ${testName}`);
+ document.getElementById("test-selection").value = testName;
+}
+
+function end_test(timestamp)
+{
+ document.getElementById("test-progress").textContent = "(not running)";
+ report_test_result(activeTest, gHistogram);
+ gHistogram.clear();
+ console.log(`Ending test ${activeTest.name}`);
+ if (testQueue.length) {
+ start_test(testQueue.shift());
+ testStart = timestamp;
+ } else {
+ testState = 'idle';
+ testStart = 0;
+ }
+ reset_draw_state();
+}
+
+function report_test_result(test, histogram)
+{
+ var resultList = document.getElementById('results-display');
+ var resultElem = document.createElement("div");
+ var score = compute_test_score(histogram);
+ var sparks = compute_test_spark_histogram(histogram);
+ var params = `(${format_units(test.garbagePerFrame)},${format_units(test.garbageTotal)})`;
+ resultElem.innerHTML = `${score.toFixed(3)} ms/s : ${sparks} : ${test.name}${params} - ${test.description}`;
+ resultList.appendChild(resultElem);
+}
+
+// Compute a score based on the total ms we missed frames by per second.
+function compute_test_score(histogram)
+{
+ var score = 0;
+ for (var [delay, count] of histogram) {
+ delay = delay / 100;
+ score += Math.abs((delay - 16.66) * count);
+ }
+ score = score / (testDuration / 1000);
+ return Math.round(score * 1000) / 1000;
+}
+
+// Build a spark-lines histogram for the test results to show with the aggregate score.
+function compute_test_spark_histogram(histogram)
+{
+ var ranges = [
+ [-99999999, 16.6],
+ [16.6, 16.8],
+ [16.8, 25],
+ [25, 33.4],
+ [33.4, 60],
+ [60, 100],
+ [100, 300],
+ [300, 99999999],
+ ];
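+    // Frame-delay buckets in ms: at or below 16.6 is on budget, higher buckets
+    // are progressively worse misses.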
+ var rescaled = new Map();
+ for (var [delay, count] of histogram) {
+ delay = delay / 100;
+ for (var i = 0; i < ranges.length; ++i) {
+ var low = ranges[i][0];
+ var high = ranges[i][1];
+ if (low <= delay && delay < high) {
+ update_histogram(rescaled, i);
+ break;
+ }
+ }
+ }
+ var total = 0;
+ for (var [i, count] of rescaled)
+ total += count;
+ var sparks = "▁▂▃▄▅▆▇█";
+ var colors = ['#aaaa00', '#007700', '#dd0000', '#ff0000',
+ '#ff0000', '#ff0000', '#ff0000', '#ff0000'];
+ var line = "";
+ for (var i = 0; i < ranges.length; ++i) {
+ var amt = rescaled.has(i) ? rescaled.get(i) : 0;
+        var spark = sparks.charAt(Math.min(sparks.length - 1, Math.floor(amt / total * 8)));
+ line += `<span style="color:${colors[i]}">${spark}</span>`;
+ }
+ return line;
+}
+
+function reload_active_test()
+{
+ activeTest.unload();
+ activeTest.load(activeTest.garbageTotal);
+}
+
+function change_active_test(new_test_name)
+{
+ if (activeTest)
+ activeTest.unload();
+ activeTest = tests.get(new_test_name);
+
+ if (!activeTest.garbagePerFrame)
+ activeTest.garbagePerFrame = parse_units(activeTest.defaultGarbagePerFrame || globalDefaultGarbagePerFrame);
+ if (!activeTest.garbageTotal)
+ activeTest.garbageTotal = parse_units(activeTest.defaultGarbageTotal || globalDefaultGarbageTotal);
+
+ document.getElementById("garbage-per-frame").value = format_units(activeTest.garbagePerFrame);
+ document.getElementById("garbage-total").value = format_units(activeTest.garbageTotal);
+
+ activeTest.load(activeTest.garbageTotal);
+}
+
+function duration_changed()
+{
+ var durationInput = document.getElementById('test-duration');
+ testDuration = parseInt(durationInput.value) * 1000;
+ console.log(`Updated test duration to: ${testDuration / 1000} seconds`);
+}
+
+function test_changed()
+{
+ var select = document.getElementById("test-selection");
+ console.log(`Switching to test: ${select.value}`);
+ change_active_test(select.value);
+ gHistogram.clear();
+ reset_draw_state();
+}
+
+function parse_units(v)
+{
+ if (v.length == 0)
+ return NaN;
+ var lastChar = v[v.length - 1].toLowerCase();
+ if (!isNaN(parseFloat(lastChar)))
+ return parseFloat(v);
+ var units = parseFloat(v.substr(0, v.length - 1));
+ if (lastChar == "k")
+ return units * 1e3;
+ if (lastChar == "m")
+ return units * 1e6;
+ if (lastChar == "g")
+ return units * 1e9;
+ return NaN;
+}
+
+function format_units(n)
+{
+ n = String(n);
+ if (n.length > 9 && n.substr(-9) == "000000000")
+ return n.substr(0, n.length - 9) + "G";
+    else if (n.length > 6 && n.substr(-6) == "000000")
+ return n.substr(0, n.length - 6) + "M";
+ else if (n.length > 3 && n.substr(-3) == "000")
+ return n.substr(0, n.length - 3) + "K";
+ else
+ return String(n);
+}
+
+function garbage_total_changed()
+{
+ var value = parse_units(document.getElementById('garbage-total').value);
+ if (isNaN(value))
+ return;
+ if (activeTest) {
+ activeTest.garbageTotal = value;
+ console.log(`Updated garbage-total to ${activeTest.garbageTotal} items`);
+ reload_active_test();
+ }
+ gHistogram.clear();
+ reset_draw_state();
+}
+
+function garbage_per_frame_changed()
+{
+ var value = parse_units(document.getElementById('garbage-per-frame').value);
+ if (isNaN(value))
+ return;
+ if (activeTest) {
+ activeTest.garbagePerFrame = value;
+ console.log(`Updated garbage-per-frame to ${activeTest.garbagePerFrame} items`);
+ }
+}
+
+function trackHeapSizes(track)
+{
+ features.trackingSizes = track;
+
+ var canvas = document.getElementById('memgraph');
+
+ if (features.trackingSizes) {
+ canvas.style.display = 'block';
+ memoryGraph = new MemoryGraph(canvas.getContext('2d'));
+ } else {
+ canvas.style.display = 'none';
+ memoryGraph = null;
+ }
+}
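
For reference, harness.js drives benchmark modules through the global tests map created in index.html: each module registers an object providing description, load(N), unload() and makeGarbage(n), plus optional defaultGarbagePerFrame/defaultGarbageTotal strings in the units accepted by parse_units ("8K", "1M", ...). A minimal sketch of what an additional benchmark could look like, following the same ring-buffer pattern as the existing modules (the test name and allocation body below are illustrative only, not part of this patch):

    window.tests.set('exampleSmallStrings', (function() {
    var garbage = [];       // ring buffer of garbage "piles" kept alive across frames
    var garbageIndex = 0;
    return {
        description: "Example only: arrays of small strings held in a ring buffer",

        defaultGarbagePerFrame: "8K",
        defaultGarbageTotal: "1K",

        load: (N) => { garbage = new Array(N); },
        unload: () => { garbage = []; garbageIndex = 0; },

        makeGarbage: (N) => {
            var a = [];
            for (var i = 0; i < N; i++)
                a.push("str" + (i % 256));
            garbage[garbageIndex++] = a;
            if (garbageIndex == garbage.length)
                garbageIndex = 0;
        }
    };
    })());

A file like this dropped into benchmarks/ and listed with a script tag in index.html appears to be all the registration the harness needs; change_active_test fills in the defaults via parse_units when the test is first selected.
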
diff --git a/js/src/devtools/gc-ubench/index.html b/js/src/devtools/gc-ubench/index.html
new file mode 100644
index 000000000..d5ab2a083
--- /dev/null
+++ b/js/src/devtools/gc-ubench/index.html
@@ -0,0 +1,84 @@
+<html>
+<head>
+ <title>GC uBench</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+
+ <!-- Include benchmark modules. -->
+ <script>var tests = new Map();</script>
+ <script src="benchmarks/noAllocation.js"></script>
+ <script src="benchmarks/globalArrayNewObject.js"></script>
+ <script src="benchmarks/globalArrayArrayLiteral.js"></script>
+ <script src="benchmarks/globalArrayLargeArray.js"></script>
+ <script src="benchmarks/globalArrayLargeObject.js"></script>
+ <script src="benchmarks/globalArrayObjectLiteral.js"></script>
+ <script src="benchmarks/globalArrayReallocArray.js"></script>
+ <script src="benchmarks/globalArrayBuffer.js"></script>
+ <script src="benchmarks/globalArrayFgFinalized.js"></script>
+ <script src="benchmarks/largeArrayPropertyAndElements.js"></script>
+ <script src="benchmarks/selfCyclicWeakMap.js"></script>
+ <script src="benchmarks/pairCyclicWeakMap.js"></script>
+ <script src="benchmarks/textNodes.js"></script>
+ <script src="benchmarks/bigTextNodes.js"></script>
+ <script src="benchmarks/events.js"></script>
+ <script src="benchmarks/expandoEvents.js"></script>
+ <script src="benchmarks/propertyTreeSplitting.js"></script>
+
+ <script src="harness.js"></script>
+
+</head>
+
+<body onload="onload()" onunload="onunload()">
+
+<canvas id="graph" width="1080" height="400" style="padding-left:10px"></canvas>
+<canvas id="memgraph" width="1080" height="400" style="padding-left:10px"></canvas>
+<div id="memgraph-disabled" style="display: none"><i>No performance.mozMemory object available. Set dom.enable_memory_stats to true to see heap size info.</i></div>
+
+<hr>
+
+<div id='track-sizes-div'>
+ Show heap size graph: <input id='track-sizes' type='checkbox' onclick="trackHeapSizes(this.checked)">
+</div>
+
+<div>
+ <input type="button" id="stop" value="Pause" onclick="stopstart()"></input>
+</div>
+
+<div>
+ Duration: <input type="text" id="test-duration" size="3" value="8" onchange="duration_changed()"></input>s
+ <input type="button" id="test-one" value="Run Test" onclick="run_one_test()"></input>
+ <input type="button" id="test-all" value="Run All Tests" onclick="run_all_tests()"></input>
+</div>
+
+<div>
+ Currently running test load:
+ <select id="test-selection" required onchange="test_changed()"></select>
+</div>
+
+<div>
+ &nbsp;&nbsp;&nbsp;&nbsp;Time remaining: <span id="test-progress">(not running)</span>
+</div>
+
+<div>
+ &nbsp;&nbsp;&nbsp;&nbsp;60 fps: <span id="pct60">n/a</span>
+ &nbsp;&nbsp;&nbsp;&nbsp;45 fps: <span id="pct45">n/a</span>
+ &nbsp;&nbsp;&nbsp;&nbsp;30 fps: <span id="pct30">n/a</span>
+</div>
+
+<div>
+ &nbsp;&nbsp;&nbsp;&nbsp;Garbage items per frame:
+ <input type="text" id="garbage-per-frame" size="5" value="8K"
+ onchange="garbage_per_frame_changed()"></input>
+</div>
+<div>
+ &nbsp;&nbsp;&nbsp;&nbsp;Garbage piles:
+ <input type="text" id="garbage-total" size="5" value="8M"
+ onchange="garbage_total_changed()"></input>
+</div>
+
+<div id="results-Area">
+ Test Results:
+ <div id="results-display" style="padding-left: 10px; border: 1px solid black;"></div>
+</div>
+
+</body>
+</html>
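
The number reported for each entry in the results area is the score computed by compute_test_score in harness.js: for every bucket in the delay histogram it accumulates |delay - 16.66 ms| * count and divides by the test duration in seconds, i.e. the per-second deviation of frame pauses from the 16.66 ms target. A standalone sketch of the same arithmetic, using made-up sample counts rather than data from a real run:

    // Keys are frame delays in 10us units (as recorded by update_histogram);
    // values are how many frames saw that delay.
    var histogram = new Map([[1666, 100], [3300, 5]]);
    var testDurationMs = 8000;
    var score = 0;
    for (var [delay, count] of histogram)
        score += Math.abs(delay / 100 - 16.66) * count;
    score /= testDurationMs / 1000;
    // The 100 frames right on 16.66ms contribute nothing; the five ~33ms frames
    // overshoot by ~16.34ms each, giving roughly 10.2 ms/s over the 8 second run.
    console.log(score.toFixed(3) + " ms/s");

Lower is better; a run whose every frame lands exactly on the 16.66 ms budget scores 0.
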
diff --git a/js/src/devtools/gc/README.txt b/js/src/devtools/gc/README.txt
new file mode 100644
index 000000000..f4f37efba
--- /dev/null
+++ b/js/src/devtools/gc/README.txt
@@ -0,0 +1,6 @@
+Usage: python gc-test.py [options] JS_SHELL [TESTS]
+
+Requirements:
+1) The shell has to be compiled with --enable-gctimer
+
+Tested with python2.6
diff --git a/js/src/devtools/gc/gc-test.py b/js/src/devtools/gc/gc-test.py
new file mode 100644
index 000000000..dd2097804
--- /dev/null
+++ b/js/src/devtools/gc/gc-test.py
@@ -0,0 +1,166 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Works with python2.6
+
+import datetime, os, re, sys, traceback
+import math, string, copy, json
+import subprocess
+from subprocess import *
+from operator import itemgetter
+
+class Test:
+ def __init__(self, path, name):
+ self.path = path
+ self.name = name
+
+ @classmethod
+ def from_file(cls, path, name, options):
+ return cls(path, name)
+
+def find_tests(dir, substring = None):
+ ans = []
+ for dirpath, dirnames, filenames in os.walk(dir):
+ if dirpath == '.':
+ continue
+ for filename in filenames:
+ if not filename.endswith('.js'):
+ continue
+ test = os.path.join(dirpath, filename)
+ if substring is None or substring in os.path.relpath(test, dir):
+ ans.append([test, filename])
+ return ans
+
+def get_test_cmd(path):
+ return [ JS, '-f', path ]
+
+def avg(seq):
+ return sum(seq) / len(seq)
+
+def stddev(seq, mean):
+ diffs = ((float(item) - mean) ** 2 for item in seq)
+ return math.sqrt(sum(diffs) / len(seq))
+
+def run_test(test):
+ env = os.environ.copy()
+ env['MOZ_GCTIMER'] = 'stderr'
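+    # With an --enable-gctimer build, MOZ_GCTIMER=stderr sends GC timing output
+    # to stderr; the parsing below expects repeated total/mark/sweep triples.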
+ cmd = get_test_cmd(test.path)
+ total = []
+ mark = []
+ sweep = []
+ close_fds = sys.platform != 'win32'
+ p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=close_fds, env=env)
+ out, err = p.communicate()
+ out, err = out.decode(), err.decode()
+
+ float_array = [float(_) for _ in err.split()]
+
+ if len(float_array) == 0:
+ print('Error: No data from application. Configured with --enable-gctimer?')
+ sys.exit(1)
+
+ for i, currItem in enumerate(float_array):
+ if (i % 3 == 0):
+ total.append(currItem)
+ else:
+ if (i % 3 == 1):
+ mark.append(currItem)
+ else:
+ sweep.append(currItem)
+
+ return max(total), avg(total), max(mark), avg(mark), max(sweep), avg(sweep)
+
+def run_tests(tests, test_dir):
+ bench_map = {}
+
+ try:
+ for i, test in enumerate(tests):
+ filename_str = '"%s"' % test.name
+ TMax, TAvg, MMax, MAvg, SMax, SAvg = run_test(test)
+ bench_map[test.name] = [TMax, TAvg, MMax, MAvg, SMax, SAvg]
+ fmt = '%20s: {"TMax": %4.1f, "TAvg": %4.1f, "MMax": %4.1f, "MAvg": %4.1f, "SMax": %4.1f, "SAvg": %4.1f}'
+ if (i != len(tests) - 1):
+ fmt += ','
+            print(fmt % (filename_str, TMax, TAvg, MMax, MAvg, SMax, SAvg))
+ except KeyboardInterrupt:
+ print('fail')
+
+ return dict((filename, dict(TMax=TMax, TAvg=TAvg, MMax=MMax, MAvg=MAvg, SMax=SMax, SAvg=SAvg))
+ for filename, (TMax, TAvg, MMax, MAvg, SMax, SAvg) in bench_map.iteritems())
+
+def compare(current, baseline):
+ percent_speedups = []
+ for key, current_result in current.iteritems():
+ try:
+ baseline_result = baseline[key]
+ except KeyError:
+ print key, 'missing from baseline'
+ continue
+
+ val_getter = itemgetter('TMax', 'TAvg', 'MMax', 'MAvg', 'SMax', 'SAvg')
+ BTMax, BTAvg, BMMax, BMAvg, BSMax, BSAvg = val_getter(baseline_result)
+ CTMax, CTAvg, CMMax, CMAvg, CSMax, CSAvg = val_getter(current_result)
+
+ fmt = '%30s: %s'
+ if CTAvg <= BTAvg:
+ speedup = (CTAvg / BTAvg - 1) * 100
+ result = 'faster: %6.2f < baseline %6.2f (%+6.2f%%)' % \
+ (CTAvg, BTAvg, speedup)
+ percent_speedups.append(speedup)
+ else:
+ slowdown = (CTAvg / BTAvg - 1) * 100
+ result = 'SLOWER: %6.2f > baseline %6.2f (%+6.2f%%) ' % \
+ (CTAvg, BTAvg, slowdown)
+ percent_speedups.append(slowdown)
+ print '%30s: %s' % (key, result)
+ if percent_speedups:
+ print 'Average speedup: %.2f%%' % avg(percent_speedups)
+
+if __name__ == '__main__':
+ script_path = os.path.abspath(__file__)
+ script_dir = os.path.dirname(script_path)
+ test_dir = os.path.join(script_dir, 'tests')
+
+ from optparse import OptionParser
+ op = OptionParser(usage='%prog [options] JS_SHELL [TESTS]')
+
+ op.add_option('-b', '--baseline', metavar='JSON_PATH',
+ dest='baseline_path', help='json file with baseline values to '
+ 'compare against')
+
+ (OPTIONS, args) = op.parse_args()
+ if len(args) < 1:
+ op.error('missing JS_SHELL argument')
+ # We need to make sure we are using backslashes on Windows.
+ JS, test_args = os.path.normpath(args[0]), args[1:]
+
+ test_list = []
+ bench_map = {}
+
+ test_list = find_tests(test_dir)
+
+ if not test_list:
+ print >> sys.stderr, "No tests found matching command line arguments."
+ sys.exit(0)
+
+ test_list = [ Test.from_file(tst, name, OPTIONS) for tst, name in test_list ]
+
+ try:
+ print("{")
+ bench_map = run_tests(test_list, test_dir)
+ print("}")
+
+ except OSError:
+ if not os.path.exists(JS):
+ print >> sys.stderr, "JS shell argument: file does not exist: '%s'"%JS
+ sys.exit(1)
+ else:
+ raise
+
+ if OPTIONS.baseline_path:
+ baseline_map = []
+ fh = open(OPTIONS.baseline_path, 'r')
+ baseline_map = json.load(fh)
+ fh.close()
+ compare(current=bench_map, baseline=baseline_map)
diff --git a/js/src/devtools/gc/tests/clock.js b/js/src/devtools/gc/tests/clock.js
new file mode 100644
index 000000000..fd2fb985f
--- /dev/null
+++ b/js/src/devtools/gc/tests/clock.js
@@ -0,0 +1,35 @@
+//Shell version of Clock Benchmark: https://bug548388.bugzilla.mozilla.org/attachment.cgi?id=434576
+
+var t0;
+var tl;
+
+function alloc(dt) {
+ if (dt > 100)
+ dt = 100;
+ for (var i = 0; i < dt * 1000; ++i) {
+ var o = new String("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
+ }
+}
+
+function cycle() {
+ if (!running)
+ return;
+
+ var t1 = new Date;
+ if (t0 == undefined) t0 = t1;
+
+ if (tl != undefined) {
+ var dt = t1 - tl;
+ alloc(dt);
+ }
+
+ tl = t1;
+
+ if(t1 - t0 > (5 * 1000))
+ running = false;
+}
+
+var running = true;
+while(running)
+ cycle();
+
diff --git a/js/src/devtools/gc/tests/dslots.js b/js/src/devtools/gc/tests/dslots.js
new file mode 100644
index 000000000..8fcb6e8aa
--- /dev/null
+++ b/js/src/devtools/gc/tests/dslots.js
@@ -0,0 +1,26 @@
+//Benchmark to measure overhead of dslots allocation and deallocation
+
+function Object0() {};
+function Object1() { this.a=1; };
+function Object2() { this.a=1; this.b=1; };
+function Object3() { this.a=1; this.b=1; this.c=1; };
+function Object4() { this.a=1; this.b=1; this.c=1; this.d=1; };
+function Object5() { this.a=1; this.b=1; this.c=1; this.d=1; this.e=1; };
+
+function test() {
+ var N = 1e5;
+ gc();
+
+ for(var i = 0; i<=5; i++)
+ {
+ var tmp = i==0 ? Object0 : i==1 ? Object1 : i==2 ? Object2 : i==3 ? Object3 : i==4 ? Object4 : Object5;
+ for (var j = 0; j != N; j++) {
+ var a = new tmp();
+ }
+ gc();
+ }
+}
+
+for(var i = 0; i<=5; i++) {
+ test();
+}
diff --git a/js/src/devtools/gc/tests/loops.js b/js/src/devtools/gc/tests/loops.js
new file mode 100644
index 000000000..a99961a3e
--- /dev/null
+++ b/js/src/devtools/gc/tests/loops.js
@@ -0,0 +1,55 @@
+//Measure plain GC.
+
+var t = [];
+var N = 500000
+
+for(var i = 0; i < N; i++)
+ t[i] = {};
+
+gc()
+
+t = [];
+
+gc();
+
+for(var i = 0; i < N; i++)
+ t[i] = ({});
+
+gc();
+
+t = [];
+
+gc();
+
+
+for(var i = 0; i < N; i++)
+ t[i] = "asdf";
+
+gc();
+
+t = [];
+
+gc();
+
+
+for(var i = 0; i < N; i++)
+ t[i] = 1.12345;
+
+gc();
+
+t=[];
+
+gc();
+
+for(var i = 0; i < N; i++) {
+ t[i] = ({});
+ if (i != 0)
+ t[i].a = t[i-1];
+}
+
+gc();
+
+t = [];
+
+gc();
+
diff --git a/js/src/devtools/gc/tests/objGraph.js b/js/src/devtools/gc/tests/objGraph.js
new file mode 100644
index 000000000..607633173
--- /dev/null
+++ b/js/src/devtools/gc/tests/objGraph.js
@@ -0,0 +1,37 @@
+test();
+
+function test()
+{
+ function generate_big_object_graph()
+ {
+ var root = {};
+ f(root, 17);
+ return root;
+ function f(parent, depth) {
+ if (depth == 0)
+ return;
+ --depth;
+
+ f(parent.a = {}, depth);
+ f(parent.b = {}, depth);
+ }
+ }
+
+ function f(obj) {
+ with (obj)
+ return arguments;
+ }
+
+ for(var i = 0; i != 10; ++i)
+ {
+ gc();
+ var x = null;
+ x = f(generate_big_object_graph());
+
+ gc(); //all used
+
+ x = null;
+
+ gc(); //all free
+ }
+}
diff --git a/js/src/devtools/gctrace/Makefile b/js/src/devtools/gctrace/Makefile
new file mode 100644
index 000000000..f0985286a
--- /dev/null
+++ b/js/src/devtools/gctrace/Makefile
@@ -0,0 +1,6 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+gcstats: gcstats.cpp ../../gc/GCTraceFormat.h Makefile
+ $(CXX) -std=c++11 -g -O2 -I../.. -o $@ $<
diff --git a/js/src/devtools/gctrace/gcstats.cpp b/js/src/devtools/gctrace/gcstats.cpp
new file mode 100644
index 000000000..865e65839
--- /dev/null
+++ b/js/src/devtools/gctrace/gcstats.cpp
@@ -0,0 +1,873 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=78:
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*
+ * Read and process GC trace logs.
+ */
+
+#include "gc/GCTraceFormat.h"
+
+#define __STDC_FORMAT_MACROS
+
+#include <assert.h>
+#include <inttypes.h>
+#include <math.h>
+#include <stdarg.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <algorithm>
+#include <functional>
+#include <unordered_map>
+#include <vector>
+
+// State of the program
+
+enum Heap
+{
+ Nursery,
+ TenuredHeap,
+
+ HeapKinds
+};
+
+enum FinalizerKind
+{
+ NoFinalizer,
+ HasFinalizer,
+
+ FinalizerKinds
+};
+
+enum State
+{
+ StateMutator,
+ StateMinorGC,
+ StateMajorGC
+};
+
+typedef uint64_t address;
+typedef uint8_t AllocKind;
+typedef uint8_t ClassId;
+typedef uint64_t TypeId;
+
+struct AllocInfo
+{
+ const uint64_t serial;
+ const AllocKind kind;
+ const Heap initialHeap;
+ TypeId typeId;
+
+ AllocInfo(uint64_t allocCount, uint8_t kind, Heap loc)
+ : serial(allocCount), kind(kind), initialHeap(loc), typeId(0)
+ {
+ assert(kind < AllocKinds);
+ assert(initialHeap < HeapKinds);
+ }
+};
+
+struct ClassInfo
+{
+ const ClassId id;
+ const char* name;
+ const uint32_t flags;
+ const FinalizerKind hasFinalizer;
+
+ ClassInfo(ClassId id, const char* name, uint32_t flags, FinalizerKind hasFinalizer)
+ : id(id), name(name), flags(flags), hasFinalizer(hasFinalizer) {}
+};
+
+struct TypeInfo
+{
+ const TypeId id;
+ const ClassId classId;
+ const uint32_t flags;
+ const char* name;
+
+ TypeInfo(TypeId id, ClassId classId, uint32_t flags)
+ : id(id), classId(classId), flags(flags), name(nullptr) {}
+
+ const char* getName() {
+ if (name)
+ return name;
+ static char buffer[32];
+        snprintf(buffer, sizeof(buffer), "type %" PRIu64, id);
+ return buffer;
+ }
+};
+
+typedef std::unordered_map<address, AllocInfo> AllocMap;
+typedef std::unordered_map<address, ClassId> ClassMap;
+typedef std::vector<ClassInfo> ClassVector;
+typedef std::unordered_map<address, TypeId> TypeMap;
+typedef std::vector<TypeInfo> TypeVector;
+
+uint64_t thingSizes[AllocKinds];
+AllocMap nurseryThings;
+AllocMap tenuredThings;
+ClassMap classMap;
+ClassVector classes;
+TypeMap typeMap;
+TypeVector types;
+uint64_t allocCount = 0;
+
+// Collected data
+
+const unsigned MaxClasses = 128;
+const unsigned LifetimeBinLog = 10;
+const unsigned MaxLifetimeBins = 40;
+
+const unsigned AugHeapKinds = HeapKinds + 1;
+const unsigned HeapTotal = HeapKinds;
+const unsigned AugAllocKinds = AllocKinds + 1;
+const unsigned AllocKindTotal = AllocKinds;
+const unsigned AugLifetimeBins = MaxLifetimeBins + 1;
+const unsigned LifetimeBinTotal = MaxLifetimeBins;
+const unsigned AugClasses = MaxClasses + 1;
+const unsigned ClassTotal = MaxClasses;
+
+struct EmptyArrayTag {};
+
+template <typename T, size_t length>
+struct Array
+{
+ Array() {}
+ Array(const EmptyArrayTag&) { zero(); }
+ void zero() { memset(&elements, 0, sizeof(elements)); }
+ T& operator[](size_t index) {
+ assert(index < length);
+ return elements[index];
+ }
+ private:
+ T elements[length];
+};
+
+unsigned timesliceSize;
+unsigned lifetimeBins;
+std::vector<uint64_t> gcBytesAllocatedInSlice;
+std::vector<uint64_t> gcBytesFreedInSlice;
+
+Array<Array<uint64_t, AllocKinds>, HeapKinds> allocCountByHeapAndKind;
+Array<Array<uint64_t, MaxLifetimeBins>, HeapKinds> allocCountByHeapAndLifetime;
+Array<Array<Array<uint64_t, MaxLifetimeBins>, AllocKinds>, HeapKinds> allocCountByHeapKindAndLifetime;
+Array<uint64_t, MaxClasses> objectCountByClass;
+std::vector<uint64_t> objectCountByType;
+Array<Array<uint64_t, MaxClasses>, HeapKinds> objectCountByHeapAndClass;
+Array<Array<Array<uint64_t, MaxLifetimeBins>, MaxClasses>, HeapKinds> objectCountByHeapClassAndLifetime;
+Array<Array<uint64_t, MaxLifetimeBins>, FinalizerKinds> heapObjectCountByFinalizerAndLifetime;
+Array<Array<uint64_t, MaxLifetimeBins>, MaxClasses> finalizedHeapObjectCountByClassAndLifetime;
+std::vector<Array<Array<uint64_t, MaxLifetimeBins>, HeapKinds> > objectCountByTypeHeapAndLifetime;
+
+static void
+MOZ_FORMAT_PRINTF(1, 2)
+die(const char* format, ...)
+{
+ va_list va;
+ va_start(va, format);
+ vfprintf(stderr, format, va);
+ fprintf(stderr, "\n");
+ va_end(va);
+ exit(1);
+}
+
+const uint64_t FirstBinSize = 100;
+const unsigned BinLog = 2;
+
+static unsigned
+getBin(uint64_t lifetime)
+{
+ /*
+ * Calculate a bin number for a given lifetime.
+ *
+ * We use a logarithmic scale, starting with a bin size of 100 and doubling
+ * from there.
+ */
+ static double logDivisor = log(BinLog);
+ if (lifetime < FirstBinSize)
+ return 0;
+ return unsigned(log(lifetime / FirstBinSize) / logDivisor) + 1;
+}
+
+static unsigned
+binLimit(unsigned bin)
+{
+ return unsigned(pow(BinLog, bin) * FirstBinSize);
+}
+
+static void
+testBinning()
+{
+ assert(getBin(0) == 0);
+ assert(getBin(FirstBinSize - 1) == 0);
+ assert(getBin(FirstBinSize) == 1);
+ assert(getBin(2 * FirstBinSize - 1) == 1);
+ assert(getBin(2 * FirstBinSize) == 2);
+ assert(getBin(4 * FirstBinSize - 1) == 2);
+ assert(getBin(4 * FirstBinSize) == 3);
+ assert(binLimit(0) == FirstBinSize);
+ assert(binLimit(1) == 2 * FirstBinSize);
+ assert(binLimit(2) == 4 * FirstBinSize);
+ assert(binLimit(3) == 8 * FirstBinSize);
+}
+
+static const char*
+allocKindName(AllocKind kind)
+{
+ static const char* AllocKindNames[] = {
+ "Object0",
+ "Object0Bg",
+ "Object2",
+ "Object2Bg",
+ "Object4",
+ "Object4Bg",
+ "Object8",
+ "Object8Bg",
+ "Object12",
+ "Object12Bg",
+ "Object16",
+ "Object16Bg",
+ "Script",
+ "LazyScript",
+ "Shape",
+ "BaseShape",
+ "TypeObject",
+ "FatInlineString",
+ "String",
+ "ExternalString",
+ "Symbol",
+ "JitCode",
+ "Total"
+ };
+ assert(sizeof(AllocKindNames) / sizeof(const char*) == AugAllocKinds);
+ assert(kind < AugAllocKinds);
+ return AllocKindNames[kind];
+}
+
+static const char*
+heapName(unsigned heap)
+{
+ static const char* HeapNames[] = {
+ "nursery",
+ "tenured heap",
+ "all"
+ };
+ assert(heap < AugHeapKinds);
+ return HeapNames[heap];
+}
+
+
+static const char*
+heapLabel(unsigned heap)
+{
+ static const char* HeapLabels[] = {
+ "Nursery",
+ "Tenured heap",
+ "Total"
+ };
+ assert(heap < AugHeapKinds);
+ return HeapLabels[heap];
+}
+
+static void
+outputGcBytesAllocated(FILE* file)
+{
+ fprintf(file, "# Total GC bytes allocated by timeslice\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Time, GCBytesAllocated\n");
+
+ uint64_t timesliceCount = allocCount / timesliceSize + 1;
+ uint64_t total = 0;
+ for (uint64_t i = 0; i < timesliceCount; ++i) {
+ total += gcBytesAllocatedInSlice[i];
+ fprintf(file, "%12" PRIu64 ", %12" PRIu64 "\n", i * timesliceSize, total);
+ }
+}
+
+static void
+outputGcBytesUsed(FILE* file)
+{
+ fprintf(file, "# Total GC bytes used by timeslice\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Time, GCBytesUsed\n");
+
+ uint64_t timesliceCount = allocCount / timesliceSize + 1;
+ uint64_t total = 0;
+ for (uint64_t i = 0; i < timesliceCount; ++i) {
+ total += gcBytesAllocatedInSlice[i] - gcBytesFreedInSlice[i];
+ fprintf(file, "%12" PRIu64 ", %12" PRIu64 "\n", i * timesliceSize, total);
+ }
+}
+
+static void
+outputThingCounts(FILE* file)
+{
+ fprintf(file, "# GC thing allocation count in nursery and tenured heap by kind\n");
+ fprintf(file, "# This shows what kind of things we are allocating in the nursery\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Kind, Nursery, Tenured heap\n");
+ for (unsigned i = 0; i < AllocKinds; ++i) {
+ fprintf(file, "%15s, %8" PRIu64 ", %8" PRIu64 "\n", allocKindName(i),
+ allocCountByHeapAndKind[Nursery][i],
+ allocCountByHeapAndKind[TenuredHeap][i]);
+ }
+}
+
+static void
+outputObjectCounts(FILE* file)
+{
+ fprintf(file, "# Object allocation count in nursery and tenured heap by class\n");
+ fprintf(file, "# This shows what kind of objects we are allocating in the nursery\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Class, Nursery, Tenured heap, Total\n");
+ for (unsigned i = 0; i < classes.size(); ++i) {
+ fprintf(file, "%30s, %8" PRIu64 ", %8" PRIu64 ", %8" PRIu64 "\n",
+ classes[i].name,
+ objectCountByHeapAndClass[Nursery][i],
+ objectCountByHeapAndClass[TenuredHeap][i],
+ objectCountByClass[i]);
+ }
+}
+
+static void
+outputLifetimeByHeap(FILE* file)
+{
+ fprintf(file, "# Lifetime of all things (in log2 bins) by initial heap\n");
+ fprintf(file, "# NB invalid unless execution was traced with appropriate zeal\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Lifetime");
+ for (unsigned i = 0; i < HeapKinds; ++i)
+ fprintf(file, ", %s", heapLabel(i));
+ fprintf(file, "\n");
+
+ for (unsigned i = 0; i < lifetimeBins; ++i) {
+ fprintf(file, "%8d", binLimit(i));
+ for (unsigned j = 0; j < HeapKinds; ++j)
+ fprintf(file, ", %8" PRIu64, allocCountByHeapAndLifetime[j][i]);
+ fprintf(file, "\n");
+ }
+}
+
+static void
+outputLifetimeByHasFinalizer(FILE* file)
+{
+    fprintf(file, "# Lifetime of heap allocated objects by presence of finalizer\n");
+ fprintf(file, "# NB invalid unless execution was traced with appropriate zeal\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Lifetime, NoFinalizer, HasFinalizer\n");
+
+ for (unsigned i = 0; i < lifetimeBins; ++i) {
+ fprintf(file, "%8d", binLimit(i));
+ for (unsigned j = 0; j < FinalizerKinds; ++j)
+ fprintf(file, ", %8" PRIu64,
+ heapObjectCountByFinalizerAndLifetime[j][i]);
+ fprintf(file, "\n");
+ }
+}
+
+static void
+outputFinalizedHeapObjectLifetimeByClass(FILE* file)
+{
+ fprintf(file, "# Lifetime of finalized heap objects by class\n");
+ fprintf(file, "# NB invalid unless execution was traced with appropriate zeal\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Lifetime");
+ for (unsigned i = 0; i < classes.size(); ++i)
+ fprintf(file, ", %15s", classes[i].name);
+ fprintf(file, "\n");
+
+ for (unsigned i = 0; i < lifetimeBins; ++i) {
+ fprintf(file, "%8d", binLimit(i));
+ for (unsigned j = 0; j < classes.size(); ++j) {
+ fprintf(file, ", %8" PRIu64,
+ finalizedHeapObjectCountByClassAndLifetime[j][i]);
+ }
+ fprintf(file, "\n");
+ }
+}
+
+static void
+outputLifetimeByKind(FILE* file, unsigned initialHeap)
+{
+ assert(initialHeap < AugHeapKinds);
+
+ fprintf(file, "# Lifetime of %s things (in log2 bins) by kind\n", heapName(initialHeap));
+ fprintf(file, "# NB invalid unless execution was traced with appropriate zeal\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Lifetime");
+ for (unsigned i = 0; i < AllocKinds; ++i)
+ fprintf(file, ", %15s", allocKindName(i));
+ fprintf(file, "\n");
+
+ for (unsigned i = 0; i < lifetimeBins; ++i) {
+ fprintf(file, "%8d", binLimit(i));
+ for (unsigned j = 0; j < AllocKinds; ++j)
+ fprintf(file, ", %8" PRIu64,
+ allocCountByHeapKindAndLifetime[initialHeap][j][i]);
+ fprintf(file, "\n");
+ }
+}
+
+static void
+outputLifetimeByClass(FILE* file, unsigned initialHeap)
+{
+ assert(initialHeap < AugHeapKinds);
+
+ fprintf(file, "# Lifetime of %s things (in log2 bins) by class\n", heapName(initialHeap));
+ fprintf(file, "# NB invalid unless execution was traced with appropriate zeal\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+ fprintf(file, "Lifetime");
+ for (unsigned i = 0; i < classes.size(); ++i)
+ fprintf(file, ", %15s", classes[i].name);
+ fprintf(file, "\n");
+
+ for (unsigned i = 0; i < lifetimeBins; ++i) {
+ fprintf(file, "%8d", binLimit(i));
+ for (unsigned j = 0; j < classes.size(); ++j)
+ fprintf(file, ", %8" PRIu64,
+ objectCountByHeapClassAndLifetime[initialHeap][j][i]);
+ fprintf(file, "\n");
+ }
+}
+
+static void
+outputLifetimeByType(FILE* file, unsigned initialHeap)
+{
+ assert(initialHeap < AugHeapKinds);
+
+ fprintf(file, "# Lifetime of %s things (in log2 bins) by type\n", heapName(initialHeap));
+ fprintf(file, "# NB invalid unless execution was traced with appropriate zeal\n");
+ fprintf(file, "# Total allocations: %" PRIu64 "\n", allocCount);
+
+ // There are many types but few are frequently used.
+ const size_t minObjectCount = 1;
+ const size_t outputEntries = 10;
+ std::vector<TypeId> topTypes;
+ for (size_t i = 0; i < types.size(); ++i) {
+ if (objectCountByType.at(i) > minObjectCount)
+ topTypes.push_back(i);
+ }
+ std::sort(topTypes.begin(), topTypes.end(),
+ [] (TypeId a, TypeId b) { return objectCountByType.at(a) > objectCountByType.at(b); });
+ size_t count = std::min(outputEntries, topTypes.size());
+
+ fprintf(file, "Lifetime");
+ for (unsigned i = 0; i < count; ++i)
+ fprintf(file, ", %15s", types[topTypes[i]].getName());
+ fprintf(file, "\n");
+
+ for (unsigned i = 0; i < lifetimeBins; ++i) {
+ fprintf(file, "%8d", binLimit(i));
+ for (unsigned j = 0; j < count; ++j)
+ fprintf(file, ", %8" PRIu64,
+ objectCountByTypeHeapAndLifetime.at(topTypes[j])[initialHeap][i]);
+ fprintf(file, "\n");
+ }
+}
+
+static void
+processAlloc(const AllocInfo& info, uint64_t finalizeTime)
+{
+ uint64_t lifetime = finalizeTime - info.serial;
+ unsigned timeslice = info.serial / timesliceSize;
+
+ unsigned lifetimeBin = getBin(lifetime);
+ assert(lifetimeBin < lifetimeBins);
+
+ ++allocCountByHeapAndKind[info.initialHeap][info.kind];
+ ++allocCountByHeapAndLifetime[info.initialHeap][lifetimeBin];
+ ++allocCountByHeapKindAndLifetime[info.initialHeap][info.kind][lifetimeBin];
+
+ if (info.kind <= LastObjectAllocKind) {
+ const TypeInfo& typeInfo = types[info.typeId];
+ const ClassInfo& classInfo = classes[typeInfo.classId];
+ ++objectCountByType.at(typeInfo.id);
+ ++objectCountByClass[classInfo.id];
+ ++objectCountByHeapAndClass[info.initialHeap][classInfo.id];
+ ++objectCountByHeapClassAndLifetime[info.initialHeap][classInfo.id][lifetimeBin];
+ ++objectCountByTypeHeapAndLifetime.at(typeInfo.id)[info.initialHeap][lifetimeBin];
+ if (info.initialHeap == TenuredHeap) {
+ FinalizerKind f = classes[classInfo.id].hasFinalizer;
+ ++heapObjectCountByFinalizerAndLifetime[f][lifetimeBin];
+ if (f == HasFinalizer)
+ ++finalizedHeapObjectCountByClassAndLifetime[classInfo.id][lifetimeBin];
+ }
+ }
+
+ uint64_t size = thingSizes[info.kind];
+ gcBytesAllocatedInSlice[timeslice] += size;
+ gcBytesFreedInSlice[finalizeTime / timesliceSize] += size;
+}
+
+static bool
+readTrace(FILE* file, uint64_t& trace)
+{
+ if (fread(&trace, sizeof(trace), 1, file) != 1) {
+ if (feof(file))
+ return false;
+ else
+ die("Error reading input");
+ }
+ return true;
+}
+
+static GCTraceEvent
+getTraceEvent(uint64_t trace)
+{
+ uint64_t event = trace >> TraceEventShift;
+ assert(event < GCTraceEventCount);
+ return (GCTraceEvent)event;
+}
+
+static uint64_t
+getTracePayload(uint64_t trace)
+{
+    return trace & ((uint64_t(1) << TracePayloadBits) - 1);
+}
+
+static uint8_t
+getTraceExtra(uint64_t trace)
+{
+ uint64_t extra = (trace >> TraceExtraShift) & ((1 << TraceExtraBits) - 1);
+ assert(extra < 256);
+ return (uint8_t)extra;
+}
+
+static uint64_t
+expectTrace(FILE* file, GCTraceEvent event)
+{
+ uint64_t trace;
+ if (!readTrace(file, trace))
+ die("End of file while expecting trace %d", event);
+ if (getTraceEvent(trace) != event)
+ die("Expected trace %d but got trace %d", event, getTraceEvent(trace));
+ return getTracePayload(trace);
+}
+
+static uint64_t
+expectDataAddress(FILE* file)
+{
+ return expectTrace(file, TraceDataAddress);
+}
+
+static uint32_t
+expectDataInt(FILE* file)
+{
+ return (uint32_t)expectTrace(file, TraceDataInt);
+}
+
+static char*
+expectDataString(FILE* file)
+{
+ uint64_t length = expectTrace(file, TraceDataString);
+ assert(length < 256); // Sanity check
+ char* string = static_cast<char*>(malloc(length + 1));
+ if (!string)
+ die("Out of memory while reading string data");
+
+ const unsigned charsPerWord = sizeof(uint64_t);
+ unsigned wordCount = (length + charsPerWord - 1) / charsPerWord;
+ for (unsigned i = 0; i < wordCount; ++i) {
+ if (fread(&string[i * charsPerWord], sizeof(char), charsPerWord, file) != charsPerWord)
+ die("Error or EOF while reading string data");
+ }
+ string[length] = 0;
+
+ return string;
+}
+
+static void
+createClassInfo(const char* name, uint32_t flags, FinalizerKind hasFinalizer,
+ address clasp = 0)
+{
+ ClassId id = classes.size();
+ classes.push_back(ClassInfo(id, name, flags, hasFinalizer));
+ if (clasp)
+ classMap.emplace(clasp, id);
+}
+
+static void
+readClassInfo(FILE* file, address clasp)
+{
+ assert(clasp);
+ char* name = expectDataString(file);
+ uint32_t flags = expectDataInt(file);
+ FinalizerKind hasFinalizer = expectDataInt(file) != 0 ? HasFinalizer : NoFinalizer;
+ createClassInfo(name, flags, hasFinalizer, clasp);
+}
+
+static ClassId
+lookupClassId(address clasp)
+{
+ auto i = classMap.find(clasp);
+ assert(i != classMap.end());
+ ClassId id = i->second;
+ assert(id < classes.size());
+ return id;
+}
+
+static void
+createTypeInfo(ClassId classId, uint32_t flags, address typeObject = 0)
+{
+ TypeId id = types.size();
+ types.push_back(TypeInfo(id, classId, flags));
+ if (typeObject)
+ typeMap.emplace(typeObject, id);
+ objectCountByType.push_back(0);
+ objectCountByTypeHeapAndLifetime.push_back(EmptyArrayTag());
+}
+
+static void
+readTypeInfo(FILE* file, address typeObject)
+{
+ assert(typeObject);
+ address clasp = expectDataAddress(file);
+ uint32_t flags = expectDataInt(file);
+ createTypeInfo(lookupClassId(clasp), flags, typeObject);
+}
+
+static TypeId
+lookupTypeId(address typeObject)
+{
+ auto i = typeMap.find(typeObject);
+ assert(i != typeMap.end());
+ TypeId id = i->second;
+ assert(id < types.size());
+ return id;
+}
+
+static void
+setTypeName(address typeObject, const char* name)
+{
+ TypeId id = lookupTypeId(typeObject);
+ types[id].name = name;
+}
+
+static void
+allocHeapThing(address thing, AllocKind kind)
+{
+ uint64_t allocTime = allocCount++;
+ tenuredThings.emplace(thing, AllocInfo(allocTime, kind, TenuredHeap));
+}
+
+static void
+allocNurseryThing(address thing, AllocKind kind)
+{
+ uint64_t allocTime = allocCount++;
+ nurseryThings.emplace(thing, AllocInfo(allocTime, kind, Nursery));
+}
+
+static void
+setObjectType(address obj, address typeObject)
+{
+ auto j = nurseryThings.find(obj);
+ if (j == nurseryThings.end()) {
+ j = tenuredThings.find(obj);
+ if (j == tenuredThings.end())
+            die("Can't find allocation for object 0x%" PRIx64, obj);
+ }
+ j->second.typeId = lookupTypeId(typeObject);
+}
+
+static void
+promoteToTenured(address src, address dst)
+{
+ auto i = nurseryThings.find(src);
+ assert(i != nurseryThings.end());
+ AllocInfo alloc = i->second;
+ tenuredThings.emplace(dst, alloc);
+ nurseryThings.erase(i);
+}
+
+static void
+finalizeThing(const AllocInfo& info)
+{
+ processAlloc(info, allocCount);
+}
+
+static void
+sweepNursery()
+{
+ for (auto i = nurseryThings.begin(); i != nurseryThings.end(); ++i) {
+ finalizeThing(i->second);
+ }
+ nurseryThings.clear();
+}
+
+static void
+finalizeTenuredThing(address thing)
+{
+ auto i = tenuredThings.find(thing);
+ assert(i != tenuredThings.end());
+ finalizeThing(i->second);
+ tenuredThings.erase(i);
+}
+
+static void
+updateTimeslices(std::vector<uint64_t>& data, uint64_t lastTime, uint64_t currentTime, uint64_t value)
+{
+ unsigned firstSlice = (lastTime / timesliceSize) + 1;
+ unsigned lastSlice = currentTime / timesliceSize;
+ for (unsigned i = firstSlice; i <= lastSlice; ++i)
+ data[i] = value;
+}
+
+static void
+processTraceFile(const char* filename)
+{
+ FILE* file;
+ file = fopen(filename, "r");
+ if (!file)
+ die("Can't read file: %s", filename);
+
+ // Get a conservative estimate of the total number of allocations so we can
+ // allocate buffers in advance.
+ fseek(file, 0, SEEK_END);
+ size_t length = ftell(file);
+ fseek(file, 0, SEEK_SET);
+ size_t maxTraces = length / sizeof(uint64_t);
+
+ uint64_t trace;
+ if (!readTrace(file, trace))
+ die("Empty input file");
+ if (getTraceEvent(trace) != TraceEventInit)
+ die("Can't parse input file");
+ if (getTraceExtra(trace) != TraceFormatVersion)
+ die("Unexpected format version %d", getTraceExtra(trace));
+ for (unsigned kind = 0; kind < AllocKinds; ++kind)
+ thingSizes[kind] = expectTrace(file, TraceEventThingSize);
+
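+    // Pick a timeslice size that keeps the number of output timeslices near 1000.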
+ timesliceSize = 1000;
+ while ((maxTraces / timesliceSize ) > 1000)
+ timesliceSize *= 2;
+
+ size_t maxTimeslices = maxTraces / timesliceSize;
+    gcBytesAllocatedInSlice.resize(maxTimeslices + 1);
+    gcBytesFreedInSlice.resize(maxTimeslices + 1);
+ lifetimeBins = getBin(maxTraces) + 1;
+ assert(lifetimeBins <= MaxLifetimeBins);
+
+ createClassInfo("unknown", 0, NoFinalizer);
+ createTypeInfo(0, 0);
+ types[0].name = "unknown";
+
+ State state = StateMutator;
+ while (readTrace(file, trace)) {
+ GCTraceEvent event = getTraceEvent(trace);
+ switch (event) {
+ case TraceEventNurseryAlloc:
+ assert(state == StateMutator);
+ allocNurseryThing(getTracePayload(trace), getTraceExtra(trace));
+ break;
+ case TraceEventTenuredAlloc:
+ assert(state == StateMutator);
+ allocHeapThing(getTracePayload(trace), getTraceExtra(trace));
+ break;
+ case TraceEventClassInfo:
+ assert(state == StateMutator);
+ readClassInfo(file, getTracePayload(trace));
+ break;
+ case TraceEventTypeInfo:
+ assert(state == StateMutator);
+ readTypeInfo(file, getTracePayload(trace));
+ break;
+ case TraceEventTypeNewScript:
+ assert(state == StateMutator);
+ setTypeName(getTracePayload(trace), expectDataString(file));
+ break;
+ case TraceEventCreateObject:
+ assert(state == StateMutator);
+ setObjectType(getTracePayload(trace), expectDataAddress(file));
+ break;
+ case TraceEventMinorGCStart:
+ assert(state == StateMutator);
+ state = StateMinorGC;
+ break;
+ case TraceEventPromoteToTenured:
+ assert(state == StateMinorGC);
+ promoteToTenured(getTracePayload(trace), expectDataAddress(file));
+ break;
+ case TraceEventMinorGCEnd:
+ assert(state == StateMinorGC);
+ sweepNursery();
+ state = StateMutator;
+ break;
+ case TraceEventMajorGCStart:
+ assert(state == StateMutator);
+ state = StateMajorGC;
+ break;
+ case TraceEventTenuredFinalize:
+ assert(state == StateMajorGC);
+ finalizeTenuredThing(getTracePayload(trace));
+ break;
+ case TraceEventMajorGCEnd:
+ assert(state == StateMajorGC);
+ state = StateMutator;
+ break;
+ default:
+ assert(false);
+ die("Unexpected trace event %d", event);
+ break;
+ }
+ }
+
+ // Correct number of lifetime bins now we know the real allocation count.
+ assert(allocCount < maxTraces);
+ lifetimeBins = getBin(allocCount) + 1;
+ assert(lifetimeBins <= MaxLifetimeBins);
+
+ fclose(file);
+}
+
+template <class func>
+void withOutputFile(const char* base, const char* name, func f)
+{
+ const size_t bufSize = 256;
+ char filename[bufSize];
+ int r = snprintf(filename, bufSize, "%s-%s.csv", base, name);
+ assert(r > 0 && r < bufSize);
+
+ FILE* file = fopen(filename, "w");
+ if (!file)
+ die("Can't write to %s", filename);
+ f(file);
+ fclose(file);
+}
+
+int
+main(int argc, const char* argv[])
+{
+ testBinning();
+
+ if (argc != 3)
+ die("usage: gctrace INPUT_FILE OUTPUT_BASE");
+ const char* inputFile = argv[1];
+ const char* outputBase = argv[2];
+
+ processTraceFile(inputFile);
+
+ using namespace std::placeholders;
+ withOutputFile(outputBase, "bytesAllocatedBySlice", outputGcBytesAllocated);
+ withOutputFile(outputBase, "bytesUsedBySlice", outputGcBytesUsed);
+ withOutputFile(outputBase, "thingCounts", outputThingCounts);
+ withOutputFile(outputBase, "objectCounts", outputObjectCounts);
+ withOutputFile(outputBase, "lifetimeByClassForNursery",
+ std::bind(outputLifetimeByClass, _1, Nursery));
+ withOutputFile(outputBase, "lifetimeByKindForHeap",
+ std::bind(outputLifetimeByKind, _1, TenuredHeap));
+ withOutputFile(outputBase, "lifetimeByHeap", outputLifetimeByHeap);
+ withOutputFile(outputBase, "lifetimeByHasFinalizer",
+ outputLifetimeByHasFinalizer);
+ withOutputFile(outputBase, "finalizedHeapObjectlifetimeByClass",
+ outputFinalizedHeapObjectLifetimeByClass);
+ withOutputFile(outputBase, "lifetimeByTypeForNursery",
+ std::bind(outputLifetimeByType, _1, Nursery));
+ withOutputFile(outputBase, "lifetimeByTypeForHeap",
+ std::bind(outputLifetimeByType, _1, TenuredHeap));
+ return 0;
+}
diff --git a/js/src/devtools/gnuplot/gcTimer.gnu b/js/src/devtools/gnuplot/gcTimer.gnu
new file mode 100644
index 000000000..b8b3ac9d8
--- /dev/null
+++ b/js/src/devtools/gnuplot/gcTimer.gnu
@@ -0,0 +1,24 @@
+# gnuplot script to visualize GCMETER results.
+# usage: "gnuplot gcTimer.gnu >outputfile.png"
+
+set terminal png
+# set Title
+set title "Title goes here!"
+set datafile missing "-"
+set noxtics
+#set ytics nomirror
+set ylabel "msec"
+set key below
+set style data linespoints
+
+#set data file
+plot 'gcTimer.dat' using 2 title columnheader(2), \
+'' u 3 title columnheader(3) with points, \
+'' u 4 title columnheader(4), \
+'' u 5 title columnheader(5), \
+'' u 6 title columnheader(6) with points, \
+'' u 7 title columnheader(7) with points, \
+'' u 8 title columnheader(8) with points, \
+'' u 9 title columnheader(9) with points, \
+'' u 10 title columnheader(10) with points, \
+'' u 11 title columnheader(11) with points
diff --git a/js/src/devtools/javascript-trace.d b/js/src/devtools/javascript-trace.d
new file mode 100644
index 000000000..db759291c
--- /dev/null
+++ b/js/src/devtools/javascript-trace.d
@@ -0,0 +1,34 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*
+ * javascript provider probes
+ *
+ * function-entry (filename, classname, funcname)
+ * function-return (filename, classname, funcname)
+ * object-create (classname, *object)
+ * object-finalize (NULL, classname, *object)
+ * execute-start (filename, lineno)
+ * execute-done (filename, lineno)
+ */
+
+provider javascript {
+ probe function__entry(const char *, const char *, const char *);
+ probe function__return(const char *, const char *, const char *);
+ /* XXX must use unsigned longs here instead of uintptr_t for OS X
+ (Apple radar: 5194316 & 5565198) */
+ probe object__create(const char *, unsigned long);
+ probe object__finalize(const char *, const char *, unsigned long);
+ probe execute__start(const char *, int);
+ probe execute__done(const char *, int);
+};
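+
+/*
+ * A hypothetical dtrace(1) one-liner for watching these probes on a build that
+ * has them enabled; USDT probe names use '-' where the declarations above use
+ * '__', and arg0/arg2 of function-entry are the filename and function name:
+ *
+ *   dtrace -p <pid> -n 'javascript*:::function-entry
+ *       { printf("%s %s\n", copyinstr(arg0), copyinstr(arg2)); }'
+ */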
+
+/*
+#pragma D attributes Unstable/Unstable/Common provider mozilla provider
+#pragma D attributes Private/Private/Unknown provider mozilla module
+#pragma D attributes Private/Private/Unknown provider mozilla function
+#pragma D attributes Unstable/Unstable/Common provider mozilla name
+*/
+
diff --git a/js/src/devtools/octane-csv.sh b/js/src/devtools/octane-csv.sh
new file mode 100755
index 000000000..1049a2b47
--- /dev/null
+++ b/js/src/devtools/octane-csv.sh
@@ -0,0 +1,76 @@
+#!/usr/bin/env bash
+
+set -e -o pipefail
+
+function echo_to_stderr {
+ echo "$1" 1>&2
+}
+
+function usage_and_exit {
+ echo_to_stderr "Usage:"
+ echo_to_stderr " $0 <path-to-js> <number-of-iterations>"
+ echo_to_stderr
+ echo_to_stderr "Run octane <number-of-iterations> times, and aggregate the results"
+ echo_to_stderr "into one CSV file, which is written to stdout."
+ echo_to_stderr
+ echo_to_stderr "See the js/src/devtools/plot-octane.R script for plotting the"
+ echo_to_stderr "results."
+ echo_to_stderr
+ echo_to_stderr "Complete example usage with plotting:"
+ echo_to_stderr
+ echo_to_stderr " \$ ./js/src/devtools/octane-csv.sh path/to/js 20 > control.csv"
+ echo_to_stderr
+ echo_to_stderr " Next, apply some patch you'd like to test."
+ echo_to_stderr
+ echo_to_stderr " \$ ./js/src/devtools/octane-csv.sh path/to/js 20 > variable.csv"
+ echo_to_stderr " \$ ./js/src/devtools/plot-octane.R control.csv variable.csv"
+ echo_to_stderr
+ echo_to_stderr " Open Rplots.pdf to view the results."
+ exit 1
+}
+
+if [[ "$#" != "2" ]]; then
+ usage_and_exit
+fi
+
+# Get the absolute, normalized $JS path, and ensure it is executable.
+
+JS_DIR=$(dirname $1)
+if [[ ! -d "$JS_DIR" ]]; then
+ echo_to_stderr "error: no such directory $JS_DIR"
+ echo_to_stderr
+ usage_and_exit
+fi
+
+JS=$(basename $1)
+cd "$JS_DIR" > /dev/null
+JS="$(pwd)/$JS"
+if [[ ! -x "$JS" ]]; then
+ echo_to_stderr "error: '$JS' is not executable"
+ echo_to_stderr
+ usage_and_exit
+fi
+cd - > /dev/null
+
+# Go to the js/src/octane directory.
+
+cd $(dirname $0)/../octane > /dev/null
+
+# Run octane and transform the results into CSV.
+#
+# Run once as a warm up, and to grab the column headers. Then run the benchmark
+# $ITERS times, grabbing just the data rows.
+
+echo_to_stderr "Warm up"
+"$JS" ./run.js | grep -v -- "----" | cut -f 1 -d ':' | tr '\n' ','
+echo
+
+ITERS=$2
+while [[ "$ITERS" -ge "1" ]]; do
+ echo_to_stderr "Iterations left: $ITERS"
+ "$JS" ./run.js | grep -v -- "----" | cut -f 2 -d ':' | tr '\n' ','
+ echo
+ ITERS=$((ITERS - 1))
+done
+
+echo_to_stderr "All done :)"
diff --git a/js/src/devtools/plot-octane.R b/js/src/devtools/plot-octane.R
new file mode 100755
index 000000000..cd7ac7303
--- /dev/null
+++ b/js/src/devtools/plot-octane.R
@@ -0,0 +1,38 @@
+#!/usr/bin/env Rscript
+
+# Usage:
+#
+#   plot-octane.R control.csv variable.csv
+#
+# Output will be placed in Rplots.pdf
+#
+# Remember: on Octane, higher is better!
+
+library(ggplot2)
+
+args <- commandArgs(trailingOnly = TRUE)
+
+# Reading in data.
+control <- read.table(args[1], sep=",", header=TRUE)
+variable <- read.table(args[2], sep=",", header=TRUE)
+
+# Pulling out columns that we want to plot.
+# Not totally necessary.
+ctrl <- control$Score..version.9.
+var <- variable$Score..version.9.
+
+# Concatenating the values we want to plot.
+score <- c(ctrl, var)
+# Creating a vector of labels for the data points.
+label <- c(rep("control", length(ctrl)), rep("variable", length(var)))
+
+# Creating a data frame of the score and label.
+data <- data.frame(label, score)
+
+# Now plotting!
+ggplot(data, aes(label, score, color=label, pch=label)) +
+ # Adding boxplot without the outliers.
+ geom_boxplot(outlier.shape=NA) +
+ # Adding jitter plot on top of the boxplot. If you want to spread the points
+ # more, increase jitter.
+ geom_jitter(position=position_jitter(width=0.05))
diff --git a/js/src/devtools/release/release-notes b/js/src/devtools/release/release-notes
new file mode 100755
index 000000000..48cc53ac9
--- /dev/null
+++ b/js/src/devtools/release/release-notes
@@ -0,0 +1,195 @@
+#!/usr/bin/perl
+
+# How to use:
+#
+# Step 1: run release-notes diff old-jsapi.h new-jsapi.h > diff.txt
+#
+# Step 2: edit diff.txt
+#   - when a function has been renamed, move the - and + lines so they are adjacent and mark the - line with [rename] at the end
+#   - when a function has been replaced, do the same but mark the - line with [replace] (replacements are reported differently)
+# - for anything that isn't a simple addition, deletion, rename, or replace, tag with [other]
+# (things tagged [other] will be put in a separate section for manual fixup)
+#
+# Step 3: run release-notes < diff.txt > changes.txt
+# - this will group changes into sections and annotate them with bug numbers
+# - the bugs chosen are just the bug that last touched each line, and are unlikely to be entirely accurate
+#
+# Step 4: run release-notes mdn < changes.txt > final.txt
+# - this will add an MDN link to every list item, first checking whether such a link is valid
+#
+# Step 5: paste into the MDN page, eg https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/Releases/45
+
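+# For orientation, a hypothetical end-to-end session (the file names here are
+# only examples):
+#
+#   ./release-notes diff old-jsapi.h new-jsapi.h > diff.txt
+#   $EDITOR diff.txt    # tag - lines with [rename], [replace], or [other]
+#   ./release-notes < diff.txt > changes.txt
+#   ./release-notes mdn < changes.txt > final.txt
+#
+# (The third command also reads a jsapi.blame file from the current directory.)
+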
+# Upcoming: basing everything off of jsapi.h is probably not going to work for
+# much longer, given that more stuff is moving into js/public. Scan
+# js/public/*.h too and record where everything comes from (to automate header
+# changes in the notes)?
+#
+# This is only looking at C style APIs. Dump out all methods too?
+#
+# The enbuggification should be split out into a separate phase because it is
+# wrong a fair amount of the time (whitespace changes, parameter changes,
+# etc.), and should have a way of running repeatedly so you can incrementally
+# fix stuff up.
+#
+# It would be very nice to have an example program that links against mozjs,
+# tested in CI, so we can diff that for release notes.
+
+use strict;
+use warnings;
+
+if (@ARGV && $ARGV[0] eq 'diff') {
+ my ($orig_file, $new_file) = @ARGV[1..2];
+ my $orig_api = grab_api($orig_file);
+ my $new_api = grab_api($new_file);
+ diff_apis($orig_api, $new_api);
+ exit 0;
+}
+
+my $path = "/en-US/docs/Mozilla/Projects/SpiderMonkey/JSAPI_Reference";
+my $url_prefix = "https://developer.mozilla.org$path";
+
+if (@ARGV && $ARGV[0] eq 'mdn') {
+ shift(@ARGV);
+ while(<>) {
+ if (/<li>([\w:]+)/) {
+ print STDERR "Checking $1...\n";
+ system("wget", "-q", "$url_prefix/$1");
+ if ($? == 0) {
+ s!<li>([\w:]+)!<li><a href="$path/$1">$1</a>!;
+ }
+ }
+ print;
+ }
+ exit 0;
+}
+
+sub grab_api {
+ my ($file) = @_;
+ open(my $fh, "<", $file) or die "open $file: $!";
+ my $grabbing;
+ my @api;
+ while(<$fh>) {
+ if ($grabbing && /^(\w+)/) {
+ push @api, $1;
+ }
+ $grabbing = /JS_PUBLIC_API/;
+ }
+ return \@api;
+}
+
+sub diff_apis {
+ my ($old, $new) = @_;
+ my %old;
+ @old{@$old} = ();
+ my %new;
+ @new{@$new} = ();
+
+ open(my $ofh, ">", "/tmp/r-c.diff.1");
+ print $ofh "$_\n" foreach (@$old);
+ close $ofh;
+ open(my $nfh, ">", "/tmp/r-c.diff.2");
+ print $nfh "$_\n" foreach (@$new);
+ close $nfh;
+ open(my $diff, "diff -u /tmp/r-c.diff.1 /tmp/r-c.diff.2 |");
+ while(<$diff>) {
+ if (/^-(\w+)/) {
+ next if exists $new{$1}; # Still exists, so skip it
+ } elsif (/^\+(\w+)/) {
+ next if exists $old{$1}; # It was already there, skip it
+ }
+ print;
+ }
+}
+
+my @added;
+my @renamed;
+my @replaced;
+my @deleted;
+my @other;
+
+my %N;
+
+my $renaming;
+my $replacing;
+while (<>) {
+ my $name;
+ if (/^[ +-](\w+)/) {
+ $name = $1;
+ $N{$name} = $name =~ /^JS_/ ? $name : "JS::$name";
+ }
+
+ if (/^-/) {
+ die if ! $name;
+ if (/\[rename\]/) {
+ $renaming = $name;
+ } elsif (/\[replace\]/) {
+ $replacing = $name;
+ } elsif (/\[other\]/) {
+ push @other, $name;
+ } else {
+ push @deleted, $name;
+ }
+ } elsif (/^\+/) {
+ die if ! $name;
+ if ($renaming) {
+ push @renamed, [ $renaming, $name ];
+ undef $renaming;
+ } elsif ($replacing) {
+ push @replaced, [ $replacing, $name ];
+ undef $replacing;
+ } elsif (/\[other\]/) {
+ push @other, $name;
+ } else {
+ push @added, $name;
+ }
+ }
+}
+
+open(my $fh, "<", "jsapi.blame") or die "open jsapi.blame: $!";
+my $grabbing;
+my %changerev;
+my %revs;
+while(<$fh>) {
+ if ($grabbing && /^\s*(\d+): (\w+)/ ) {
+ $changerev{$2} = $1;
+ $revs{$1} = 1;
+ }
+ $grabbing = /JS_PUBLIC_API/;
+}
+
+my %bug;
+for my $rev (keys %revs) {
+ open(my $fh, "hg log -r $rev -T '{desc}' |");
+ while(<$fh>) {
+ if (/[bB]ug (\d+)/) {
+ $bug{$rev} = $1;
+ }
+ }
+}
+
+sub get_bug_suffix {
+ my ($api) = @_;
+ $DB::single = 1 if ! $changerev{$api};
+ my $bug = $bug{$changerev{$api}};
+ return $bug ? " {{{bug($bug)}}}" : "";
+}
+
+print "(new apis)\n";
+print "<ul>\n";
+print " <li>$N{$_}" . get_bug_suffix($_) . "</li>\n" foreach @added;
+print " <li>$N{$_->[0]} renamed to $N{$_->[1]}" . get_bug_suffix($_->[1]) . "</li>\n" foreach @renamed;
+print " <li>$N{$_->[0]} replaced with $N{$_->[1]}" . get_bug_suffix($_->[1]) . "</li>\n" foreach @replaced;
+print "</ul>\n";
+print "\n";
+
+print qq(<h2 id="Deleted_APIs">Deleted APIs</h2>\n);
+print "<ul>\n";
+print " <li>$N{$_}</li>\n" foreach @deleted;
+print "</ul>\n";
+print "\n";
+
+print qq(<h2 id="Changed_APIs">Changed APIs</h2>\n);
+print "<ul>\n";
+print " <li>$N{$_}" . get_bug_suffix($_) . "</li>\n" foreach @other;
+print "</ul>\n";
+print "\n";
diff --git a/js/src/devtools/rootAnalysis/CFG.js b/js/src/devtools/rootAnalysis/CFG.js
new file mode 100644
index 000000000..6e9facaa1
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/CFG.js
@@ -0,0 +1,159 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+var functionBodies;
+
+function findAllPoints(bodies, blockId)
+{
+ var points = [];
+ var body;
+
+ for (var xbody of bodies) {
+ if (sameBlockId(xbody.BlockId, blockId)) {
+ assert(!body);
+ body = xbody;
+ }
+ }
+ assert(body);
+
+ if (!("PEdge" in body))
+        return [];
+ for (var edge of body.PEdge) {
+ points.push([body, edge.Index[0]]);
+ if (edge.Kind == "Loop")
+ Array.prototype.push.apply(points, findAllPoints(bodies, edge.BlockId));
+ }
+
+ return points;
+}
+
+function isMatchingDestructor(constructor, edge)
+{
+ if (edge.Kind != "Call")
+ return false;
+ var callee = edge.Exp[0];
+ if (callee.Kind != "Var")
+ return false;
+ var variable = callee.Variable;
+ assert(variable.Kind == "Func");
+ if (variable.Name[1].charAt(0) != '~')
+ return false;
+
+ var constructExp = constructor.PEdgeCallInstance.Exp;
+ assert(constructExp.Kind == "Var");
+
+ var destructExp = edge.PEdgeCallInstance.Exp;
+ if (destructExp.Kind != "Var")
+ return false;
+
+ return sameVariable(constructExp.Variable, destructExp.Variable);
+}
+
+// Return all calls within the RAII scope of any constructor matched by
+// isConstructor(). (Note that this would be insufficient if you needed to
+// treat each instance separately, such as when different regions of a function
+// body were guarded by these constructors and you needed to do something
+// different with each.)
+function allRAIIGuardedCallPoints(bodies, body, isConstructor)
+{
+ if (!("PEdge" in body))
+ return [];
+
+ var points = [];
+
+ for (var edge of body.PEdge) {
+ if (edge.Kind != "Call")
+ continue;
+ var callee = edge.Exp[0];
+ if (callee.Kind != "Var")
+ continue;
+ var variable = callee.Variable;
+ assert(variable.Kind == "Func");
+ if (!isConstructor(edge.Type, variable.Name))
+ continue;
+ if (!("PEdgeCallInstance" in edge))
+ continue;
+ if (edge.PEdgeCallInstance.Exp.Kind != "Var")
+ continue;
+
+ Array.prototype.push.apply(points, pointsInRAIIScope(bodies, body, edge));
+ }
+
+ return points;
+}
+
+// Test whether the given edge is the constructor corresponding to the given
+// destructor edge
+function isMatchingConstructor(destructor, edge)
+{
+ if (edge.Kind != "Call")
+ return false;
+ var callee = edge.Exp[0];
+ if (callee.Kind != "Var")
+ return false;
+ var variable = callee.Variable;
+ if (variable.Kind != "Func")
+ return false;
+ var name = readable(variable.Name[0]);
+ var destructorName = readable(destructor.Exp[0].Variable.Name[0]);
+ var match = destructorName.match(/^(.*?::)~(\w+)\(/);
+ if (!match) {
+ printErr("Unhandled destructor syntax: " + destructorName);
+ return false;
+ }
+ var constructorSubstring = match[1] + match[2];
+ if (name.indexOf(constructorSubstring) == -1)
+ return false;
+
+ var destructExp = destructor.PEdgeCallInstance.Exp;
+ assert(destructExp.Kind == "Var");
+
+ var constructExp = edge.PEdgeCallInstance.Exp;
+ if (constructExp.Kind != "Var")
+ return false;
+
+ return sameVariable(constructExp.Variable, destructExp.Variable);
+}
+
+function findMatchingConstructor(destructorEdge, body)
+{
+ var worklist = [destructorEdge];
+ var predecessors = getPredecessors(body);
+ while(worklist.length > 0) {
+ var edge = worklist.pop();
+ if (isMatchingConstructor(destructorEdge, edge))
+ return edge;
+ if (edge.Index[0] in predecessors) {
+ for (var e of predecessors[edge.Index[0]])
+ worklist.push(e);
+ }
+ }
+ printErr("Could not find matching constructor!");
+ debugger;
+}
+
+function pointsInRAIIScope(bodies, body, constructorEdge) {
+ var seen = {};
+ var worklist = [constructorEdge.Index[1]];
+ var points = [];
+ while (worklist.length) {
+ var point = worklist.pop();
+ if (point in seen)
+ continue;
+ seen[point] = true;
+ points.push([body, point]);
+ var successors = getSuccessors(body);
+ if (!(point in successors))
+ continue;
+ for (var nedge of successors[point]) {
+ if (isMatchingDestructor(constructorEdge, nedge))
+ continue;
+ if (nedge.Kind == "Loop")
+ Array.prototype.push.apply(points, findAllPoints(bodies, nedge.BlockId));
+ worklist.push(nedge.Index[1]);
+ }
+ }
+
+ return points;
+}
diff --git a/js/src/devtools/rootAnalysis/Makefile.in b/js/src/devtools/rootAnalysis/Makefile.in
new file mode 100644
index 000000000..896e03e65
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/Makefile.in
@@ -0,0 +1,79 @@
+# -*- Mode: makefile -*-
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This Makefile is used to kick off a static rooting analysis. This Makefile is
+# NOT intended for use as part of the standard Mozilla build. Instead, this
+# Makefile will use $PATH to subvert compiler invocations to add in the sixgill
+# plugin, and then do a regular build of whatever portion of the tree you are
+# analyzing. The plugins will dump out several xdb database files. Various
+# analysis scripts, written in JS, will run over those database files to
+# produce the final analysis output.
+
+include $(topsrcdir)/config/config.mk
+
+# Tree to build and analyze, defaulting to the current tree
+TARGET_JSOBJDIR ?= $(MOZ_BUILD_ROOT)
+
+# Path to a JS binary to use to run the analysis. You really want this to be an
+# optimized build.
+JS ?= $(MOZ_BUILD_ROOT)/shell/js
+
+# Path to an xgill checkout containing the GCC plugin, xdb-processing binaries,
+# and compiler wrapper scripts used to automatically integrate into an existing
+# build system.
+SIXGILL ?= @SIXGILL_PATH@
+
+# Path to the JS scripts that will perform the analysis, defaulting to the same
+# place as this Makefile.in, which is probably what you want.
+ANALYSIS_SCRIPT_DIR ?= $(srcdir)
+
+# Number of simultaneous analyzeRoots.js jobs to run.
+JOBS ?= 6
+
+all : rootingHazards.txt allFunctions.txt
+
+CALL_JS := time env PATH=$$PATH:$(SIXGILL)/bin XDB=$(SIXGILL)/bin/xdb.so $(JS)
+
+src_body.xdb src_comp.xdb: run_complete
+ @echo Started compilation at $$(date)
+ $(ANALYSIS_SCRIPT_DIR)/run_complete --foreground --build-root=$(TARGET_JSOBJDIR) --work-dir=work -b $(SIXGILL)/bin $(CURDIR)
+ @echo Finished compilation at $$(date)
+
+callgraph.txt: src_body.xdb src_comp.xdb computeCallgraph.js
+ @echo Started computation of $@ at $$(date)
+ $(CALL_JS) $(ANALYSIS_SCRIPT_DIR)/computeCallgraph.js > $@.tmp
+ mv $@.tmp $@
+ @echo Finished computation of $@ at $$(date)
+
+gcFunctions.txt: callgraph.txt computeGCFunctions.js annotations.js
+ @echo Started computation of $@ at $$(date)
+ $(CALL_JS) $(ANALYSIS_SCRIPT_DIR)/computeGCFunctions.js ./callgraph.txt > $@.tmp
+ mv $@.tmp $@
+ @echo Finished computation of $@ at $$(date)
+
+gcFunctions.lst: gcFunctions.txt
+ perl -lne 'print $$1 if /^GC Function: (.*)/' gcFunctions.txt > $@
+
+suppressedFunctions.lst: gcFunctions.txt
+ perl -lne 'print $$1 if /^Suppressed Function: (.*)/' gcFunctions.txt > $@
+
+gcTypes.txt: src_comp.xdb computeGCTypes.js annotations.js
+ @echo Started computation of $@ at $$(date)
+ $(CALL_JS) $(ANALYSIS_SCRIPT_DIR)/computeGCTypes.js > $@.tmp
+ mv $@.tmp $@
+ @echo Finished computation of $@ at $$(date)
+
+allFunctions.txt: src_body.xdb
+ @echo Started computation of $@ at $$(date)
+ time $(SIXGILL)/bin/xdbkeys $^ > $@.tmp
+ mv $@.tmp $@
+ @echo Finished computation of $@ at $$(date)
+
+rootingHazards.txt: gcFunctions.lst suppressedFunctions.lst gcTypes.txt analyzeRoots.js annotations.js gen-hazards.sh
+ @echo Started computation of $@ at $$(date)
+ time env JS=$(JS) ANALYZE='$(ANALYSIS_SCRIPT_DIR)/analyzeRoots.js' SIXGILL='$(SIXGILL)' '$(ANALYSIS_SCRIPT_DIR)/gen-hazards.sh' $(JOBS) > $@.tmp
+ mv $@.tmp $@
+ @echo Finished computation of $@ at $$(date)
diff --git a/js/src/devtools/rootAnalysis/README.md b/js/src/devtools/rootAnalysis/README.md
new file mode 100644
index 000000000..0588cae66
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/README.md
@@ -0,0 +1,64 @@
+# SpiderMonkey JSAPI rooting analysis
+
+This directory contains scripts for running Brian Hackett's static GC rooting
+analysis on a JS source directory.
+
+To use it on SpiderMonkey:
+
+1. Be on Fedora/CentOS/RedHat Linux x86_64, or a Docker image of one of those.
+
+ Specifically, the prebuilt GCC **won't work on Ubuntu**
+ without the `CFLAGS` and `CXXFLAGS` settings from
+ <http://trac.wildfiregames.com/wiki/StaticRootingAnalysis>.
+
+2. Have the Gecko build prerequisites installed.
+
+3. Install taskcluster-vcs, eg by doing
+
+ npm install taskcluster-vcs
+ export PATH="$PATH:$(pwd)/node_modules/.bin"
+
+4. In some directory, using $SRCDIR as the top of your Gecko source checkout,
+ run these commands:
+
+ mkdir work
+ cd work
+ ( export GECKO_DIR=$SRCDIR; $GECKO_DIR/taskcluster/scripts/builder/build-haz-linux.sh $(pwd) --dep )
+
+The `--dep` is optional, and will avoid rebuilding the JS shell used to run the
+analysis later.
+
+If you see the error ``/lib/../lib64/crti.o: unrecognized relocation (0x2a) in section .init``, you have a version mismatch between the precompiled gcc used in automation and your installed glibc. The easiest way to fix this is to delete the ld shipped with the precompiled gcc (it is in two places, one of which is given in the first part of the error message); gcc will then fall back to your system ld. You will also need to pass ``--no-tooltool`` to build-haz-linux.sh. With the current package, you can do the deletion with
+
+ rm gcc/bin/ld
+ rm gcc/x86_64-unknown-linux-gnu/bin/ld
+
+Output goes to `analysis/hazards.txt`. This will run the
+analysis on the js/src tree only; if you wish to analyze the full browser, use
+
+ ( export GECKO_DIR=$SRCDIR; $GECKO_DIR/taskcluster/scripts/builder/build-haz-linux.sh --project browser $(pwd) )
+
+After running the analysis once, you can reuse the `*.xdb` database files
+generated, using modified analysis scripts, by running
+`analysis/run-analysis.sh` (or pass `--list` to see ways to select even more
+restrictive parts of the overall analysis; the default is `gcTypes` which will
+do everything but regenerate the xdb files).
+
+Also, you can pass `-v` to get exact command lines to cut & paste for running the
+various stages, which is helpful for running under a debugger.
+
+
+## Overview of what is going on here
+
+So what does this actually do?
+
+1. It downloads a GCC compiler and plugin ("sixgill") from Mozilla servers, using
+ "tooltool" (a binary archive tool).
+
+2. It runs `run_complete`, a script that builds the target codebase with the
+ downloaded GCC, generating a few database files containing control flow
+ graphs of the full compile, along with type information etc.
+
+3. Then it runs `analyze.py`, a Python script, which runs all the scripts
+ which actually perform the analysis -- the tricky parts.
+ (Those scripts are written in JS.)
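+
+As a rough sketch, `analyze.py` can also be invoked directly (the instructions
+above go through the build-haz-linux.sh wrapper instead) once you have a
+sixgill build and a ctypes-capable JS shell. The paths below are examples, and
+a `defaults.py` supplying the sixgill/gcc paths is assumed to be present in the
+working directory:
+
+    python analyze.py --source $SRCDIR --objdir $OBJDIR --js /path/to/js -j 8
+
+Use `--list` to show the individual steps; the positional STEP argument and
+`--upto` select a subset of them.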
diff --git a/js/src/devtools/rootAnalysis/analyze.py b/js/src/devtools/rootAnalysis/analyze.py
new file mode 100755
index 000000000..69482dab7
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/analyze.py
@@ -0,0 +1,298 @@
+#!/usr/bin/python
+
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Runs the static rooting analysis
+"""
+
+from subprocess import Popen
+import subprocess
+import os
+import argparse
+import sys
+import re
+
+def env(config):
+ e = dict(os.environ)
+ e['PATH'] = ':'.join(p for p in (config.get('gcc_bin'), config.get('sixgill_bin'), e['PATH']) if p)
+ e['XDB'] = '%(sixgill_bin)s/xdb.so' % config
+ e['SOURCE'] = config['source']
+ e['ANALYZED_OBJDIR'] = config['objdir']
+ return e
+
+def fill(command, config):
+ try:
+ return tuple(s % config for s in command)
+ except:
+ print("Substitution failed:")
+ problems = []
+ for fragment in command:
+ try:
+ fragment % config
+ except:
+ problems.append(fragment)
+ raise Exception("\n".join(["Substitution failed:"] + [ " %s" % s for s in problems ]))
+
+def print_command(command, outfile=None, env=None):
+ output = ' '.join(command)
+ if outfile:
+ output += ' > ' + outfile
+ if env:
+ changed = {}
+ e = os.environ
+ for key,value in env.items():
+ if (key not in e) or (e[key] != value):
+ changed[key] = value
+ if changed:
+ outputs = []
+ for key, value in changed.items():
+ if key in e and e[key] in value:
+ start = value.index(e[key])
+ end = start + len(e[key])
+ outputs.append('%s="%s${%s}%s"' % (key,
+ value[:start],
+ key,
+ value[end:]))
+ else:
+ outputs.append("%s='%s'" % (key, value))
+ output = ' '.join(outputs) + " " + output
+
+ print output
+
+def generate_hazards(config, outfilename):
+ jobs = []
+ for i in range(int(config['jobs'])):
+ command = fill(('%(js)s',
+ '%(analysis_scriptdir)s/analyzeRoots.js',
+ '%(gcFunctions_list)s',
+ '%(gcEdges)s',
+ '%(suppressedFunctions_list)s',
+ '%(gcTypes)s',
+ '%(typeInfo)s',
+ str(i+1), '%(jobs)s',
+ 'tmp.%s' % (i+1,)),
+ config)
+ outfile = 'rootingHazards.%s' % (i+1,)
+ output = open(outfile, 'w')
+ if config['verbose']:
+ print_command(command, outfile=outfile, env=env(config))
+ jobs.append((command, Popen(command, stdout=output, env=env(config))))
+
+ final_status = 0
+ while jobs:
+ pid, status = os.wait()
+ jobs = [ job for job in jobs if job[1].pid != pid ]
+ final_status = final_status or status
+
+ if final_status:
+ raise subprocess.CalledProcessError(final_status, 'analyzeRoots.js')
+
+ with open(outfilename, 'w') as output:
+ command = ['cat'] + [ 'rootingHazards.%s' % (i+1,) for i in range(int(config['jobs'])) ]
+ if config['verbose']:
+ print_command(command, outfile=outfilename)
+ subprocess.call(command, stdout=output)
+
+JOBS = { 'dbs':
+ (('%(ANALYSIS_SCRIPTDIR)s/run_complete',
+ '--foreground',
+ '--no-logs',
+ '--build-root=%(objdir)s',
+ '--wrap-dir=%(sixgill)s/scripts/wrap_gcc',
+ '--work-dir=work',
+ '-b', '%(sixgill_bin)s',
+ '--buildcommand=%(buildcommand)s',
+ '.'),
+ ()),
+
+ 'list-dbs':
+ (('ls', '-l'),
+ ()),
+
+ 'callgraph':
+ (('%(js)s', '%(analysis_scriptdir)s/computeCallgraph.js', '%(typeInfo)s'),
+ 'callgraph.txt'),
+
+ 'gcFunctions':
+ (('%(js)s', '%(analysis_scriptdir)s/computeGCFunctions.js', '%(callgraph)s',
+ '[gcFunctions]', '[gcFunctions_list]', '[gcEdges]', '[suppressedFunctions_list]'),
+ ('gcFunctions.txt', 'gcFunctions.lst', 'gcEdges.txt', 'suppressedFunctions.lst')),
+
+ 'gcTypes':
+ (('%(js)s', '%(analysis_scriptdir)s/computeGCTypes.js',
+ '[gcTypes]', '[typeInfo]'),
+ ('gcTypes.txt', 'typeInfo.txt')),
+
+ 'allFunctions':
+ (('%(sixgill_bin)s/xdbkeys', 'src_body.xdb',),
+ 'allFunctions.txt'),
+
+ 'hazards':
+ (generate_hazards, 'rootingHazards.txt'),
+
+ 'explain':
+ ((os.environ.get('PYTHON', 'python2.7'),
+ '%(analysis_scriptdir)s/explain.py',
+ '%(hazards)s', '%(gcFunctions)s',
+ '[explained_hazards]', '[unnecessary]', '[refs]'),
+ ('hazards.txt', 'unnecessary.txt', 'refs.txt'))
+ }
+
+def out_indexes(command):
+ for i in range(len(command)):
+ m = re.match(r'^\[(.*)\]$', command[i])
+ if m:
+ yield (i, m.group(1))
+
+def run_job(name, config):
+ cmdspec, outfiles = JOBS[name]
+ print("Running " + name + " to generate " + str(outfiles))
+ if hasattr(cmdspec, '__call__'):
+ cmdspec(config, outfiles)
+ else:
+ temp_map = {}
+ cmdspec = fill(cmdspec, config)
+ if isinstance(outfiles, basestring):
+ stdout_filename = '%s.tmp' % name
+ temp_map[stdout_filename] = outfiles
+ if config['verbose']:
+ print_command(cmdspec, outfile=outfiles, env=env(config))
+ else:
+ stdout_filename = None
+ pc = list(cmdspec)
+ outfile = 0
+ for (i, name) in out_indexes(cmdspec):
+ pc[i] = outfiles[outfile]
+ outfile += 1
+ if config['verbose']:
+ print_command(pc, env=env(config))
+
+ command = list(cmdspec)
+ outfile = 0
+ for (i, name) in out_indexes(cmdspec):
+ command[i] = '%s.tmp' % name
+ temp_map[command[i]] = outfiles[outfile]
+ outfile += 1
+
+ sys.stdout.flush()
+ if stdout_filename is None:
+ subprocess.check_call(command, env=env(config))
+ else:
+ with open(stdout_filename, 'w') as output:
+ subprocess.check_call(command, stdout=output, env=env(config))
+ for (temp, final) in temp_map.items():
+ try:
+ os.rename(temp, final)
+ except OSError:
+ print("Error renaming %s -> %s" % (temp, final))
+ raise
+
+config = { 'ANALYSIS_SCRIPTDIR': os.path.dirname(__file__) }
+
+defaults = [ '%s/defaults.py' % config['ANALYSIS_SCRIPTDIR'],
+ '%s/defaults.py' % os.getcwd() ]
+
+parser = argparse.ArgumentParser(description='Statically analyze build tree for rooting hazards.')
+parser.add_argument('step', metavar='STEP', type=str, nargs='?',
+ help='run starting from this step')
+parser.add_argument('--source', metavar='SOURCE', type=str, nargs='?',
+ help='source code to analyze')
+parser.add_argument('--objdir', metavar='DIR', type=str, nargs='?',
+ help='object directory of compiled files')
+parser.add_argument('--js', metavar='JSSHELL', type=str, nargs='?',
+ help='full path to ctypes-capable JS shell')
+parser.add_argument('--upto', metavar='UPTO', type=str, nargs='?',
+ help='last step to execute')
+parser.add_argument('--jobs', '-j', default=None, metavar='JOBS', type=int,
+ help='number of simultaneous analyzeRoots.js jobs')
+parser.add_argument('--list', const=True, nargs='?', type=bool,
+ help='display available steps')
+parser.add_argument('--buildcommand', '--build', '-b', type=str, nargs='?',
+ help='command to build the tree being analyzed')
+parser.add_argument('--tag', '-t', type=str, nargs='?',
+ help='name of job, also sets build command to "build.<tag>"')
+parser.add_argument('--expect-file', type=str, nargs='?',
+ help='deprecated option, temporarily still present for backwards compatibility')
+parser.add_argument('--verbose', '-v', action='store_true',
+ help='Display cut & paste commands to run individual steps')
+
+args = parser.parse_args()
+
+for default in defaults:
+ try:
+ execfile(default, config)
+ if args.verbose:
+ print("Loaded %s" % default)
+ except:
+ pass
+
+data = config.copy()
+
+for k,v in vars(args).items():
+ if v is not None:
+ data[k] = v
+
+if args.tag and not args.buildcommand:
+ args.buildcommand="build.%s" % args.tag
+
+if args.jobs is not None:
+ data['jobs'] = args.jobs
+if not data.get('jobs'):
+ data['jobs'] = subprocess.check_output(['nproc', '--ignore=1']).strip()
+
+if args.buildcommand:
+ data['buildcommand'] = args.buildcommand
+elif 'BUILD' in os.environ:
+ data['buildcommand'] = os.environ['BUILD']
+else:
+ data['buildcommand'] = 'make -j4 -s'
+
+if 'ANALYZED_OBJDIR' in os.environ:
+ data['objdir'] = os.environ['ANALYZED_OBJDIR']
+
+if 'SOURCE' in os.environ:
+ data['source'] = os.environ['SOURCE']
+if not data.get('source') and data.get('sixgill_bin'):
+ path = subprocess.check_output(['sh', '-c', data['sixgill_bin'] + '/xdbkeys file_source.xdb | grep jsapi.cpp'])
+    data['source'] = path.replace("/js/src/jsapi.cpp", "").strip()
+
+steps = [ 'dbs',
+ 'gcTypes',
+ 'callgraph',
+ 'gcFunctions',
+ 'allFunctions',
+ 'hazards',
+ 'explain' ]
+
+if args.list:
+ for step in steps:
+ command, outfilename = JOBS[step]
+ if outfilename:
+ print("%s -> %s" % (step, outfilename))
+ else:
+ print(step)
+ sys.exit(0)
+
+for step in steps:
+ command, outfiles = JOBS[step]
+ if isinstance(outfiles, basestring):
+ data[step] = outfiles
+ else:
+ outfile = 0
+ for (i, name) in out_indexes(command):
+ data[name] = outfiles[outfile]
+ outfile += 1
+ assert len(outfiles) == outfile, 'step \'%s\': mismatched number of output files (%d) and params (%d)' % (step, outfile, len(outfiles))
+
+if args.step:
+ steps = steps[steps.index(args.step):]
+
+if args.upto:
+ steps = steps[:steps.index(args.upto)+1]
+
+for step in steps:
+ run_job(step, data)
diff --git a/js/src/devtools/rootAnalysis/analyzeRoots.js b/js/src/devtools/rootAnalysis/analyzeRoots.js
new file mode 100644
index 000000000..61b46e387
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/analyzeRoots.js
@@ -0,0 +1,871 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+loadRelativeToScript('utility.js');
+loadRelativeToScript('annotations.js');
+loadRelativeToScript('CFG.js');
+
+var sourceRoot = (os.getenv('SOURCE') || '') + '/';
+
+var functionName;
+var functionBodies;
+
+if (typeof scriptArgs[0] != 'string' || typeof scriptArgs[1] != 'string')
+ throw "Usage: analyzeRoots.js [-f function_name] <gcFunctions.lst> <gcEdges.txt> <suppressedFunctions.lst> <gcTypes.txt> <typeInfo.txt> [start end [tmpfile]]";
+
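+// A hypothetical manual invocation (analyze.py normally drives this script),
+// run from a directory containing src_body.xdb, with XDB pointing at sixgill's
+// xdb.so:
+//
+//   js analyzeRoots.js gcFunctions.lst gcEdges.txt suppressedFunctions.lst \
+//       gcTypes.txt typeInfo.txt 1 8 tmp.1
+//
+// The trailing "1 8 tmp.1" selects batch 1 of 8 and names a scratch file,
+// matching the arguments parsed below.
+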
+var theFunctionNameToFind;
+if (scriptArgs[0] == '--function' || scriptArgs[0] == '-f') {
+ theFunctionNameToFind = scriptArgs[1];
+ scriptArgs = scriptArgs.slice(2);
+}
+
+var gcFunctionsFile = scriptArgs[0] || "gcFunctions.lst";
+var gcEdgesFile = scriptArgs[1] || "gcEdges.txt";
+var suppressedFunctionsFile = scriptArgs[2] || "suppressedFunctions.lst";
+var gcTypesFile = scriptArgs[3] || "gcTypes.txt";
+var typeInfoFile = scriptArgs[4] || "typeInfo.txt";
+var batch = (scriptArgs[5]|0) || 1;
+var numBatches = (scriptArgs[6]|0) || 1;
+var tmpfile = scriptArgs[7] || "tmp.txt";
+
+GCSuppressionTypes = loadTypeInfo(typeInfoFile)["Suppress GC"] || [];
+
+var gcFunctions = {};
+var text = snarf(gcFunctionsFile).split("\n");
+assert(text.pop().length == 0);
+for (var line of text)
+ gcFunctions[mangled(line)] = true;
+
+var suppressedFunctions = {};
+var text = snarf(suppressedFunctionsFile).split("\n");
+assert(text.pop().length == 0);
+for (var line of text) {
+ suppressedFunctions[line] = true;
+}
+text = null;
+
+var gcEdges = {};
+text = snarf(gcEdgesFile).split('\n');
+assert(text.pop().length == 0);
+for (var line of text) {
+ var [ block, edge, func ] = line.split(" || ");
+ if (!(block in gcEdges))
+        gcEdges[block] = {};
+ gcEdges[block][edge] = func;
+}
+text = null;
+
+var match;
+var gcThings = {};
+var gcPointers = {};
+
+text = snarf(gcTypesFile).split("\n");
+for (var line of text) {
+ if (match = /^GCThing: (.*)/.exec(line))
+ gcThings[match[1]] = true;
+ if (match = /^GCPointer: (.*)/.exec(line))
+ gcPointers[match[1]] = true;
+}
+text = null;
+
+function isGCType(type)
+{
+ if (type.Kind == "CSU")
+ return type.Name in gcThings;
+ else if (type.Kind == "Array")
+ return isGCType(type.Type);
+ return false;
+}
+
+function isUnrootedType(type)
+{
+ if (type.Kind == "Pointer")
+ return isGCType(type.Type);
+ else if (type.Kind == "Array")
+ return isUnrootedType(type.Type);
+ else if (type.Kind == "CSU")
+ return type.Name in gcPointers;
+ else
+ return false;
+}
+
+function expressionUsesVariable(exp, variable)
+{
+ if (exp.Kind == "Var" && sameVariable(exp.Variable, variable))
+ return true;
+ if (!("Exp" in exp))
+ return false;
+ for (var childExp of exp.Exp) {
+ if (expressionUsesVariable(childExp, variable))
+ return true;
+ }
+ return false;
+}
+
+function expressionUsesVariableContents(exp, variable)
+{
+ if (!("Exp" in exp))
+ return false;
+ for (var childExp of exp.Exp) {
+ if (childExp.Kind == 'Drf') {
+ if (expressionUsesVariable(childExp, variable))
+ return true;
+ } else if (expressionUsesVariableContents(childExp, variable)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Detect simple |return nullptr;| statements.
+function isReturningImmobileValue(edge, variable)
+{
+ if (variable.Kind == "Return") {
+ if (edge.Exp[0].Kind == "Var" && sameVariable(edge.Exp[0].Variable, variable)) {
+ if (edge.Exp[1].Kind == "Int" && edge.Exp[1].String == "0") {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+// If the edge uses the given variable's value, return the earliest point at
+// which the use is definite. Usually, that means the source of the edge
+// (anything that reaches that source point will end up using the variable, but
+// there may be other ways to reach the destination of the edge.)
+//
+// Return values are implicitly used at the very last point in the function.
+// This makes a difference: if an RAII class GCs in its destructor, we need to
+// start looking at the final point in the function, not one point back from
+// that, since that would skip over the GCing call.
+//
+// Note that this considers only the variable's *incoming* value, and returns
+// the point of use (or 0 if there is none). So this returns 0 for 'obj':
+//
+//     obj = someFunction();
+//
+// but returns a use point for these:
+//
+// obj = someFunction(obj);
+// obj->foo = someFunction();
+//
+function edgeUsesVariable(edge, variable, body)
+{
+ if (ignoreEdgeUse(edge, variable, body))
+ return 0;
+
+ if (variable.Kind == "Return" && body.Index[1] == edge.Index[1] && body.BlockId.Kind == "Function")
+ return edge.Index[1]; // Last point in function body uses the return value.
+
+ var src = edge.Index[0];
+
+ switch (edge.Kind) {
+
+ case "Assign": {
+ if (isReturningImmobileValue(edge, variable))
+ return 0;
+ const [lhs, rhs] = edge.Exp;
+ if (expressionUsesVariable(rhs, variable))
+ return src;
+ if (expressionUsesVariable(lhs, variable) && !expressionIsVariable(lhs, variable))
+ return src;
+ return 0;
+ }
+
+ case "Assume":
+ return expressionUsesVariableContents(edge.Exp[0], variable) ? src : 0;
+
+ case "Call": {
+ const callee = edge.Exp[0];
+ if (expressionUsesVariable(callee, variable))
+ return src;
+ if ("PEdgeCallInstance" in edge) {
+ if (expressionUsesVariable(edge.PEdgeCallInstance.Exp, variable)) {
+ if (edgeKillsVariable(edge, variable)) {
+ // If the variable is being constructed, then the incoming
+ // value is not used here; it didn't exist before
+ // construction. (The analysis doesn't get told where
+ // variables are defined, so must infer it from
+ // construction. If the variable does not have a
+ // constructor, its live range may be larger than it really
+ // ought to be if it is defined within a loop body, but
+ // that is conservative.)
+ } else {
+ return src;
+ }
+ }
+ }
+ if ("PEdgeCallArguments" in edge) {
+ for (var exp of edge.PEdgeCallArguments.Exp) {
+ if (expressionUsesVariable(exp, variable))
+ return src;
+ }
+ }
+ if (edge.Exp.length == 1)
+ return 0;
+
+ // Assigning call result to a variable.
+ const lhs = edge.Exp[1];
+ if (expressionUsesVariable(lhs, variable) && !expressionIsVariable(lhs, variable))
+ return src;
+ return 0;
+ }
+
+ case "Loop":
+ return 0;
+
+ default:
+ assert(false);
+ }
+}
+
+function expressionIsVariableAddress(exp, variable)
+{
+ while (exp.Kind == "Fld")
+ exp = exp.Exp[0];
+ return exp.Kind == "Var" && sameVariable(exp.Variable, variable);
+}
+
+function edgeTakesVariableAddress(edge, variable, body)
+{
+ if (ignoreEdgeUse(edge, variable, body))
+ return false;
+ if (ignoreEdgeAddressTaken(edge))
+ return false;
+ switch (edge.Kind) {
+ case "Assign":
+ return expressionIsVariableAddress(edge.Exp[1], variable);
+ case "Call":
+ if ("PEdgeCallArguments" in edge) {
+ for (var exp of edge.PEdgeCallArguments.Exp) {
+ if (expressionIsVariableAddress(exp, variable))
+ return true;
+ }
+ }
+ return false;
+ default:
+ return false;
+ }
+}
+
+function expressionIsVariable(exp, variable)
+{
+ return exp.Kind == "Var" && sameVariable(exp.Variable, variable);
+}
+
+// Return whether the edge kills (overwrites) the variable's incoming value.
+// Examples of killing 'obj':
+//
+// obj = foo;
+// obj = foo();
+// obj = foo(obj); // uses previous value but then kills it
+// SomeClass obj(true, 1); // constructor
+//
+function edgeKillsVariable(edge, variable)
+{
+ // Direct assignments kill their lhs: var = value
+ if (edge.Kind == "Assign") {
+ const [lhs] = edge.Exp;
+ return (expressionIsVariable(lhs, variable) &&
+ !isReturningImmobileValue(edge, variable));
+ }
+
+ if (edge.Kind != "Call")
+ return false;
+
+ // Assignments of call results kill their lhs.
+ if (1 in edge.Exp) {
+ var lhs = edge.Exp[1];
+ if (expressionIsVariable(lhs, variable))
+ return true;
+ }
+
+ // Constructor calls kill their 'this' value.
+ if ("PEdgeCallInstance" in edge) {
+ var instance = edge.PEdgeCallInstance.Exp;
+
+ // Kludge around incorrect dereference on some constructor calls.
+ if (instance.Kind == "Drf")
+ instance = instance.Exp[0];
+
+ if (!expressionIsVariable(instance, variable))
+ return false;
+
+ var callee = edge.Exp[0];
+ if (callee.Kind != "Var")
+ return false;
+
+ assert(callee.Variable.Kind == "Func");
+ var calleeName = readable(callee.Variable.Name[0]);
+
+ // Constructor calls include the text 'Name::Name(' or 'Name<...>::Name('.
+ var openParen = calleeName.indexOf('(');
+ if (openParen < 0)
+ return false;
+ calleeName = calleeName.substring(0, openParen);
+
+ var lastColon = calleeName.lastIndexOf('::');
+ if (lastColon < 0)
+ return false;
+ var constructorName = calleeName.substr(lastColon + 2);
+ calleeName = calleeName.substr(0, lastColon);
+
+ var lastTemplateOpen = calleeName.lastIndexOf('<');
+ if (lastTemplateOpen >= 0)
+ calleeName = calleeName.substr(0, lastTemplateOpen);
+
+ if (calleeName.endsWith(constructorName))
+ return true;
+ }
+
+ return false;
+}
+
+function edgeCanGC(edge)
+{
+ if (edge.Kind != "Call")
+ return false;
+
+ var callee = edge.Exp[0];
+
+ while (callee.Kind == "Drf")
+ callee = callee.Exp[0];
+
+ if (callee.Kind == "Var") {
+ var variable = callee.Variable;
+
+ if (variable.Kind == "Func") {
+ var callee = mangled(variable.Name[0]);
+ if ((callee in gcFunctions) || ((callee + internalMarker) in gcFunctions))
+ return "'" + variable.Name[0] + "'";
+ return null;
+ }
+
+ var varName = variable.Name[0];
+ return indirectCallCannotGC(functionName, varName) ? null : "*" + varName;
+ }
+
+ if (callee.Kind == "Fld") {
+ var field = callee.Field;
+ var csuName = field.FieldCSU.Type.Name;
+ var fullFieldName = csuName + "." + field.Name[0];
+ if (fieldCallCannotGC(csuName, fullFieldName))
+ return null;
+ return (fullFieldName in suppressedFunctions) ? null : fullFieldName;
+ }
+}
+
+// Search recursively through predecessors from the use of a variable's value,
+// returning whether a GC call is reachable (in the reverse direction; this
+// means that the variable use is reachable from the GC call, and therefore the
+// variable is live after the GC call), along with some additional information.
+// What info we want depends on whether the variable turns out to be live
+// across a GC call. We are looking for both hazards (unrooted variables live
+// across GC calls) and unnecessary roots (rooted variables that have no GC
+// calls in their live ranges.)
+//
+// If not:
+//
+// - 'minimumUse': the earliest point in each body that uses the variable, for
+// reporting on unnecessary roots.
+//
+// If so:
+//
+// - 'why': a path from the GC call to a use of the variable after the GC
+// call, chained through a 'why' field in the returned edge descriptor
+//
+// - 'gcInfo': a direct pointer to the GC call edge
+//
+function findGCBeforeValueUse(start_body, start_point, suppressed, variable)
+{
+ // Scan through all edges preceding an unrooted variable use, using an
+ // explicit worklist, looking for a GC call. A worklist contains an
+ // incoming edge together with a description of where it or one of its
+ // successors GC'd (if any).
+
+ var bodies_visited = new Map();
+
+ let worklist = [{body: start_body, ppoint: start_point, preGCLive: false, gcInfo: null, why: null}];
+ while (worklist.length) {
+ // Grab an entry off of the worklist, representing a point within the
+ // CFG identified by <body,ppoint>. If this point has a descendant
+ // later in the CFG that can GC, gcInfo will be set to the information
+ // about that GC call.
+
+ var entry = worklist.pop();
+ var { body, ppoint, gcInfo, preGCLive } = entry;
+
+ // Handle the case where there are multiple ways to reach this point
+ // (traversing backwards).
+ var visited = bodies_visited.get(body);
+ if (!visited)
+ bodies_visited.set(body, visited = new Map());
+ if (visited.has(ppoint)) {
+ var seenEntry = visited.get(ppoint);
+
+ // This point already knows how to GC through some other path, so
+ // we have nothing new to learn. (The other path will consider the
+ // predecessors.)
+ if (seenEntry.gcInfo)
+ continue;
+
+ // If this worklist's entry doesn't know of any way to GC, then
+ // there's no point in continuing the traversal through it. Perhaps
+ // another edge will be found that *can* GC; otherwise, the first
+ // route to the point will traverse through predecessors.
+ //
+ // Note that this means we may visit a point more than once, if the
+ // first time we visit we don't have a known reachable GC call and
+ // the second time we do.
+ if (!gcInfo)
+ continue;
+ }
+ visited.set(ppoint, {body: body, gcInfo: gcInfo});
+
+ // Check for hitting the entry point of the current body (which may be
+ // the outer function or a loop within it.)
+ if (ppoint == body.Index[0]) {
+ if (body.BlockId.Kind == "Loop") {
+ // Propagate to outer body parents that enter the loop body.
+ if ("BlockPPoint" in body) {
+ for (var parent of body.BlockPPoint) {
+ var found = false;
+ for (var xbody of functionBodies) {
+ if (sameBlockId(xbody.BlockId, parent.BlockId)) {
+ assert(!found);
+ found = true;
+ worklist.push({body: xbody, ppoint: parent.Index,
+ gcInfo: gcInfo, why: entry});
+ }
+ }
+ assert(found);
+ }
+ }
+
+ // Also propagate to the *end* of this loop, for the previous
+ // iteration.
+ worklist.push({body: body, ppoint: body.Index[1],
+ gcInfo: gcInfo, why: entry});
+ } else if (variable.Kind == "Arg" && gcInfo) {
+ // The scope of arguments starts at the beginning of the
+ // function
+ return entry;
+ } else if (entry.preGCLive) {
+                // We didn't find a "good" explanation for the beginning of
+                // the live range, but we do know the variable was live across
+                // the GC. This can happen if the live range started when a
+                // variable was used as a retparam.
+ return entry;
+ }
+ }
+
+ var predecessors = getPredecessors(body);
+ if (!(ppoint in predecessors))
+ continue;
+
+ for (var edge of predecessors[ppoint]) {
+ var source = edge.Index[0];
+
+ var edge_kills = edgeKillsVariable(edge, variable);
+ var edge_uses = edgeUsesVariable(edge, variable, body);
+
+ if (edge_kills || edge_uses) {
+ if (!body.minimumUse || source < body.minimumUse)
+ body.minimumUse = source;
+ }
+
+ if (edge_kills) {
+ // This is a beginning of the variable's live range. If we can
+ // reach a GC call from here, then we're done -- we have a path
+ // from the beginning of the live range, through the GC call,
+ // to a use after the GC call that proves its live range
+ // extends at least that far.
+ if (gcInfo)
+ return {body: body, ppoint: source, gcInfo: gcInfo, why: entry };
+
+ // Otherwise, keep searching through the graph, but truncate
+ // this particular branch of the search at this edge.
+ continue;
+ }
+
+ var src_gcInfo = gcInfo;
+ var src_preGCLive = preGCLive;
+ if (!gcInfo && !(source in body.suppressed) && !suppressed) {
+ var gcName = edgeCanGC(edge, body);
+ if (gcName)
+ src_gcInfo = {name:gcName, body:body, ppoint:source};
+ }
+
+ if (edge_uses) {
+ // The live range starts at least this far back, so we're done
+ // for the same reason as with edge_kills. The only difference
+ // is that a GC on this edge indicates a hazard, whereas if
+ // we're killing a live range in the GC call then it's not live
+ // *across* the call.
+ //
+ // However, we may want to generate a longer usage chain for
+ // the variable than is minimally necessary. For example,
+ // consider:
+ //
+ // Value v = f();
+ // if (v.isUndefined())
+ // return false;
+ // gc();
+ // return v;
+ //
+ // The call to .isUndefined() is considered to be a use and
+ // therefore indicates that v must be live at that point. But
+ // it's more helpful to the user to continue the 'why' path to
+ // include the ancestor where the value was generated. So we
+ // will only return here if edge.Kind is Assign; otherwise,
+ // we'll pass a "preGCLive" value up through the worklist to
+ // remember that the variable *is* alive before the GC and so
+ // this function should be returning a true value even if we
+ // don't find an assignment.
+
+ if (src_gcInfo) {
+ src_preGCLive = true;
+ if (edge.Kind == 'Assign')
+ return {body:body, ppoint:source, gcInfo:src_gcInfo, why:entry};
+ }
+ }
+
+ if (edge.Kind == "Loop") {
+ // Additionally propagate the search into a loop body, starting
+ // with the exit point.
+ var found = false;
+ for (var xbody of functionBodies) {
+ if (sameBlockId(xbody.BlockId, edge.BlockId)) {
+ assert(!found);
+ found = true;
+ worklist.push({body:xbody, ppoint:xbody.Index[1],
+ preGCLive: src_preGCLive, gcInfo:src_gcInfo,
+ why:entry});
+ }
+ }
+ assert(found);
+ // Don't continue to predecessors here without going through
+ // the loop. (The points in this body that enter the loop will
+ // be traversed when we reach the entry point of the loop.)
+ break;
+ }
+
+ // Propagate the search to the predecessors of this edge.
+ worklist.push({body:body, ppoint:source,
+ preGCLive: src_preGCLive, gcInfo:src_gcInfo,
+ why:entry});
+ }
+ }
+
+ return null;
+}
+
+function variableLiveAcrossGC(suppressed, variable)
+{
+ // A variable is live across a GC if (1) it is used by an edge (as in, it
+ // was at least initialized), and (2) it is used after a GC in a successor
+ // edge.
+
+ for (var body of functionBodies)
+ body.minimumUse = 0;
+
+ for (var body of functionBodies) {
+ if (!("PEdge" in body))
+ continue;
+ for (var edge of body.PEdge) {
+ // Examples:
+ //
+ // JSObject* obj = NewObject();
+ // cangc();
+ // obj = NewObject(); <-- mentions 'obj' but kills previous value
+ //
+ // This is not a hazard. Contrast this with:
+ //
+ // JSObject* obj = NewObject();
+ // cangc();
+ // obj = LookAt(obj); <-- uses 'obj' and kills previous value
+ //
+ // This is a hazard; the initial value of obj is live across
+ // cangc(). And a third possibility:
+ //
+ // JSObject* obj = NewObject();
+ // obj = CopyObject(obj);
+ //
+ // This is not a hazard, because even though CopyObject can GC, obj
+ // is not live across it. (obj is live before CopyObject, and
+ // probably after, but not across.) There may be a hazard within
+ // CopyObject, of course.
+ //
+
+ var usePoint = edgeUsesVariable(edge, variable, body);
+ if (usePoint) {
+ var call = findGCBeforeValueUse(body, usePoint, suppressed, variable);
+ if (!call)
+ continue;
+
+ call.afterGCUse = usePoint;
+ return call;
+ }
+ }
+ }
+ return null;
+}
+
+// An unrooted variable has its address stored in another variable via
+// assignment, or passed into a function that can GC. If the address is
+// assigned into some other variable, we can't track it to see if it is held
+// live across a GC. If it is passed into a function that can GC, then it's
+// sort of like a Handle to an unrooted location, and the callee could GC
+// before overwriting it or rooting it.
+function unsafeVariableAddressTaken(suppressed, variable)
+{
+ for (var body of functionBodies) {
+ if (!("PEdge" in body))
+ continue;
+ for (var edge of body.PEdge) {
+ if (edgeTakesVariableAddress(edge, variable, body)) {
+ if (edge.Kind == "Assign" || (!suppressed && edgeCanGC(edge)))
+ return {body:body, ppoint:edge.Index[0]};
+ }
+ }
+ }
+ return null;
+}
+
+// Read out the brief (non-JSON, semi-human-readable) CFG description for the
+// given function and store it.
+function loadPrintedLines(functionName)
+{
+ assert(!os.system("xdbfind src_body.xdb '" + functionName + "' > " + tmpfile));
+ var lines = snarf(tmpfile).split('\n');
+
+ for (var body of functionBodies)
+ body.lines = [];
+
+ // Distribute lines of output to the block they originate from.
+ var currentBody = null;
+ for (var line of lines) {
+ if (/^block:/.test(line)) {
+ if (match = /:(loop#[\d#]+)/.exec(line)) {
+ var loop = match[1];
+ var found = false;
+ for (var body of functionBodies) {
+ if (body.BlockId.Kind == "Loop" && body.BlockId.Loop == loop) {
+ assert(!found);
+ found = true;
+ currentBody = body;
+ }
+ }
+ assert(found);
+ } else {
+ for (var body of functionBodies) {
+ if (body.BlockId.Kind == "Function")
+ currentBody = body;
+ }
+ }
+ }
+ if (currentBody)
+ currentBody.lines.push(line);
+ }
+}
+
+function findLocation(body, ppoint, opts={brief: false})
+{
+ var location = body.PPoint[ppoint - 1].Location;
+ var file = location.CacheString;
+
+ if (file.indexOf(sourceRoot) == 0)
+ file = file.substring(sourceRoot.length);
+
+ if (opts.brief) {
+ var m = /.*\/(.*)/.exec(file);
+ if (m)
+ file = m[1];
+ }
+
+ return file + ":" + location.Line;
+}
+
+function locationLine(text)
+{
+ if (match = /:(\d+)$/.exec(text))
+ return match[1];
+ return 0;
+}
+
+function printEntryTrace(functionName, entry)
+{
+ var gcPoint = entry.gcInfo ? entry.gcInfo.ppoint : 0;
+
+ if (!functionBodies[0].lines)
+ loadPrintedLines(functionName);
+
+ while (entry) {
+ var ppoint = entry.ppoint;
+ var lineText = findLocation(entry.body, ppoint, {"brief": true});
+
+ var edgeText = "";
+ if (entry.why && entry.why.body == entry.body) {
+ // If the next point in the trace is in the same block, look for an edge between them.
+ var next = entry.why.ppoint;
+
+ if (!entry.body.edgeTable) {
+ var table = {};
+ entry.body.edgeTable = table;
+ for (var line of entry.body.lines) {
+ if (match = /^\w+\((\d+,\d+),/.exec(line))
+ table[match[1]] = line; // May be multiple?
+ }
+ if (entry.body.BlockId.Kind == 'Loop') {
+ const [startPoint, endPoint] = entry.body.Index;
+ table[`${endPoint},${startPoint}`] = '(loop to next iteration)';
+ }
+ }
+
+ edgeText = entry.body.edgeTable[ppoint + "," + next];
+ assert(edgeText);
+ if (ppoint == gcPoint)
+ edgeText += " [[GC call]]";
+ } else {
+ // Look for any outgoing edge from the chosen point.
+ for (var line of entry.body.lines) {
+ if (match = /\((\d+),/.exec(line)) {
+ if (match[1] == ppoint) {
+ edgeText = line;
+ break;
+ }
+ }
+ }
+ if (ppoint == entry.body.Index[1] && entry.body.BlockId.Kind == "Function")
+ edgeText += " [[end of function]]";
+ }
+
+ print(" " + lineText + (edgeText.length ? ": " + edgeText : ""));
+ entry = entry.why;
+ }
+}
+
+function isRootedType(type)
+{
+ return type.Kind == "CSU" && isRootedTypeName(type.Name);
+}
+
+function typeDesc(type)
+{
+ if (type.Kind == "CSU") {
+ return type.Name;
+ } else if ('Type' in type) {
+ var inner = typeDesc(type.Type);
+ if (type.Kind == 'Pointer')
+ return inner + '*';
+ else if (type.Kind == 'Array')
+ return inner + '[]';
+ else
+ return inner + '?';
+ } else {
+ return '???';
+ }
+}
+
+function processBodies(functionName)
+{
+ if (!("DefineVariable" in functionBodies[0]))
+ return;
+ var suppressed = (mangled(functionName) in suppressedFunctions);
+ for (var variable of functionBodies[0].DefineVariable) {
+ var name;
+ if (variable.Variable.Kind == "This")
+ name = "this";
+ else if (variable.Variable.Kind == "Return")
+ name = "<returnvalue>";
+ else
+ name = variable.Variable.Name[0];
+
+ if (isRootedType(variable.Type)) {
+ if (!variableLiveAcrossGC(suppressed, variable.Variable)) {
+ // The earliest use of the variable should be its constructor.
+ var lineText;
+ for (var body of functionBodies) {
+ if (body.minimumUse) {
+ var text = findLocation(body, body.minimumUse);
+ if (!lineText || locationLine(lineText) > locationLine(text))
+ lineText = text;
+ }
+ }
+ print("\nFunction '" + functionName + "'" +
+ " has unnecessary root '" + name + "' at " + lineText);
+ }
+ } else if (isUnrootedType(variable.Type)) {
+ var result = variableLiveAcrossGC(suppressed, variable.Variable);
+ if (result) {
+ var lineText = findLocation(result.gcInfo.body, result.gcInfo.ppoint);
+ print("\nFunction '" + functionName + "'" +
+ " has unrooted '" + name + "'" +
+ " of type '" + typeDesc(variable.Type) + "'" +
+ " live across GC call " + result.gcInfo.name +
+ " at " + lineText);
+ printEntryTrace(functionName, result);
+ }
+ result = unsafeVariableAddressTaken(suppressed, variable.Variable);
+ if (result) {
+ var lineText = findLocation(result.body, result.ppoint);
+ print("\nFunction '" + functionName + "'" +
+ " takes unsafe address of unrooted '" + name + "'" +
+ " at " + lineText);
+ printEntryTrace(functionName, {body:result.body, ppoint:result.ppoint});
+ }
+ }
+ }
+}
+
+if (batch == 1)
+ print("Time: " + new Date);
+
+var xdb = xdbLibrary();
+xdb.open("src_body.xdb");
+
+var minStream = xdb.min_data_stream()|0;
+var maxStream = xdb.max_data_stream()|0;
+
+var N = (maxStream - minStream) + 1;
+var start = Math.floor((batch - 1) / numBatches * N) + minStream;
+var start_next = Math.floor(batch / numBatches * N) + minStream;
+var end = start_next - 1;
+
+function process(name, json) {
+ functionName = name;
+ functionBodies = JSON.parse(json);
+
+ for (var body of functionBodies)
+ body.suppressed = [];
+ for (var body of functionBodies) {
+ for (var [pbody, id] of allRAIIGuardedCallPoints(functionBodies, body, isSuppressConstructor))
+ pbody.suppressed[id] = true;
+ }
+ processBodies(functionName);
+}
+
+if (theFunctionNameToFind) {
+ var data = xdb.read_entry(theFunctionNameToFind);
+ var json = data.readString();
+ process(theFunctionNameToFind, json);
+ xdb.free_string(data);
+ quit(0);
+}
+
+for (var nameIndex = start; nameIndex <= end; nameIndex++) {
+ var name = xdb.read_key(nameIndex);
+ var functionName = name.readString();
+ var data = xdb.read_entry(name);
+ xdb.free_string(name);
+ var json = data.readString();
+ try {
+ process(functionName, json);
+ } catch (e) {
+ printErr("Exception caught while handling " + functionName);
+ throw(e);
+ }
+ xdb.free_string(data);
+}
diff --git a/js/src/devtools/rootAnalysis/annotations.js b/js/src/devtools/rootAnalysis/annotations.js
new file mode 100644
index 000000000..5b798516f
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/annotations.js
@@ -0,0 +1,404 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+// RAII types within which we should assume GC is suppressed, eg
+// AutoSuppressGC.
+var GCSuppressionTypes = [];
+
+// Ignore calls made through these function pointers
+var ignoreIndirectCalls = {
+ "mallocSizeOf" : true,
+ "aMallocSizeOf" : true,
+ "_malloc_message" : true,
+ "je_malloc_message" : true,
+ "chunk_dalloc" : true,
+ "chunk_alloc" : true,
+ "__conv" : true,
+ "__convf" : true,
+ "prerrortable.c:callback_newtable" : true,
+ "mozalloc_oom.cpp:void (* gAbortHandler)(size_t)" : true,
+};
+
+function indirectCallCannotGC(fullCaller, fullVariable)
+{
+ var caller = readable(fullCaller);
+
+ // This is usually a simple variable name, but sometimes a full name gets
+ // passed through. And sometimes that name is truncated. Examples:
+ // _ZL13gAbortHandler|mozalloc_oom.cpp:void (* gAbortHandler)(size_t)
+ // _ZL14pMutexUnlockFn|umutex.cpp:void (* pMutexUnlockFn)(const void*
+ var name = readable(fullVariable);
+
+ if (name in ignoreIndirectCalls)
+ return true;
+
+ if (name == "mapper" && caller == "ptio.c:pt_MapError")
+ return true;
+
+ if (name == "params" && caller == "PR_ExplodeTime")
+ return true;
+
+ if (name == "op" && /GetWeakmapKeyDelegate/.test(caller))
+ return true;
+
+ // hook called during script finalization which cannot GC.
+ if (/CallDestroyScriptHook/.test(caller))
+ return true;
+
+ // template method called during marking and hence cannot GC
+ if (name == "op" && caller.indexOf("bool js::WeakMap<Key, Value, HashPolicy>::keyNeedsMark(JSObject*)") != -1)
+ {
+ return true;
+ }
+
+ return false;
+}
+
+// Ignore calls through functions pointers with these types
+var ignoreClasses = {
+ "JSStringFinalizer" : true,
+ "SprintfState" : true,
+ "SprintfStateStr" : true,
+ "JSLocaleCallbacks" : true,
+ "JSC::ExecutableAllocator" : true,
+ "PRIOMethods": true,
+ "XPCOMFunctions" : true, // I'm a little unsure of this one
+ "_MD_IOVector" : true,
+ "malloc_table_t": true, // replace_malloc
+ "malloc_hook_table_t": true, // replace_malloc
+};
+
+// Ignore calls through TYPE.FIELD, where TYPE is the class or struct name containing
+// a function pointer field named FIELD.
+var ignoreCallees = {
+ "js::ClassOps.trace" : true,
+ "js::ClassOps.finalize" : true,
+ "JSRuntime.destroyPrincipals" : true,
+ "icu_50::UObject.__deleting_dtor" : true, // destructors in ICU code can't cause GC
+ "mozilla::CycleCollectedJSContext.DescribeCustomObjects" : true, // During tracing, cannot GC.
+ "mozilla::CycleCollectedJSContext.NoteCustomGCThingXPCOMChildren" : true, // During tracing, cannot GC.
+ "PLDHashTableOps.hashKey" : true,
+ "z_stream_s.zfree" : true,
+ "z_stream_s.zalloc" : true,
+ "GrGLInterface.fCallback" : true,
+ "std::strstreambuf._M_alloc_fun" : true,
+ "std::strstreambuf._M_free_fun" : true,
+ "struct js::gc::Callback<void (*)(JSContext*, void*)>.op" : true,
+ "mozilla::ThreadSharedFloatArrayBufferList::Storage.mFree" : true,
+};
+
+function fieldCallCannotGC(csu, fullfield)
+{
+ if (csu in ignoreClasses)
+ return true;
+ if (fullfield in ignoreCallees)
+ return true;
+ return false;
+}
+
+function ignoreEdgeUse(edge, variable, body)
+{
+ // Horrible special case for ignoring a false positive in xptcstubs: there
+ // is a local variable 'paramBuffer' holding an array of nsXPTCMiniVariant
+ // on the stack, which appears to be live across a GC call because its
+ // constructor is called when the array is initialized, even though the
+ // constructor is a no-op. So we'll do a very narrow exclusion for the use
+ // that incorrectly started the live range, which was basically "__temp_1 =
+ // paramBuffer".
+ //
+ // By scoping it so narrowly, we can detect most hazards that would be
+ // caused by modifications in the PrepareAndDispatch code. It just barely
+ // avoids having a hazard already.
+ if (('Name' in variable) && (variable.Name[0] == 'paramBuffer')) {
+ if (body.BlockId.Kind == 'Function' && body.BlockId.Variable.Name[0] == 'PrepareAndDispatch')
+ if (edge.Kind == 'Assign' && edge.Type.Kind == 'Pointer')
+ if (edge.Exp[0].Kind == 'Var' && edge.Exp[1].Kind == 'Var')
+ if (edge.Exp[1].Variable.Kind == 'Local' && edge.Exp[1].Variable.Name[0] == 'paramBuffer')
+ return true;
+ }
+
+ // Functions which should not be treated as using variable.
+ if (edge.Kind == "Call") {
+ var callee = edge.Exp[0];
+ if (callee.Kind == "Var") {
+ var name = callee.Variable.Name[0];
+ if (/~DebugOnly/.test(name))
+ return true;
+ if (/~ScopedThreadSafeStringInspector/.test(name))
+ return true;
+ }
+ }
+
+ return false;
+}
+
+function ignoreEdgeAddressTaken(edge)
+{
+ // Functions which may take indirect pointers to unrooted GC things,
+ // but will copy them into rooted locations before calling anything
+ // that can GC. These parameters should usually be replaced with
+ // handles or mutable handles.
+ if (edge.Kind == "Call") {
+ var callee = edge.Exp[0];
+ if (callee.Kind == "Var") {
+ var name = callee.Variable.Name[0];
+ if (/js::Invoke\(/.test(name))
+ return true;
+ }
+ }
+
+ return false;
+}
+
+// Return whether csu.method is one that we claim can never GC.
+function isSuppressedVirtualMethod(csu, method)
+{
+ return csu == "nsISupports" && (method == "AddRef" || method == "Release");
+}
+
+// Ignore calls of these functions (so ignore any stack containing these)
+var ignoreFunctions = {
+ "ptio.c:pt_MapError" : true,
+ "je_malloc_printf" : true,
+ "vprintf_stderr" : true,
+ "PR_ExplodeTime" : true,
+ "PR_ErrorInstallTable" : true,
+ "PR_SetThreadPrivate" : true,
+ "JSObject* js::GetWeakmapKeyDelegate(JSObject*)" : true, // FIXME: mark with AutoSuppressGCAnalysis instead
+ "uint8 NS_IsMainThread()" : true,
+
+ // Has an indirect call under it by the name "__f", which seemed too
+ // generic to ignore by itself.
+ "void* std::_Locale_impl::~_Locale_impl(int32)" : true,
+
+ // Bug 1056410 - devirtualization prevents the standard nsISupports::Release heuristic from working
+ "uint32 nsXPConnect::Release()" : true,
+
+ // FIXME!
+ "NS_LogInit": true,
+ "NS_LogTerm": true,
+ "NS_LogAddRef": true,
+ "NS_LogRelease": true,
+ "NS_LogCtor": true,
+ "NS_LogDtor": true,
+ "NS_LogCOMPtrAddRef": true,
+ "NS_LogCOMPtrRelease": true,
+
+ // FIXME!
+ "NS_DebugBreak": true,
+
+ // These are a little overzealous -- these destructors *can* GC if they end
+ // up wrapping a pending exception. See bug 898815 for the heavyweight fix.
+ "void js::AutoCompartment::~AutoCompartment(int32)" : true,
+ "void JSAutoCompartment::~JSAutoCompartment(int32)" : true,
+
+ // The nsScriptNameSpaceManager functions can't actually GC. They
+ // just use a PLDHashTable which has function pointers, which makes the
+ // analysis think maybe they can.
+ "nsGlobalNameStruct* nsScriptNameSpaceManager::LookupNavigatorName(nsAString_internal*)": true,
+ "nsGlobalNameStruct* nsScriptNameSpaceManager::LookupName(nsAString_internal*, uint16**)": true,
+
+ // Similar to heap snapshot mock classes, and GTests below. This posts a
+ // synchronous runnable when a GTest fails, and we are pretty sure that the
+ // particular runnable it posts can't even GC, but the analysis isn't
+ // currently smart enough to determine that. In either case, this is (a)
+ // only in GTests, and (b) only when the Gtest has already failed. We have
+ // static and dynamic checks for no GC in the non-test code, and in the test
+ // code we fall back to only the dynamic checks.
+ "void test::RingbufferDumper::OnTestPartResult(testing::TestPartResult*)" : true,
+
+ "float64 JS_GetCurrentEmbedderTime()" : true,
+
+ "uint64 js::TenuringTracer::moveObjectToTenured(JSObject*, JSObject*, int32)" : true,
+ "uint32 js::TenuringTracer::moveObjectToTenured(JSObject*, JSObject*, int32)" : true,
+ "void js::Nursery::freeMallocedBuffers()" : true,
+
+ // It would be cool to somehow annotate that nsTHashtable<T> will use
+ // nsTHashtable<T>::s_MatchEntry for its matchEntry function pointer, but
+ // there is no mechanism for that. So we will just annotate a particularly
+ // troublesome logging-related usage.
+ "EntryType* nsTHashtable<EntryType>::PutEntry(nsTHashtable<EntryType>::KeyType, const fallible_t&) [with EntryType = nsBaseHashtableET<nsCharPtrHashKey, nsAutoPtr<mozilla::LogModule> >; nsTHashtable<EntryType>::KeyType = const char*; nsTHashtable<EntryType>::fallible_t = mozilla::fallible_t]" : true,
+ "EntryType* nsTHashtable<EntryType>::GetEntry(nsTHashtable<EntryType>::KeyType) const [with EntryType = nsBaseHashtableET<nsCharPtrHashKey, nsAutoPtr<mozilla::LogModule> >; nsTHashtable<EntryType>::KeyType = const char*]" : true,
+ "EntryType* nsTHashtable<EntryType>::PutEntry(nsTHashtable<EntryType>::KeyType) [with EntryType = nsBaseHashtableET<nsPtrHashKey<const mozilla::BlockingResourceBase>, nsAutoPtr<mozilla::DeadlockDetector<mozilla::BlockingResourceBase>::OrderingEntry> >; nsTHashtable<EntryType>::KeyType = const mozilla::BlockingResourceBase*]" : true,
+ "EntryType* nsTHashtable<EntryType>::GetEntry(nsTHashtable<EntryType>::KeyType) const [with EntryType = nsBaseHashtableET<nsPtrHashKey<const mozilla::BlockingResourceBase>, nsAutoPtr<mozilla::DeadlockDetector<mozilla::BlockingResourceBase>::OrderingEntry> >; nsTHashtable<EntryType>::KeyType = const mozilla::BlockingResourceBase*]" : true,
+
+ // The big hammers.
+ "PR_GetCurrentThread" : true,
+ "calloc" : true,
+};
+
+function extraGCFunctions() {
+ return ["ffi_call"].filter(f => f in readableNames);
+}
+function isProtobuf(name)
+{
+ return name.match(/\bgoogle::protobuf\b/) ||
+ name.match(/\bmozilla::devtools::protobuf\b/);
+}
+
+function isHeapSnapshotMockClass(name)
+{
+ return name.match(/\bMockWriter\b/) ||
+ name.match(/\bMockDeserializedNode\b/);
+}
+
+function isGTest(name)
+{
+ return name.match(/\btesting::/);
+}
+
+function ignoreGCFunction(mangled)
+{
+ assert(mangled in readableNames, mangled + " not in readableNames");
+ var fun = readableNames[mangled][0];
+
+ if (fun in ignoreFunctions)
+ return true;
+
+ // The protobuf library, and [de]serialization code generated by the
+ // protobuf compiler, uses a _ton_ of function pointers but they are all
+ // internal. Easiest to just ignore that mess here.
+ if (isProtobuf(fun))
+ return true;
+
+ // Ignore anything that goes through heap snapshot GTests or mocked classes
+ // used in heap snapshot GTests. GTest and GMock expose a lot of virtual
+ // methods and function pointers that could potentially GC after an
+ // assertion has already failed (depending on user-provided code), but don't
+ // exhibit that behavior currently. For non-test code, we have dynamic and
+ // static checks that ensure we don't GC. However, for test code we opt out
+ // of static checks here, because of the above stated GMock/GTest issues,
+ // and rely on only the dynamic checks provided by AutoAssertCannotGC.
+ if (isHeapSnapshotMockClass(fun) || isGTest(fun))
+ return true;
+
+ // Templatized function
+ if (fun.indexOf("void nsCOMPtr<T>::Assert_NoQueryNeeded()") >= 0)
+ return true;
+
+ // These call through an 'op' function pointer.
+ if (fun.indexOf("js::WeakMap<Key, Value, HashPolicy>::getDelegate(") >= 0)
+ return true;
+
+ // XXX modify refillFreeList<NoGC> to not need data flow analysis to understand it cannot GC.
+ if (/refillFreeList/.test(fun) && /\(js::AllowGC\)0u/.test(fun))
+ return true;
+ return false;
+}
+
+function stripUCSAndNamespace(name)
+{
+ name = name.replace(/(struct|class|union|const) /g, "");
+ name = name.replace(/(js::ctypes::|js::|JS::|mozilla::dom::|mozilla::)/g, "");
+ return name;
+}
+
+function isRootedGCTypeName(name)
+{
+ return (name == "JSAddonId");
+}
+
+function isRootedGCPointerTypeName(name)
+{
+ name = stripUCSAndNamespace(name);
+
+ if (name.startsWith('MaybeRooted<'))
+ return /\(js::AllowGC\)1u>::RootType/.test(name);
+
+ if (name == "ErrorResult" ||
+ name == "JSErrorResult" ||
+ name == "WrappableJSErrorResult" ||
+ name == "binding_detail::FastErrorResult" ||
+ name == "IgnoredErrorResult" ||
+ name == "frontend::TokenStream" ||
+ name == "frontend::TokenStream::Position" ||
+ name == "ModuleValidator")
+ {
+ return true;
+ }
+
+ return name.startsWith('Rooted') || name.startsWith('PersistentRooted');
+}
+
+function isRootedTypeName(name)
+{
+ return isRootedGCTypeName(name) || isRootedGCPointerTypeName(name);
+}
+
+function isUnsafeStorage(typeName)
+{
+ typeName = stripUCSAndNamespace(typeName);
+ return typeName.startsWith('UniquePtr<');
+}
+
+function isSuppressConstructor(edgeType, varName)
+{
+ // Check whether this could be a constructor
+ if (edgeType.Kind != 'Function')
+ return false;
+ if (!('TypeFunctionCSU' in edgeType))
+ return false;
+ if (edgeType.Type.Kind != 'Void')
+ return false;
+
+ // Check whether the type is a known suppression type.
+ var type = edgeType.TypeFunctionCSU.Type.Name;
+ if (GCSuppressionTypes.indexOf(type) == -1)
+ return false;
+
+ // And now make sure this is the constructor, not some other method on a
+ // suppression type. varName[0] contains the qualified name.
+ var [ mangled, unmangled ] = splitFunction(varName[0]);
+ if (mangled.search(/C\dE/) == -1)
+ return false; // Mangled names of constructors have C<num>E
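+    // (In the Itanium C++ ABI, constructor clones are encoded as C1, C2, or C3
+    // -- complete-object, base-object, and complete-object-allocating
+    // constructors -- followed by the 'E' that terminates the method name,
+    // which is what the C<num>E check above relies on; see the fuller list in
+    // the comments of computeCallgraph.js.)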
+ var m = unmangled.match(/([~\w]+)(?:<.*>)?\(/);
+ if (!m)
+ return false;
+ var type_stem = type.replace(/\w+::/g, '').replace(/\<.*\>/g, '');
+ return m[1] == type_stem;
+}
+
+// nsISupports subclasses' methods may be scriptable (or overridden
+// via binary XPCOM), and so may GC. But some fields just aren't going
+// to get overridden with something that can GC.
+function isOverridableField(initialCSU, csu, field)
+{
+ if (csu != 'nsISupports')
+ return false;
+
+ // Now that binary XPCOM is dead, all these annotations should be replaced
+ // with something based on bug 1347999.
+ if (field == 'GetCurrentJSContext')
+ return false;
+ if (field == 'IsOnCurrentThread')
+ return false;
+ if (field == 'GetNativeContext')
+ return false;
+ if (field == "GetGlobalJSObject")
+ return false;
+ if (field == "GetIsMainThread")
+ return false;
+ if (initialCSU == 'nsIXPConnectJSObjectHolder' && field == 'GetJSObject')
+ return false;
+ if (initialCSU == 'nsIXPConnect' && field == 'GetSafeJSContext')
+ return false;
+
+ // nsIScriptSecurityManager is not [builtinclass], but smaug says "the
+ // interface definitely should be builtinclass", which is good enough.
+ if (initialCSU == 'nsIScriptSecurityManager' && field == 'IsSystemPrincipal')
+ return false;
+
+ if (initialCSU == 'nsIScriptContext') {
+ if (field == 'GetWindowProxy' || field == 'GetWindowProxyPreserveColor')
+ return false;
+ }
+ return true;
+}
+
+function listNonGCPointers() {
+ return [
+ // Safe only because jsids are currently only made from pinned strings.
+ 'NPIdentifier',
+ ];
+}
diff --git a/js/src/devtools/rootAnalysis/build.js b/js/src/devtools/rootAnalysis/build.js
new file mode 100755
index 000000000..d934c5663
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/build.js
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+set -e
+
+cd $SOURCE
+make -f client.mk configure
+make -C $ANALYZED_OBJDIR export
+./mach build -X nsprpub mfbt memory memory/mozalloc modules/zlib mozglue js/src xpcom/glue js/ductwork/debugger js/ipc js/xpconnect/loader js/xpconnect/wrappers js/xpconnect/src
+status=$?
+echo "[[[[ build.js complete, exit code $status ]]]]"
+exit $status
diff --git a/js/src/devtools/rootAnalysis/build/gcc-b2g.manifest b/js/src/devtools/rootAnalysis/build/gcc-b2g.manifest
new file mode 100644
index 000000000..0d5eeb050
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/build/gcc-b2g.manifest
@@ -0,0 +1,11 @@
+[
+{
+"version": "gcc 4.9.3",
+"size": 102421980,
+"visibility": "public",
+"digest": "f25292aa93dc449e0472eee511c0ac15b5f1a4272ab76cf53ce5d20dc57f29e83da49ae1a9d9e994192647f75e13ae60f75ba2ac3cb9d26d5f5d6cabf88de921",
+"algorithm": "sha512",
+"filename": "gcc.tar.xz",
+"unpack": true
+}
+]
diff --git a/js/src/devtools/rootAnalysis/build/gcc.manifest b/js/src/devtools/rootAnalysis/build/gcc.manifest
new file mode 100644
index 000000000..21b570f1c
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/build/gcc.manifest
@@ -0,0 +1,19 @@
+[
+{
+"version": "gcc 4.9.3",
+"size": 102421980,
+"visibility": "public",
+"digest": "f25292aa93dc449e0472eee511c0ac15b5f1a4272ab76cf53ce5d20dc57f29e83da49ae1a9d9e994192647f75e13ae60f75ba2ac3cb9d26d5f5d6cabf88de921",
+"algorithm": "sha512",
+"filename": "gcc.tar.xz",
+"unpack": true
+},
+{
+"size": 12072532,
+"digest": "3915f8ec396c56a8a92e6f9695b70f09ce9d1582359d1258e37e3fd43a143bc974410e4cfc27f500e095f34a8956206e0ebf799b7287f0f38def0d5e34ed71c9",
+"algorithm": "sha512",
+"filename": "gtk3.tar.xz",
+"setup": "setup.sh",
+"unpack": true
+}
+]
diff --git a/js/src/devtools/rootAnalysis/build/sixgill-b2g.manifest b/js/src/devtools/rootAnalysis/build/sixgill-b2g.manifest
new file mode 100644
index 000000000..1ecb5d066
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/build/sixgill-b2g.manifest
@@ -0,0 +1,10 @@
+[
+{
+"hg_id" : "ec7b7d2442e8",
+"algorithm" : "sha512",
+"digest" : "49627d734df52cb9e7319733da5a6be1812b9373355dc300ee5600b431122570e00d380d50c7c5b5003c462c2c2cb022494b42c4ad00f8eba01c2259cbe6e502",
+"filename" : "sixgill.tar.xz",
+"size" : 2628868,
+"unpack" : true
+}
+]
diff --git a/js/src/devtools/rootAnalysis/build/sixgill.manifest b/js/src/devtools/rootAnalysis/build/sixgill.manifest
new file mode 100644
index 000000000..d02bb1bf4
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/build/sixgill.manifest
@@ -0,0 +1,10 @@
+[
+{
+"digest" : "36dc644e24c0aa824975ad8f5c15714445d5cb064d823000c3cb637e885199414d7df551e6b99233f0656dcf5760918192ef04113c486af37f3c489bb93ad029",
+"size" : 2631908,
+"hg_id" : "8cb9c3fb039a+ tip",
+"unpack" : true,
+"filename" : "sixgill.tar.xz",
+"algorithm" : "sha512"
+}
+]
diff --git a/js/src/devtools/rootAnalysis/computeCallgraph.js b/js/src/devtools/rootAnalysis/computeCallgraph.js
new file mode 100644
index 000000000..dab3f7621
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/computeCallgraph.js
@@ -0,0 +1,435 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+loadRelativeToScript('utility.js');
+loadRelativeToScript('annotations.js');
+loadRelativeToScript('CFG.js');
+
+var theFunctionNameToFind;
+if (scriptArgs[0] == '--function') {
+ theFunctionNameToFind = scriptArgs[1];
+ scriptArgs = scriptArgs.slice(2);
+}
+
+var typeInfo_filename = scriptArgs[0] || "typeInfo.txt";
+
+var subclasses = new Map(); // Map from csu => set of immediate subclasses
+var superclasses = new Map(); // Map from csu => set of immediate superclasses
+var classFunctions = new Map(); // Map from "csu:name" => set of full method name
+
+var virtualResolutionsSeen = new Set();
+
+function addEntry(map, name, entry)
+{
+ if (!map.has(name))
+ map.set(name, new Set());
+ map.get(name).add(entry);
+}
+
+// CSU is "Class/Struct/Union"
+function processCSU(csuName, csu)
+{
+ if (!("FunctionField" in csu))
+ return;
+ for (var field of csu.FunctionField) {
+ if (1 in field.Field) {
+ var superclass = field.Field[1].Type.Name;
+ var subclass = field.Field[1].FieldCSU.Type.Name;
+ assert(subclass == csuName);
+ addEntry(subclasses, superclass, subclass);
+ addEntry(superclasses, subclass, superclass);
+ }
+ if ("Variable" in field) {
+ // Note: not dealing with overloading correctly.
+ var name = field.Variable.Name[0];
+ var key = csuName + ":" + field.Field[0].Name[0];
+ addEntry(classFunctions, key, name);
+ }
+ }
+}
+
+// Return the nearest ancestor method definition, or all nearest definitions in
+// the case of multiple inheritance.
+function nearestAncestorMethods(csu, method)
+{
+ var key = csu + ":" + method;
+
+ if (classFunctions.has(key))
+ return new Set(classFunctions.get(key));
+
+ var functions = new Set();
+ if (superclasses.has(csu)) {
+ for (var parent of superclasses.get(csu))
+ functions.update(nearestAncestorMethods(parent, method));
+ }
+
+ return functions;
+}
+
+// Return [ instantiations, suppressed ], where instantiations is a Set of all
+// possible implementations of 'field' given static type 'initialCSU', plus
+// null if arbitrary other implementations are possible, and suppressed is true
+// if the method is assumed to be non-GC'ing by annotation.
+function findVirtualFunctions(initialCSU, field)
+{
+ var worklist = [initialCSU];
+ var functions = new Set();
+
+ // Loop through all methods of initialCSU (by looking at all methods of ancestor csus).
+ //
+ // If field is nsISupports::AddRef or ::Release, return an empty list and a
+ // boolean that says we assert that it cannot GC.
+ //
+ // If this is a method that is annotated to be dangerous (eg, it could be
+ // overridden with an implementation that could GC), then use null as a
+ // signal value that it should be considered to GC, even though we'll also
+ // collect all of the instantiations for other purposes.
+
+ while (worklist.length) {
+ var csu = worklist.pop();
+ if (isSuppressedVirtualMethod(csu, field))
+ return [ new Set(), true ];
+ if (isOverridableField(initialCSU, csu, field)) {
+ // We will still resolve the virtual function call, because it's
+ // nice to have as complete a callgraph as possible for other uses.
+ // But push a token saying that we can run arbitrary code.
+ functions.add(null);
+ }
+
+ if (superclasses.has(csu))
+ worklist.push(...superclasses.get(csu));
+ }
+
+ // Now return a list of all the instantiations of the method named 'field'
+ // that could execute on an instance of initialCSU or a descendant class.
+
+ // Start with the class itself, or if it doesn't define the method, all
+ // nearest ancestor definitions.
+ functions.update(nearestAncestorMethods(initialCSU, field));
+
+ // Then recurse through all descendants to add in their definitions.
+ var worklist = [initialCSU];
+ while (worklist.length) {
+ var csu = worklist.pop();
+ var key = csu + ":" + field;
+
+ if (classFunctions.has(key))
+ functions.update(classFunctions.get(key));
+
+ if (subclasses.has(csu))
+ worklist.push(...subclasses.get(csu));
+ }
+
+ return [ functions, false ];
+}
+
+var memoized = new Map();
+var memoizedCount = 0;
+
+function memo(name)
+{
+ if (!memoized.has(name)) {
+ let id = memoized.size + 1;
+ memoized.set(name, "" + id);
+ print(`#${id} ${name}`);
+ }
+ return memoized.get(name);
+}
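+// For example, the first time memo() sees a given name it prints "#1 <name>"
+// (for id 1) and returns "1"; the callgraph lines emitted below (D/F/R/I/T)
+// then refer to functions by these numeric ids instead of repeating the names.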
+
+// Return a list of all callees that the given edge might be a call to. Each
+// one is represented by an object with a 'kind' field that is one of
+// ('direct', 'field', 'resolved-field', 'indirect', 'unknown'), though note
+// that 'resolved-field' is really a global record of virtual method
+// resolutions, independent of this particular edge.
+function getCallees(edge)
+{
+ if (edge.Kind != "Call")
+ return [];
+
+ var callee = edge.Exp[0];
+ var callees = [];
+ if (callee.Kind == "Var") {
+ assert(callee.Variable.Kind == "Func");
+ callees.push({'kind': 'direct', 'name': callee.Variable.Name[0]});
+ } else {
+ assert(callee.Kind == "Drf");
+ if (callee.Exp[0].Kind == "Fld") {
+ var field = callee.Exp[0].Field;
+ var fieldName = field.Name[0];
+ var csuName = field.FieldCSU.Type.Name;
+ var functions;
+ if ("FieldInstanceFunction" in field) {
+ let suppressed;
+                [ functions, suppressed ] = findVirtualFunctions(csuName, fieldName);
+ if (suppressed) {
+ // Field call known to not GC; mark it as suppressed so
+ // direct invocations will be ignored
+ callees.push({'kind': "field", 'csu': csuName, 'field': fieldName,
+ 'suppressed': true});
+ }
+ } else {
+ functions = new Set([null]); // field call
+ }
+
+ // Known set of virtual call targets. Treat them as direct calls to
+ // all possible resolved types, but also record edges from this
+ // field call to each final callee. When the analysis is checking
+ // whether an edge can GC and it sees an unrooted pointer held live
+ // across this field call, it will know whether any of the direct
+ // callees can GC or not.
+ var targets = [];
+ var fullyResolved = true;
+ for (var name of functions) {
+ if (name === null) {
+ // Unknown set of call targets, meaning either a function
+ // pointer call ("field call") or a virtual method that can
+ // be overridden in extensions.
+ callees.push({'kind': "field", 'csu': csuName, 'field': fieldName});
+ fullyResolved = false;
+ } else {
+ callees.push({'kind': "direct", 'name': name});
+ targets.push({'kind': "direct", 'name': name});
+ }
+ }
+ if (fullyResolved)
+ callees.push({'kind': "resolved-field", 'csu': csuName, 'field': fieldName, 'callees': targets});
+ } else if (callee.Exp[0].Kind == "Var") {
+ // indirect call through a variable.
+ callees.push({'kind': "indirect", 'variable': callee.Exp[0].Variable.Name[0]});
+ } else {
+ // unknown call target.
+ callees.push({'kind': "unknown"});
+ }
+ }
+
+ return callees;
+}
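+// Rough illustration (names are made up): a direct call foo(x) yields
+// [ { kind: 'direct', name: <full name of foo> } ], while a call through an
+// unresolvable function-pointer field someObj->fptr(x) on class 'SomeClass'
+// yields [ { kind: 'field', csu: 'SomeClass', field: 'fptr' } ].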
+
+var lastline;
+function printOnce(line)
+{
+ if (line != lastline) {
+ print(line);
+ lastline = line;
+ }
+}
+
+// Returns a table mapping function name to lists of [annotation-name,
+// annotation-value] pairs: { function-name => [ [annotation-name, annotation-value] ] }
+function getAnnotations(body)
+{
+ var all_annotations = {};
+ for (var v of (body.DefineVariable || [])) {
+ if (v.Variable.Kind != 'Func')
+ continue;
+ var name = v.Variable.Name[0];
+ var annotations = all_annotations[name] = [];
+
+ for (var ann of (v.Type.Annotation || [])) {
+ annotations.push(ann.Name);
+ }
+ }
+
+ return all_annotations;
+}
+
+function getTags(functionName, body) {
+ var tags = new Set();
+ var annotations = getAnnotations(body);
+ if (functionName in annotations) {
+ for (var [ annName, annValue ] of annotations[functionName]) {
+ if (annName == 'Tag')
+ tags.add(annValue);
+ }
+ }
+ return tags;
+}
+
+function processBody(functionName, body)
+{
+ if (!('PEdge' in body))
+ return;
+
+ for (var tag of getTags(functionName, body).values())
+ print("T " + memo(functionName) + " " + tag);
+
+ // Set of all callees that have been output so far, in order to suppress
+ // repeated callgraph edges from being recorded. Use a separate set for
+ // suppressed callees, since we don't want a suppressed edge (within one
+ // RAII scope) to prevent an unsuppressed edge from being recorded. The
+ // seen array is indexed by a boolean 'suppressed' variable.
+ var seen = [ new Set(), new Set() ];
+
+ lastline = null;
+ for (var edge of body.PEdge) {
+ if (edge.Kind != "Call")
+ continue;
+
+ // Whether this call is within the RAII scope of a GC suppression class
+ var edgeSuppressed = (edge.Index[0] in body.suppressed);
+
+ for (var callee of getCallees(edge)) {
+ var suppressed = Boolean(edgeSuppressed || callee.suppressed);
+ var prologue = suppressed ? "SUPPRESS_GC " : "";
+ prologue += memo(functionName) + " ";
+ if (callee.kind == 'direct') {
+ if (!seen[+suppressed].has(callee.name)) {
+ seen[+suppressed].add(callee.name);
+ printOnce("D " + prologue + memo(callee.name));
+ }
+ } else if (callee.kind == 'field') {
+ var { csu, field } = callee;
+ printOnce("F " + prologue + "CLASS " + csu + " FIELD " + field);
+ } else if (callee.kind == 'resolved-field') {
+ // Fully-resolved field (virtual method) call. Record the
+ // callgraph edges. Do not consider suppression, since it is
+ // local to this callsite and we are writing out a global
+ // record here.
+ //
+ // Any field call that does *not* have an R entry must be
+ // assumed to call anything.
+ var { csu, field, callees } = callee;
+ var fullFieldName = csu + "." + field;
+ if (!virtualResolutionsSeen.has(fullFieldName)) {
+ virtualResolutionsSeen.add(fullFieldName);
+ for (var target of callees)
+ printOnce("R " + memo(fullFieldName) + " " + memo(target.name));
+ }
+ } else if (callee.kind == 'indirect') {
+ printOnce("I " + prologue + "VARIABLE " + callee.variable);
+ } else if (callee.kind == 'unknown') {
+ printOnce("I " + prologue + "VARIABLE UNKNOWN");
+ } else {
+ printErr("invalid " + callee.kind + " callee");
+ debugger;
+ }
+ }
+ }
+}
+
+GCSuppressionTypes = loadTypeInfo(typeInfo_filename)["Suppress GC"] || [];
+
+var xdb = xdbLibrary();
+xdb.open("src_comp.xdb");
+
+var minStream = xdb.min_data_stream();
+var maxStream = xdb.max_data_stream();
+
+for (var csuIndex = minStream; csuIndex <= maxStream; csuIndex++) {
+ var csu = xdb.read_key(csuIndex);
+ var data = xdb.read_entry(csu);
+ var json = JSON.parse(data.readString());
+ processCSU(csu.readString(), json[0]);
+
+ xdb.free_string(csu);
+ xdb.free_string(data);
+}
+
+xdb.open("src_body.xdb");
+
+printErr("Finished loading data structures");
+
+var minStream = xdb.min_data_stream();
+var maxStream = xdb.max_data_stream();
+
+if (theFunctionNameToFind) {
+ var index = xdb.lookup_key(theFunctionNameToFind);
+ if (!index) {
+ printErr("Function not found");
+ quit(1);
+ }
+ minStream = maxStream = index;
+}
+
+function process(functionName, functionBodies)
+{
+ for (var body of functionBodies)
+ body.suppressed = [];
+
+ for (var body of functionBodies) {
+ for (var [pbody, id] of allRAIIGuardedCallPoints(functionBodies, body, isSuppressConstructor))
+ pbody.suppressed[id] = true;
+ }
+
+ for (var body of functionBodies)
+ processBody(functionName, body);
+
+ // GCC generates multiple constructors and destructors ("in-charge" and
+ // "not-in-charge") to handle virtual base classes. They are normally
+ // identical, and it appears that GCC does some magic to alias them to the
+ // same thing. But this aliasing is not visible to the analysis. So we'll
+ // add a dummy call edge from "foo" -> "foo *INTERNAL* ", since only "foo"
+ // will show up as called but only "foo *INTERNAL* " will be emitted in the
+ // case where the constructors are identical.
+ //
+ // This is slightly conservative in the case where they are *not*
+ // identical, but that should be rare enough that we don't care.
+ var markerPos = functionName.indexOf(internalMarker);
+ if (markerPos > 0) {
+ var inChargeXTor = functionName.replace(internalMarker, "");
+ print("D " + memo(inChargeXTor) + " " + memo(functionName));
+
+ // Bug 1056410: Oh joy. GCC does something even funkier internally,
+ // where it generates calls to ~Foo() but a body for ~Foo(int32) even
+ // though it uses the same mangled name for both. So we need to add a
+ // synthetic edge from ~Foo() -> ~Foo(int32).
+ //
+ // inChargeXTor will have the (int32).
+ if (functionName.indexOf("::~") > 0) {
+ var calledDestructor = inChargeXTor.replace("(int32)", "()");
+ print("D " + memo(calledDestructor) + " " + memo(inChargeXTor));
+ }
+ }
+
+ // Further note: from http://mentorembedded.github.io/cxx-abi/abi.html the
+ // different kinds of constructors/destructors are:
+ // C1 # complete object constructor
+ // C2 # base object constructor
+ // C3 # complete object allocating constructor
+ // D0 # deleting destructor
+ // D1 # complete object destructor
+ // D2 # base object destructor
+ //
+ // In actual practice, I have observed C4 and D4 xtors generated by gcc
+ // 4.9.3 (but not 4.7.3). The gcc source code says:
+ //
+ // /* This is the old-style "[unified]" constructor.
+ // In some cases, we may emit this function and call
+ // it from the clones in order to share code and save space. */
+ //
+ // Unfortunately, that "call... from the clones" does not seem to appear in
+ // the CFG we get from GCC. So if we see a C4 constructor or D4 destructor,
+ // inject an edge to it from C1, C2, and C3 (or D1, D2, and D3). (Note that
+ // C3 isn't even used in current GCC, but add the edge anyway just in
+ // case.)
+ if (functionName.indexOf("C4E") != -1 || functionName.indexOf("D4Ev") != -1) {
+ var [ mangled, unmangled ] = splitFunction(functionName);
+ // E terminates the method name (and precedes the method parameters).
+ // If eg "C4E" shows up in the mangled name for another reason, this
+        // will create bogus edges in the callgraph. But it will affect little
+        // and is somewhat difficult to avoid, so we will live with it.
+ for (let [synthetic, variant] of [['C4E', 'C1E'],
+ ['C4E', 'C2E'],
+ ['C4E', 'C3E'],
+ ['D4Ev', 'D1Ev'],
+ ['D4Ev', 'D2Ev'],
+ ['D4Ev', 'D3Ev']])
+ {
+ if (mangled.indexOf(synthetic) == -1)
+ continue;
+
+ let variant_mangled = mangled.replace(synthetic, variant);
+ let variant_full = variant_mangled + "$" + unmangled;
+ print("D " + memo(variant_full) + " " + memo(functionName));
+ }
+ }
+}
+
+for (var nameIndex = minStream; nameIndex <= maxStream; nameIndex++) {
+ var name = xdb.read_key(nameIndex);
+ var data = xdb.read_entry(name);
+ process(name.readString(), JSON.parse(data.readString()));
+ xdb.free_string(name);
+ xdb.free_string(data);
+}
diff --git a/js/src/devtools/rootAnalysis/computeGCFunctions.js b/js/src/devtools/rootAnalysis/computeGCFunctions.js
new file mode 100644
index 000000000..97efcb38a
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/computeGCFunctions.js
@@ -0,0 +1,69 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+loadRelativeToScript('utility.js');
+loadRelativeToScript('annotations.js');
+loadRelativeToScript('loadCallgraph.js');
+
+if (typeof scriptArgs[0] != 'string')
+ throw "Usage: computeGCFunctions.js <callgraph.txt> <out:gcFunctions.txt> <out:gcFunctions.lst> <out:gcEdges.txt> <out:suppressedFunctions.lst>";
+
+var start = "Time: " + new Date;
+
+var callgraph_filename = scriptArgs[0];
+var gcFunctions_filename = scriptArgs[1] || "gcFunctions.txt";
+var gcFunctionsList_filename = scriptArgs[2] || "gcFunctions.lst";
+var gcEdges_filename = scriptArgs[3] || "gcEdges.txt";
+var suppressedFunctionsList_filename = scriptArgs[4] || "suppressedFunctions.lst";
+
+loadCallgraph(callgraph_filename);
+
+printErr("Writing " + gcFunctions_filename);
+redirect(gcFunctions_filename);
+
+for (var name in gcFunctions) {
+ for (let readable of readableNames[name]) {
+ print("");
+ print("GC Function: " + name + "$" + readable);
+ let current = name;
+ do {
+ current = gcFunctions[current];
+ if (current in readableNames)
+ print(" " + readableNames[current][0]);
+ else
+ print(" " + current);
+ } while (current in gcFunctions);
+ }
+}
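+// The resulting gcFunctions.txt entries look roughly like (names made up):
+//
+//   GC Function: <mangled>$void foo()
+//       void bar()
+//       GC
+//
+// i.e. foo can GC because it calls bar, which is itself tagged as a GC call.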
+
+printErr("Writing " + gcFunctionsList_filename);
+redirect(gcFunctionsList_filename);
+for (var name in gcFunctions) {
+ for (var readable of readableNames[name])
+ print(name + "$" + readable);
+}
+
+// gcEdges is a list of edges that can GC for more specific reasons than just
+// calling a function that is in gcFunctions.txt.
+//
+// Right now, it is unused. It was meant for ~AutoCompartment when it might
+// wrap an exception, but anything held live across ~AC will have to be held
+// live across the corresponding constructor (and hence the whole scope of the
+// AC), and in that case it'll be held live across whatever could create an
+// exception within the AC scope. So ~AC edges are redundant. I will leave the
+// stub machinery here for now.
+printErr("Writing " + gcEdges_filename);
+redirect(gcEdges_filename);
+for (var block in gcEdges) {
+ for (var edge in gcEdges[block]) {
+ var func = gcEdges[block][edge];
+ print([ block, edge, func ].join(" || "));
+ }
+}
+
+printErr("Writing " + suppressedFunctionsList_filename);
+redirect(suppressedFunctionsList_filename);
+for (var name in suppressedFunctions) {
+ print(name);
+}
diff --git a/js/src/devtools/rootAnalysis/computeGCTypes.js b/js/src/devtools/rootAnalysis/computeGCTypes.js
new file mode 100644
index 000000000..af4d70389
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/computeGCTypes.js
@@ -0,0 +1,299 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+loadRelativeToScript('utility.js');
+loadRelativeToScript('annotations.js');
+
+var gcTypes_filename = scriptArgs[0] || "gcTypes.txt";
+var typeInfo_filename = scriptArgs[1] || "typeInfo.txt";
+
+var annotations = {
+ 'GCPointers': [],
+ 'GCThings': [],
+ 'NonGCTypes': {}, // unused
+ 'NonGCPointers': {},
+ 'RootedPointers': {},
+ 'GCSuppressors': {},
+};
+
+var gDescriptors = new Map; // Map from descriptor string => Set of typeName
+
+var structureParents = {}; // Map from field => list of <parent, fieldName>
+var pointerParents = {}; // Map from field => list of <parent, fieldName>
+var baseClasses = {}; // Map from struct name => list of base class name strings
+
+var gcTypes = {}; // map from parent struct => Set of GC typed children
+var gcPointers = {}; // map from parent struct => Set of GC typed children
+var gcFields = new Map;
+
+var rootedPointers = {};
+
+function processCSU(csu, body)
+{
+ for (let { 'Base': base } of (body.CSUBaseClass || []))
+ addBaseClass(csu, base);
+
+ for (let field of (body.DataField || [])) {
+ var type = field.Field.Type;
+ var fieldName = field.Field.Name[0];
+ if (type.Kind == "Pointer") {
+ var target = type.Type;
+ if (target.Kind == "CSU")
+ addNestedPointer(csu, target.Name, fieldName);
+ }
+ if (type.Kind == "Array") {
+ var target = type.Type;
+ if (target.Kind == "CSU")
+ addNestedStructure(csu, target.Name, fieldName);
+ }
+ if (type.Kind == "CSU") {
+ // Ignore nesting in classes which are AutoGCRooters. We only consider
+ // types with fields that may not be properly rooted.
+ if (type.Name == "JS::AutoGCRooter" || type.Name == "JS::CustomAutoRooter")
+ return;
+ addNestedStructure(csu, type.Name, fieldName);
+ }
+ }
+
+ for (let { 'Name': [ annType, tag ] } of (body.Annotation || [])) {
+ if (annType != 'Tag')
+ continue;
+
+ if (tag == 'GC Pointer')
+ annotations.GCPointers.push(csu);
+ else if (tag == 'Invalidated by GC')
+ annotations.GCPointers.push(csu);
+ else if (tag == 'GC Thing')
+ annotations.GCThings.push(csu);
+ else if (tag == 'Suppressed GC Pointer')
+ annotations.NonGCPointers[csu] = true;
+ else if (tag == 'Rooted Pointer')
+ annotations.RootedPointers[csu] = true;
+ else if (tag == 'Suppress GC')
+ annotations.GCSuppressors[csu] = true;
+ }
+}
+
+// csu.field is of type inner
+function addNestedStructure(csu, inner, field)
+{
+ if (!(inner in structureParents))
+ structureParents[inner] = [];
+
+ if (field.match(/^field:\d+$/) && (csu in baseClasses) && (baseClasses[csu].indexOf(inner) != -1))
+ return;
+
+ structureParents[inner].push([ csu, field ]);
+}
+
+function addBaseClass(csu, base) {
+ if (!(csu in baseClasses))
+ baseClasses[csu] = [];
+ baseClasses[csu].push(base);
+ var k = baseClasses[csu].length;
+ addNestedStructure(csu, base, `<base-${k}>`);
+}
+
+function addNestedPointer(csu, inner, field)
+{
+ if (!(inner in pointerParents))
+ pointerParents[inner] = [];
+ pointerParents[inner].push([ csu, field ]);
+}
+
+var xdb = xdbLibrary();
+xdb.open("src_comp.xdb");
+
+var minStream = xdb.min_data_stream();
+var maxStream = xdb.max_data_stream();
+
+for (var csuIndex = minStream; csuIndex <= maxStream; csuIndex++) {
+ var csu = xdb.read_key(csuIndex);
+ var data = xdb.read_entry(csu);
+ var json = JSON.parse(data.readString());
+ assert(json.length == 1);
+ processCSU(csu.readString(), json[0]);
+
+ xdb.free_string(csu);
+ xdb.free_string(data);
+}
+
+// Now that we have the whole hierarchy set up, add all the types and propagate
+// info.
+for (let csu of annotations.GCThings)
+ addGCType(csu);
+for (let csu of annotations.GCPointers)
+ addGCPointer(csu);
+
+function stars(n) { return n ? '*' + stars(n-1) : '' };
+
+// "typeName is a (pointer to a)^'typePtrLevel' GC type because it contains a field
+// named 'child' of type 'why' (or pointer to 'why' if fieldPtrLevel == 1), which is
+// itself a GCThing or GCPointer."
+function markGCType(typeName, child, why, typePtrLevel, fieldPtrLevel, indent)
+{
+ //printErr(`${indent}${typeName}${stars(typePtrLevel)} may be a gctype/ptr because of its child '${child}' of type ${why}${stars(fieldPtrLevel)}`);
+
+ // Some types, like UniquePtr, do not mark/trace/relocate their contained
+ // pointers and so should not hold them live across a GC. UniquePtr in
+ // particular should be the only thing pointing to a structure containing a
+ // GCPointer, so nothing else can possibly trace it and it'll die when the
+ // UniquePtr goes out of scope. So we say that memory pointed to by a
+ // UniquePtr is just as unsafe as the stack for storing GC pointers.
+ if (!fieldPtrLevel && isUnsafeStorage(typeName)) {
+ // The UniquePtr itself is on the stack but when you dereference the
+ // contained pointer, you get to the unsafe memory that we are treating
+ // as if it were the stack (aka ptrLevel 0). Note that
+ // UniquePtr<UniquePtr<JSObject*>> is fine, so we don't want to just
+ // hardcode the ptrLevel.
+ fieldPtrLevel = -1;
+ }
+
+ // Example: with:
+ // struct Pair { JSObject* foo; int bar; };
+ // struct { Pair** info }***
+ // make a call to:
+ // child='info' typePtrLevel=3 fieldPtrLevel=2
+ // for a final ptrLevel of 5, used to later call:
+ // child='foo' typePtrLevel=5 fieldPtrLevel=1
+ //
+ var ptrLevel = typePtrLevel + fieldPtrLevel;
+
+ // ...except when > 2 levels of pointers away from an actual GC thing, stop
+ // searching the graph. (This would just be > 1, except that a UniquePtr
+ // field might still have a GC pointer.)
+ if (ptrLevel > 2)
+ return;
+
+ if (ptrLevel == 0 && isRootedGCTypeName(typeName))
+ return;
+ if (ptrLevel == 1 && isRootedGCPointerTypeName(typeName))
+ return;
+
+ if (ptrLevel == 0) {
+ if (typeName in annotations.NonGCTypes)
+ return;
+ if (!(typeName in gcTypes))
+ gcTypes[typeName] = new Set();
+ gcTypes[typeName].add(why);
+ } else if (ptrLevel == 1) {
+ if (typeName in annotations.NonGCPointers)
+ return;
+ if (!(typeName in gcPointers))
+ gcPointers[typeName] = new Set();
+ gcPointers[typeName].add(why);
+ }
+
+ if (ptrLevel < 2) {
+ if (!gcFields.has(typeName))
+ gcFields.set(typeName, new Map());
+ gcFields.get(typeName).set(child, [ why, fieldPtrLevel ]);
+ }
+
+ if (typeName in structureParents) {
+ for (var field of structureParents[typeName]) {
+ var [ holderType, fieldName ] = field;
+ markGCType(holderType, fieldName, typeName, ptrLevel, 0, indent + " ");
+ }
+ }
+ if (typeName in pointerParents) {
+ for (var field of pointerParents[typeName]) {
+ var [ holderType, fieldName ] = field;
+ markGCType(holderType, fieldName, typeName, ptrLevel, 1, indent + " ");
+ }
+ }
+}
+
+function addGCType(typeName)
+{
+ markGCType(typeName, '<annotation>', '(annotation)', 0, 0, "");
+}
+
+function addGCPointer(typeName)
+{
+ markGCType(typeName, '<pointer-annotation>', '(annotation)', 1, 0, "");
+}
+
+// Add an arbitrary descriptor to a type, and apply it recursively to all base
+// structs and structs that contain the given typeName as a field.
+function addDescriptor(typeName, descriptor)
+{
+ if (!gDescriptors.has(descriptor))
+ gDescriptors.set(descriptor, new Set);
+ let descriptorTypes = gDescriptors.get(descriptor);
+ if (!descriptorTypes.has(typeName)) {
+ descriptorTypes.add(typeName);
+ if (typeName in structureParents) {
+ for (let [holder, field] of structureParents[typeName])
+ addDescriptor(holder, descriptor);
+ }
+ if (typeName in baseClasses) {
+ for (let base of baseClasses[typeName])
+ addDescriptor(base, descriptor);
+ }
+ }
+}
+
+for (var type of listNonGCPointers())
+ annotations.NonGCPointers[type] = true;
+
+function explain(csu, indent, seen) {
+ if (!seen)
+ seen = new Set();
+ seen.add(csu);
+ if (!gcFields.has(csu))
+ return;
+ var fields = gcFields.get(csu);
+
+ if (fields.has('<annotation>')) {
+ print(indent + "which is annotated as a GCThing");
+ return;
+ }
+ if (fields.has('<pointer-annotation>')) {
+ print(indent + "which is annotated as a GCPointer");
+ return;
+ }
+ for (var [ field, [ child, ptrdness ] ] of fields) {
+ var msg = indent;
+ if (field[0] == '<')
+ msg += "inherits from ";
+ else {
+ msg += "contains field '" + field + "' ";
+ if (ptrdness == -1)
+ msg += "(with a pointer to unsafe storage) holding a ";
+ else if (ptrdness == 0)
+ msg += "of type ";
+ else
+ msg += "pointing to type ";
+ }
+ msg += child;
+ print(msg);
+ if (!seen.has(child))
+ explain(child, indent + " ", seen);
+ }
+}
+
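+// Illustrative example of the output produced below, assuming a hypothetical
+// struct MyStruct { JSObject* obj; } where JSObject carries the 'GC Thing'
+// annotation:
+//
+//   GCPointer: MyStruct
+//       contains field 'obj' pointing to type JSObject
+//         which is annotated as a GCThing
+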
+var origOut = os.file.redirect(gcTypes_filename);
+
+for (var csu in gcTypes) {
+ print("GCThing: " + csu);
+ explain(csu, " ");
+}
+for (var csu in gcPointers) {
+ print("GCPointer: " + csu);
+ explain(csu, " ");
+}
+
+// Redirect output to the typeInfo file and close the gcTypes file.
+os.file.close(os.file.redirect(typeInfo_filename));
+
+for (let csu in annotations.GCSuppressors)
+ addDescriptor(csu, 'Suppress GC');
+
+for (let [descriptor, types] of gDescriptors) {
+ for (let csu of types)
+ print(descriptor + "$$" + csu);
+}
+
+os.file.close(os.file.redirect(origOut));
diff --git a/js/src/devtools/rootAnalysis/expect.b2g.json b/js/src/devtools/rootAnalysis/expect.b2g.json
new file mode 100644
index 000000000..06f2beb36
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/expect.b2g.json
@@ -0,0 +1,3 @@
+{
+ "expect-hazards": 0
+}
diff --git a/js/src/devtools/rootAnalysis/expect.browser.json b/js/src/devtools/rootAnalysis/expect.browser.json
new file mode 100644
index 000000000..06f2beb36
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/expect.browser.json
@@ -0,0 +1,3 @@
+{
+ "expect-hazards": 0
+}
diff --git a/js/src/devtools/rootAnalysis/expect.shell.json b/js/src/devtools/rootAnalysis/expect.shell.json
new file mode 100644
index 000000000..06f2beb36
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/expect.shell.json
@@ -0,0 +1,3 @@
+{
+ "expect-hazards": 0
+}
diff --git a/js/src/devtools/rootAnalysis/explain.py b/js/src/devtools/rootAnalysis/explain.py
new file mode 100755
index 000000000..dc8b76f5c
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/explain.py
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+
+import re
+import argparse
+
+from collections import defaultdict
+
+parser = argparse.ArgumentParser(description='Split the rooting hazards list into separate hazard, unnecessary-root, and unsafe-reference reports, annotating each hazard with its GC call chain.')
+parser.add_argument('rootingHazards', nargs='?', default='rootingHazards.txt')
+parser.add_argument('gcFunctions', nargs='?', default='gcFunctions.txt')
+parser.add_argument('hazards', nargs='?', default='hazards.txt')
+parser.add_argument('extra', nargs='?', default='unnecessary.txt')
+parser.add_argument('refs', nargs='?', default='refs.txt')
+args = parser.parse_args()
+
+num_hazards = 0
+num_refs = 0
+try:
+ with open(args.rootingHazards) as rootingHazards, \
+ open(args.hazards, 'w') as hazards, \
+ open(args.extra, 'w') as extra, \
+ open(args.refs, 'w') as refs:
+ current_gcFunction = None
+
+ # Map from a GC function name to the list of hazards resulting from
+ # that GC function
+ hazardousGCFunctions = defaultdict(list)
+
+ # List of tuples (gcFunction, index of hazard) used to maintain the
+ # ordering of the hazards
+ hazardOrder = []
+
+ for line in rootingHazards:
+ m = re.match(r'^Time: (.*)', line)
+ mm = re.match(r'^Run on:', line)
+ if m or mm:
+ print >>hazards, line
+ print >>extra, line
+ print >>refs, line
+ continue
+
+ m = re.match(r'^Function.*has unnecessary root', line)
+ if m:
+ print >>extra, line
+ continue
+
+ m = re.match(r'^Function.*takes unsafe address of unrooted', line)
+ if m:
+ num_refs += 1
+ print >>refs, line
+ continue
+
+ m = re.match(r"^Function.*has unrooted.*of type.*live across GC call ('?)(.*?)('?) at \S+:\d+$", line)
+ if m:
+ # Function names are surrounded by single quotes. Field calls
+ # are unquoted.
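+                # A hazard line looks roughly like (names made up):
+                #   Function 'foo()' has unrooted 'obj' of type 'JSObject*'
+                #   live across GC call 'js::GC(JSContext*)' at Foo.cpp:42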
+ current_gcFunction = m.group(2)
+ hazardousGCFunctions[current_gcFunction].append(line)
+ hazardOrder.append((current_gcFunction, len(hazardousGCFunctions[current_gcFunction]) - 1))
+ num_hazards += 1
+ continue
+
+ if current_gcFunction:
+ if not line.strip():
+ # Blank line => end of this hazard
+ current_gcFunction = None
+ else:
+ hazardousGCFunctions[current_gcFunction][-1] += line
+
+ with open(args.gcFunctions) as gcFunctions:
+ gcExplanations = {} # gcFunction => stack showing why it can GC
+
+ current_func = None
+ explanation = None
+ for line in gcFunctions:
+ m = re.match(r'^GC Function: (.*)', line)
+ if m:
+ if current_func:
+ gcExplanations[current_func] = explanation
+ current_func = None
+ if m.group(1) in hazardousGCFunctions:
+ current_func = m.group(1)
+ explanation = line
+ elif current_func:
+ explanation += line
+ if current_func:
+ gcExplanations[current_func] = explanation
+
+ for gcFunction, index in hazardOrder:
+ gcHazards = hazardousGCFunctions[gcFunction]
+
+ if gcFunction in gcExplanations:
+ print >>hazards, (gcHazards[index] + gcExplanations[gcFunction])
+ else:
+ print >>hazards, gcHazards[index]
+
+except IOError as e:
+ print 'Failed: %s' % str(e)
+
+print("Wrote %s" % args.hazards)
+print("Wrote %s" % args.extra)
+print("Wrote %s" % args.refs)
+print("Found %d hazards and %d unsafe references" % (num_hazards, num_refs))
diff --git a/js/src/devtools/rootAnalysis/gen-hazards.sh b/js/src/devtools/rootAnalysis/gen-hazards.sh
new file mode 100755
index 000000000..7007969a1
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/gen-hazards.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+set -e
+
+JOBS="$1"
+
+for j in $(seq $JOBS); do
+ env PATH=$PATH:$SIXGILL/bin XDB=$SIXGILL/bin/xdb.so $JS $ANALYZE gcFunctions.lst suppressedFunctions.lst gcTypes.txt $j $JOBS tmp.$j > rootingHazards.$j &
+done
+
+wait
+
+for j in $(seq $JOBS); do
+ cat rootingHazards.$j
+done
diff --git a/js/src/devtools/rootAnalysis/loadCallgraph.js b/js/src/devtools/rootAnalysis/loadCallgraph.js
new file mode 100644
index 000000000..9ee8d7628
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/loadCallgraph.js
@@ -0,0 +1,203 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+loadRelativeToScript('utility.js');
+
+// Functions come out of sixgill in the form "mangled|readable". The mangled
+// name is Truth. One mangled name might correspond to multiple readable names,
+// for multiple reasons, including (1) sixgill/gcc doesn't always qualify types
+// the same way or de-typedef the same amount; (2) sixgill's output treats
+// references and pointers the same, and so doesn't distinguish them, but C++
+// treats them as separate for overloading and linking; (3) (identical)
+// destructors sometimes have an int32 parameter, sometimes not.
+//
+// The readable names are useful because they're far more meaningful to the
+// user, and are what should show up in reports and questions to mrgiggles. At
+// least in most cases, it's fine to have the extra mangled name tacked onto
+// the beginning for these.
+//
+// The strategy used is to separate out the pieces whenever they are read in,
+// create a table mapping mangled names to (one of the) readable names, and
+// use the mangled names in all computation.
+//
+// Note that callgraph.txt uses a compressed representation -- each name is
+// mapped to an integer, and those integers are what is recorded in the edges.
+// But the integers depend on the full name, whereas the true edge should only
+// consider the mangled name. And some of the names encoded in callgraph.txt
+// are FieldCalls, not just function names.
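+//
+// As a rough illustration (line shapes follow the regexps in loadCallgraph()
+// below and the output of computeCallgraph.js; the ids and names are made up),
+// callgraph.txt contains lines such as:
+//   #1 <full name of function 1>
+//   #2 <full name of function 2>
+//   D 1 2                 function 1 directly calls function 2
+//   D SUPPRESS_GC 1 2     the same edge, but inside a GC-suppressing RAII scope
+//   T 2 GC Call           function 2 is annotated as a GC call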
+
+var readableNames = {}; // map from mangled name => list of readable names
+var mangledName = {}; // map from demangled names => mangled names. Could be eliminated.
+var calleeGraph = {}; // map from mangled => list of tuples of {'callee':mangled, 'suppressed':bool}
+var callerGraph = {}; // map from mangled => list of tuples of {'caller':mangled, 'suppressed':bool}
+var gcFunctions = {}; // map from mangled callee => reason
+var suppressedFunctions = {}; // set of mangled names (map from mangled name => true)
+var gcEdges = {};
+
+function addGCFunction(caller, reason)
+{
+ if (caller in suppressedFunctions)
+ return false;
+
+ if (ignoreGCFunction(caller))
+ return false;
+
+ if (!(caller in gcFunctions)) {
+ gcFunctions[caller] = reason;
+ return true;
+ }
+
+ return false;
+}
+
+function addCallEdge(caller, callee, suppressed)
+{
+ addToKeyedList(calleeGraph, caller, {callee:callee, suppressed:suppressed});
+ addToKeyedList(callerGraph, callee, {caller:caller, suppressed:suppressed});
+}
+
+// Map from identifier to full "mangled|readable" name. Or sometimes to a
+// Class.Field name.
+var functionNames = [""];
+
+// Map from identifier to mangled name (or to a Class.Field)
+var idToMangled = [""];
+
+function loadCallgraph(file)
+{
+ var suppressedFieldCalls = {};
+ var resolvedFunctions = {};
+
+ var numGCCalls = 0;
+
+ for (var line of readFileLines_gen(file)) {
+ line = line.replace(/\n/, "");
+
+ var match;
+ if (match = line.charAt(0) == "#" && /^\#(\d+) (.*)/.exec(line)) {
+ assert(functionNames.length == match[1]);
+ functionNames.push(match[2]);
+ var [ mangled, readable ] = splitFunction(match[2]);
+ if (mangled in readableNames)
+ readableNames[mangled].push(readable);
+ else
+ readableNames[mangled] = [ readable ];
+ mangledName[readable] = mangled;
+ idToMangled.push(mangled);
+ continue;
+ }
+ var suppressed = false;
+ if (line.indexOf("SUPPRESS_GC") != -1) {
+ match = /^(..)SUPPRESS_GC (.*)/.exec(line);
+ line = match[1] + match[2];
+ suppressed = true;
+ }
+ var tag = line.charAt(0);
+ if (match = tag == 'I' && /^I (\d+) VARIABLE ([^\,]*)/.exec(line)) {
+ var mangledCaller = idToMangled[match[1]];
+ var name = match[2];
+ if (!indirectCallCannotGC(functionNames[match[1]], name) && !suppressed)
+ addGCFunction(mangledCaller, "IndirectCall: " + name);
+ } else if (match = tag == 'F' && /^F (\d+) CLASS (.*?) FIELD (.*)/.exec(line)) {
+ var caller = idToMangled[match[1]];
+ var csu = match[2];
+ var fullfield = csu + "." + match[3];
+ if (suppressed)
+ suppressedFieldCalls[fullfield] = true;
+ else if (!fieldCallCannotGC(csu, fullfield))
+ addGCFunction(caller, "FieldCall: " + fullfield);
+ } else if (match = tag == 'D' && /^D (\d+) (\d+)/.exec(line)) {
+ var caller = idToMangled[match[1]];
+ var callee = idToMangled[match[2]];
+ addCallEdge(caller, callee, suppressed);
+ } else if (match = tag == 'R' && /^R (\d+) (\d+)/.exec(line)) {
+ var callerField = idToMangled[match[1]];
+ var callee = idToMangled[match[2]];
+ addCallEdge(callerField, callee, false);
+ resolvedFunctions[callerField] = true;
+ } else if (match = tag == 'T' && /^T (\d+) (.*)/.exec(line)) {
+ var mangled = idToMangled[match[1]];
+ var tag = match[2];
+ if (tag == 'GC Call') {
+ addGCFunction(mangled, "GC");
+ numGCCalls++;
+ }
+ }
+ }
+
+    // Add in any extra functions at the end. (If we did this early, it would
+    // mess up the id <-> name correspondence. Also, we need to know if the
+    // functions even exist in the first place.)
+ for (var func of extraGCFunctions()) {
+ addGCFunction(func, "annotation");
+ }
+
+ // Initialize suppressedFunctions to the set of all functions, and the
+ // worklist to all toplevel callers.
+ var worklist = [];
+ for (var callee in callerGraph)
+ suppressedFunctions[callee] = true;
+ for (var caller in calleeGraph) {
+ if (!(caller in callerGraph)) {
+ suppressedFunctions[caller] = true;
+ worklist.push(caller);
+ }
+ }
+
+ // Find all functions reachable via an unsuppressed call chain, and remove
+ // them from the suppressedFunctions set. Everything remaining is only
+ // reachable when GC is suppressed.
+ var top = worklist.length;
+ while (top > 0) {
+ name = worklist[--top];
+ if (!(name in suppressedFunctions))
+ continue;
+ delete suppressedFunctions[name];
+ if (!(name in calleeGraph))
+ continue;
+ for (var entry of calleeGraph[name]) {
+ if (!entry.suppressed)
+ worklist[top++] = entry.callee;
+ }
+ }
+
+    // Everything left in suppressedFunctions is only reachable with GC
+    // suppressed, so such functions are known to not GC; drop them from the
+    // set of GC functions.
+ for (var name in gcFunctions) {
+ if (name in suppressedFunctions)
+ delete gcFunctions[name];
+ }
+
+ for (var name in suppressedFieldCalls) {
+ suppressedFunctions[name] = true;
+ }
+
+ // Sanity check to make sure the callgraph has some functions annotated as
+ // GC Calls. This is mostly a check to be sure the earlier processing
+ // succeeded (as opposed to, say, running on empty xdb files because you
+ // didn't actually compile anything interesting.)
+ assert(numGCCalls > 0, "No GC functions found!");
+
+ // Initialize the worklist to all known gcFunctions.
+ var worklist = [];
+ for (var name in gcFunctions)
+ worklist.push(name);
+
+ // Recursively find all callers and add them to the set of gcFunctions.
+ while (worklist.length) {
+ name = worklist.shift();
+ assert(name in gcFunctions);
+ if (!(name in callerGraph))
+ continue;
+ for (var entry of callerGraph[name]) {
+ if (!entry.suppressed && addGCFunction(entry.caller, name))
+ worklist.push(entry.caller);
+ }
+ }
+
+ // Any field call that has been resolved to all possible callees can be
+ // trusted to not GC if all of those callees are known to not GC.
+ for (var name in resolvedFunctions) {
+ if (!(name in gcFunctions))
+ suppressedFunctions[name] = true;
+ }
+}
diff --git a/js/src/devtools/rootAnalysis/run-analysis.sh b/js/src/devtools/rootAnalysis/run-analysis.sh
new file mode 100755
index 000000000..bdfab6e68
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/run-analysis.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+SRCDIR=$(cd $(dirname $0)/../../../..; pwd)
+GECKO_DIR=$SRCDIR $SRCDIR/taskcluster/scripts/builder/build-haz-linux.sh $(pwd) "$@"
diff --git a/js/src/devtools/rootAnalysis/run-test.py b/js/src/devtools/rootAnalysis/run-test.py
new file mode 100644
index 000000000..3bc9085a0
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/run-test.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import site
+import subprocess
+import argparse
+
+testdir = os.path.abspath(os.path.join(os.path.dirname(__file__), 't'))
+site.addsitedir(testdir)
+from testlib import Test, equal
+
+scriptdir = os.path.abspath(os.path.dirname(__file__))
+
+parser = argparse.ArgumentParser(description='run hazard analysis tests')
+parser.add_argument(
+ '--js', default=os.environ.get('JS'),
+ help='JS binary to run the tests with')
+parser.add_argument(
+ '--sixgill', default=os.environ.get('SIXGILL', os.path.join(testdir, "sixgill")),
+ help='Path to root of sixgill installation')
+parser.add_argument(
+ '--sixgill-bin', default=os.environ.get('SIXGILL_BIN'),
+ help='Path to sixgill binary dir')
+parser.add_argument(
+ '--sixgill-plugin', default=os.environ.get('SIXGILL_PLUGIN'),
+ help='Full path to sixgill gcc plugin')
+parser.add_argument(
+ '--gccdir', default=os.environ.get('GCCDIR'),
+ help='Path to GCC installation dir')
+parser.add_argument(
+ '--cc', default=os.environ.get('CC'),
+ help='Path to gcc')
+parser.add_argument(
+ '--cxx', default=os.environ.get('CXX'),
+ help='Path to g++')
+parser.add_argument(
+ '--verbose', '-v', action='store_true',
+ help='Display verbose output, including commands executed')
+parser.add_argument(
+ 'tests', nargs='*', default=['sixgill-tree', 'suppression', 'hazards', 'exceptions'],
+ help='tests to run')
+
+cfg = parser.parse_args()
+
+if not cfg.js:
+ exit('Must specify JS binary through environment variable or --js option')
+if not cfg.cc:
+ if cfg.gccdir:
+ cfg.cc = os.path.join(cfg.gccdir, "bin", "gcc")
+ else:
+ cfg.cc = "gcc"
+if not cfg.cxx:
+ if cfg.gccdir:
+ cfg.cxx = os.path.join(cfg.gccdir, "bin", "g++")
+ else:
+ cfg.cxx = "g++"
+if not cfg.sixgill_bin:
+ cfg.sixgill_bin = os.path.join(cfg.sixgill, "usr", "bin")
+if not cfg.sixgill_plugin:
+ cfg.sixgill_plugin = os.path.join(cfg.sixgill, "usr", "libexec", "sixgill", "gcc", "xgill.so")
+
+subprocess.check_call([cfg.js, '-e', 'if (!getBuildConfiguration()["has-ctypes"]) quit(1)'])
+
+def binpath(prog):
+ return os.path.join(cfg.sixgill_bin, prog)
+
+try:
+ os.mkdir(os.path.join(testdir, 'out'))
+except OSError:
+ pass
+
+for name in cfg.tests:
+ name = os.path.basename(name)
+ indir = os.path.join(testdir, name)
+ outdir = os.path.join(testdir, 'out', name)
+ try:
+ os.mkdir(outdir)
+ except OSError:
+ pass
+
+ test = Test(indir, outdir, cfg)
+
+ os.chdir(outdir)
+ subprocess.call(["sh", "-c", "rm -f *.xdb"])
+ execfile(os.path.join(indir, "test.py"), {'test': test, 'equal': equal})
+ print("TEST-PASSED: %s" % name)
diff --git a/js/src/devtools/rootAnalysis/run_complete b/js/src/devtools/rootAnalysis/run_complete
new file mode 100755
index 000000000..b1fbadb81
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/run_complete
@@ -0,0 +1,380 @@
+#!/usr/bin/perl
+
+# Sixgill: Static assertion checker for C/C++ programs.
+# Copyright (C) 2009-2010 Stanford University
+# Author: Brian Hackett
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# do a complete run of the system from raw source to reports. this requires
+# various run_monitor processes to be running in the background (maybe on other
+# machines) and watching a shared poll_file for jobs. if the output directory
+# for this script already exists then an incremental analysis will be performed
+# and the reports will only reflect the changes since the earlier run.
+
+use strict;
+use warnings;
+use IO::Handle;
+use File::Basename qw(dirname);
+use Getopt::Long;
+use Cwd;
+
+#################################
+# environment specific settings #
+#################################
+
+my $WORKDIR;
+my $SIXGILL_BIN;
+
+# poll file shared with the run_monitor script.
+my $poll_file;
+
+# root directory of the project.
+my $build_dir;
+
+# directory containing gcc wrapper scripts.
+my $wrap_dir;
+
+# optional file with annotations from the web interface.
+my $ann_file = "";
+
+# optional output directory to do a diff against.
+my $old_dir = "";
+
+# run in the foreground
+my $foreground;
+
+my $builder = "make -j4";
+
+my $suppress_logs;
+GetOptions("build-root|b=s" => \$build_dir,
+ "poll-file=s" => \$poll_file,
+ "no-logs!" => \$suppress_logs,
+ "work-dir=s" => \$WORKDIR,
+ "sixgill-binaries|binaries|b=s" => \$SIXGILL_BIN,
+ "wrap-dir=s" => \$wrap_dir,
+ "annotations-file|annotations|a=s" => \$ann_file,
+ "old-dir|old=s" => \$old_dir,
+ "foreground!" => \$foreground,
+ "buildcommand=s" => \$builder,
+ )
+ or die;
+
+$WORKDIR ||= "sixgill-work";
+mkdir($WORKDIR, 0755) if ! -d $WORKDIR;
+$poll_file ||= "$WORKDIR/poll.file";
+$build_dir ||= "$WORKDIR/js-inbound-xgill";
+
+# Apply the defaults above before checking/creating the directories.
+if (not -d $build_dir) {
+ mkdir($build_dir);
+}
+if ($old_dir ne "" && not -d $old_dir) {
+ die "Old directory '$old_dir' does not exist\n";
+}
+
+if (!defined $SIXGILL_BIN) {
+ chomp(my $path = `which xmanager`);
+ if ($path) {
+ use File::Basename qw(dirname);
+ $SIXGILL_BIN = dirname($path);
+ } else {
+ die "Cannot find sixgill binaries. Use the -b option.";
+ }
+}
+
+$wrap_dir ||= "$WORKDIR/xgill-inbound/wrap_gcc";
+$wrap_dir = "$SIXGILL_BIN/../scripts/wrap_gcc" if not (-e "$wrap_dir/basecc");
+die "Bad wrapper directory: $wrap_dir" if not (-e "$wrap_dir/basecc");
+
+# code to clean the project from $build_dir.
+sub clean_project {
+ system("make clean");
+}
+
+# code to build the project from $build_dir.
+sub build_project {
+ return system($builder) >> 8;
+}
+
+our %kill_on_exit;
+END {
+ for my $pid (keys %kill_on_exit) {
+ kill('TERM', $pid);
+ }
+}
+
+# commands to start the various xgill binaries. timeouts can be specified
+# for the backend analyses here, and a memory limit can be specified for
+# xmanager if desired (and USE_COUNT_ALLOCATOR is defined in util/alloc.h).
+my $xmanager = "$SIXGILL_BIN/xmanager";
+my $xsource = "$SIXGILL_BIN/xsource";
+my $xmemlocal = "$SIXGILL_BIN/xmemlocal -timeout=20";
+my $xinfer = "$SIXGILL_BIN/xinfer -timeout=60";
+my $xcheck = "$SIXGILL_BIN/xcheck -timeout=30";
+
+# prefix directory to strip off source files.
+my $prefix_dir = $build_dir;
+
+##########################
+# general purpose script #
+##########################
+
+# Prevent ccache from being used. I don't think this does any good. The problem
+# I'm struggling with is that if autoconf.mk still has 'ccache gcc' in it, the
+# builds fail in a mysterious way.
+$ENV{CCACHE_COMPILERCHECK} = 'date +%s.%N';
+delete $ENV{CCACHE_PREFIX};
+
+my $usage = "USAGE: run_complete result-dir\n";
+my $result_dir = shift or die $usage;
+
+if (not $foreground) {
+ my $pid = fork();
+ if ($pid != 0) {
+ print "Forked, exiting...\n";
+ exit(0);
+ }
+}
+
+# if the result directory does not already exist, mark for a clean build.
+my $do_clean = 0;
+if (not (-d $result_dir)) {
+ $do_clean = 1;
+ mkdir $result_dir;
+}
+
+if (!$suppress_logs) {
+ my $log_file = "$result_dir/complete.log";
+ open(OUT, ">>", $log_file) or die "append to $log_file: $!";
+ OUT->autoflush(1); # don't buffer writes to the main log.
+
+ # redirect stdout and stderr to the log.
+ STDOUT->fdopen(\*OUT, "w");
+ STDERR->fdopen(\*OUT, "w");
+}
+
+# pids to wait on before exiting. these are collating worker output.
+my @waitpids;
+
+chdir $result_dir;
+
+# to do a partial run, comment out the commands here you don't want to do.
+
+my $status = run_build();
+
+# end of run commands.
+
+for my $pid (@waitpids) {
+ waitpid($pid, 0);
+ $status ||= $? >> 8;
+}
+
+print "Exiting run_complete with status $status\n";
+exit $status;
+
+# get the IP address which a freshly created manager is listening on.
+sub get_manager_address
+{
+ my $log_file = shift or die;
+
+ # give the manager one second to start, any longer and something's broken.
+ sleep(1);
+
+ my $log_data = `cat $log_file`;
+ my ($address) = $log_data =~ /Listening on ([\.\:0-9]*)/
+ or die "no manager found";
+ print OUT "Connecting to manager at $address\n" unless $suppress_logs;
+ print "Connecting to manager at $address.\n";
+ return $address;
+}
+
+sub logging_suffix {
+ my ($show_logs, $log_file) = @_;
+ return $show_logs ? "2>&1 | tee $log_file"
+ : "> $log_file 2>&1";
+}
+
+sub run_build
+{
+ print "build started: ";
+ print scalar(localtime());
+ print "\n";
+
+ # fork off a process to run the build.
+ defined(my $pid = fork) or die;
+
+ # log file for the manager.
+ my $manager_log_file = "$result_dir/build_manager.log";
+
+ if (!$pid) {
+ # this is the child process, fork another process to run a manager.
+ defined(my $pid = fork) or die;
+ my $logging = logging_suffix($suppress_logs, $manager_log_file);
+ exec("$xmanager -terminate-on-assert $logging") if (!$pid);
+ $kill_on_exit{$pid} = 1;
+
+ if (!$suppress_logs) {
+ # open new streams to redirect stdout and stderr.
+ open(LOGOUT, "> $result_dir/build.log");
+ open(LOGERR, "> $result_dir/build_err.log");
+ STDOUT->fdopen(\*LOGOUT, "w");
+ STDERR->fdopen(\*LOGERR, "w");
+ }
+
+ my $address = get_manager_address($manager_log_file);
+
+ # write the configuration file for the wrapper script.
+ my $config_file = "$WORKDIR/xgill.config";
+ open(CONFIG, ">", $config_file) or die "create $config_file: $!";
+ print CONFIG "$prefix_dir\n";
+ print CONFIG Cwd::abs_path("$result_dir/build_xgill.log")."\n";
+ print CONFIG "$address\n";
+ my @extra = ("-fplugin-arg-xgill-mangle=1");
+ push(@extra, "-fplugin-arg-xgill-annfile=$ann_file")
+ if ($ann_file ne "" && -e $ann_file);
+ print CONFIG join(" ", @extra) . "\n";
+ close(CONFIG);
+
+ # Tell the wrapper where to find the config
+ $ENV{"XGILL_CONFIG"} = Cwd::abs_path($config_file);
+
+ # update the PATH so that the build will see the wrappers.
+ if (exists $ENV{CC}) {
+ $ENV{PATH} = dirname($ENV{CC}) . ":$ENV{PATH}";
+ delete $ENV{CC};
+ delete $ENV{CXX};
+ }
+ $ENV{"PATH"} = "$wrap_dir:" . $ENV{"PATH"};
+
+ # do the build, cleaning if necessary.
+ chdir $build_dir;
+ clean_project() if ($do_clean);
+ my $exit_status = build_project();
+
+ # signal the manager that it's over.
+ system("$xsource -remote=$address -end-manager");
+
+ # wait for the manager to clean up and terminate.
+ print "Waiting for manager to finish (build status $exit_status)...\n";
+ waitpid($pid, 0);
+ my $manager_status = $?;
+ delete $kill_on_exit{$pid};
+
+ # build is finished, the complete run can resume.
+ # return value only useful if --foreground
+ print "Exiting with status " . ($manager_status || $exit_status) . "\n";
+ exit($manager_status || $exit_status);
+ }
+
+ # this is the complete process, wait for the build to finish.
+ waitpid($pid, 0);
+ my $status = $? >> 8;
+ print "build finished (status $status): ";
+ print scalar(localtime());
+ print "\n";
+
+ return $status;
+}
+
+sub run_pass
+{
+ my ($name, $command) = @_;
+ my $log_file = "$result_dir/manager.$name.log";
+
+ # extra commands to pass to the manager.
+ my $manager_extra = "";
+ $manager_extra .= "-modset-wait=10" if ($name eq "xmemlocal");
+
+ # fork off a manager process for the analysis.
+ defined(my $pid = fork) or die;
+ my $logging = logging_suffix($suppress_logs, $log_file);
+ exec("$xmanager $manager_extra $logging") if (!$pid);
+
+ my $address = get_manager_address($log_file);
+
+ # write the poll file for this pass.
+ if (! -d dirname($poll_file)) {
+ system("mkdir", "-p", dirname($poll_file));
+ }
+ open(POLL, "> $poll_file");
+ print POLL "$command\n";
+ print POLL "$result_dir/$name\n";
+ print POLL "$address\n";
+ close(POLL);
+
+ print "$name started: ";
+ print scalar(localtime());
+ print "\n";
+
+ waitpid($pid, 0);
+ unlink($poll_file);
+
+ print "$name finished: ";
+ print scalar(localtime());
+ print "\n";
+
+ # collate the worker's output into a single file. make this asynchronous
+ # so we can wait a bit and make sure we get all worker output.
+ defined($pid = fork) or die;
+
+ if (!$pid) {
+ sleep(20);
+ exec("cat $name.*.log > $name.log");
+ }
+
+ push(@waitpids, $pid);
+}
+
+# the names of all directories containing reports to archive.
+my $indexes;
+
+sub run_index
+{
+ my ($name, $kind) = @_;
+
+ return if (not (-e "report_$kind.xdb"));
+
+ print "$name started: ";
+ print scalar(localtime());
+ print "\n";
+
+ # make an index for the report diff if applicable.
+ if ($old_dir ne "") {
+ system("make_index $kind $old_dir > $name.diff.log");
+ system("mv $kind diff_$kind");
+ $indexes .= " diff_$kind";
+ }
+
+ # make an index for the full set of reports.
+ system("make_index $kind > $name.log");
+ $indexes .= " $kind";
+
+ print "$name finished: ";
+ print scalar(localtime());
+ print "\n";
+}
+
+sub archive_indexes
+{
+ print "archive started: ";
+ print scalar(localtime());
+ print "\n";
+
+ system("tar -czf reports.tgz $indexes");
+ system("rm -rf $indexes");
+
+ print "archive finished: ";
+ print scalar(localtime());
+ print "\n";
+}
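
run_pass above hands each analysis phase to background run_monitor workers through a three-line poll file: the command to run, the output prefix under the result directory, and the manager's address. A small illustrative sketch of that handoff in Python (paths and values are made up):

    def write_poll_file(poll_file, command, output_prefix, address):
        with open(poll_file, "w") as f:
            f.write(command + "\n")
            f.write(output_prefix + "\n")
            f.write(address + "\n")

    write_poll_file("sixgill-work/poll.file",
                    "xmemlocal -timeout=20",   # command for the workers
                    "results/xmemlocal",       # where the worker output goes
                    "127.0.0.1:1234")          # xmanager address from its log
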
diff --git a/js/src/devtools/rootAnalysis/t/exceptions/source.cpp b/js/src/devtools/rootAnalysis/t/exceptions/source.cpp
new file mode 100644
index 000000000..14169740e
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/exceptions/source.cpp
@@ -0,0 +1,42 @@
+#define ANNOTATE(property) __attribute__((tag(property)))
+
+struct Cell { int f; } ANNOTATE("GC Thing");
+
+extern void GC() ANNOTATE("GC Call");
+
+void GC()
+{
+ // If the implementation is too trivial, the function body won't be emitted at all.
+ asm("");
+}
+
+class RAII_GC {
+ public:
+ RAII_GC() {}
+ ~RAII_GC() { GC(); }
+};
+
+// ~AutoSomething calls GC because of the RAII_GC field. The constructor,
+// though, should *not* GC -- unless it throws an exception. Which is not
+// possible when compiled with -fno-exceptions.
+class AutoSomething {
+ RAII_GC gc;
+ public:
+ AutoSomething() : gc() {
+ asm(""); // Ooh, scary, this might throw an exception
+ }
+ ~AutoSomething() {
+ asm("");
+ }
+};
+
+extern void usevar(Cell* cell);
+
+void f() {
+ Cell* thing = nullptr; // Live range starts here
+
+ {
+ AutoSomething smth; // Constructor can GC only if exceptions are enabled
+ usevar(thing); // Live range ends here
+ } // In particular, 'thing' is dead at the destructor, so no hazard
+}
diff --git a/js/src/devtools/rootAnalysis/t/exceptions/test.py b/js/src/devtools/rootAnalysis/t/exceptions/test.py
new file mode 100644
index 000000000..f6d7f5e35
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/exceptions/test.py
@@ -0,0 +1,19 @@
+test.compile("source.cpp", '-fno-exceptions')
+test.run_analysis_script('gcTypes')
+
+hazards = test.load_hazards()
+assert(len(hazards) == 0)
+
+# If we compile with exceptions, then there *should* be a hazard because
+# AutoSomething::AutoSomething might throw an exception, which would cause the
+# partially-constructed value to be torn down, which will call ~RAII_GC.
+
+test.compile("source.cpp", '-fexceptions')
+test.run_analysis_script('gcTypes')
+
+hazards = test.load_hazards()
+assert(len(hazards) == 1)
+hazard = hazards[0]
+assert(hazard.function == 'void f()')
+assert(hazard.variable == 'thing')
+assert("AutoSomething::AutoSomething" in hazard.GCFunction)
diff --git a/js/src/devtools/rootAnalysis/t/hazards/source.cpp b/js/src/devtools/rootAnalysis/t/hazards/source.cpp
new file mode 100644
index 000000000..7f84a99db
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/hazards/source.cpp
@@ -0,0 +1,186 @@
+#define ANNOTATE(property) __attribute__((tag(property)))
+
+struct Cell { int f; } ANNOTATE("GC Thing");
+
+struct RootedCell { RootedCell(Cell*) {} } ANNOTATE("Rooted Pointer");
+
+class AutoSuppressGC_Base {
+ public:
+ AutoSuppressGC_Base() {}
+ ~AutoSuppressGC_Base() {}
+} ANNOTATE("Suppress GC");
+
+class AutoSuppressGC_Child : public AutoSuppressGC_Base {
+ public:
+ AutoSuppressGC_Child() : AutoSuppressGC_Base() {}
+};
+
+class AutoSuppressGC {
+ AutoSuppressGC_Child helpImBeingSuppressed;
+
+ public:
+ AutoSuppressGC() {}
+};
+
+extern void GC() ANNOTATE("GC Call");
+extern void invisible();
+
+void GC()
+{
+ // If the implementation is too trivial, the function body won't be emitted at all.
+ asm("");
+ invisible();
+}
+
+extern void usecell(Cell*);
+
+void suppressedFunction() {
+ GC(); // Calls GC, but is always called within AutoSuppressGC
+}
+
+void halfSuppressedFunction() {
+ GC(); // Calls GC, but is sometimes called within AutoSuppressGC
+}
+
+void unsuppressedFunction() {
+ GC(); // Calls GC, never within AutoSuppressGC
+}
+
+volatile static int x = 3;
+volatile static int* xp = &x;
+struct GCInDestructor {
+ ~GCInDestructor() {
+ invisible();
+ asm("");
+ *xp = 4;
+ GC();
+ }
+};
+
+Cell*
+f()
+{
+ GCInDestructor kaboom;
+
+ Cell cell;
+ Cell* cell1 = &cell;
+ Cell* cell2 = &cell;
+ Cell* cell3 = &cell;
+ Cell* cell4 = &cell;
+ {
+ AutoSuppressGC nogc;
+ suppressedFunction();
+ halfSuppressedFunction();
+ }
+ usecell(cell1);
+ halfSuppressedFunction();
+ usecell(cell2);
+ unsuppressedFunction();
+ {
+ // Old bug: it would look from the first AutoSuppressGC constructor it
+ // found to the last destructor. This statement *should* have no effect.
+ AutoSuppressGC nogc;
+ }
+ usecell(cell3);
+ Cell* cell5 = &cell;
+ usecell(cell5);
+
+ // Hazard in return value due to ~GCInDestructor
+ Cell* cell6 = &cell;
+ return cell6;
+}
+
+Cell* copy_and_gc(Cell* src)
+{
+ GC();
+ return reinterpret_cast<Cell*>(88);
+}
+
+void use(Cell* cell)
+{
+ static int x = 0;
+ if (cell)
+ x++;
+}
+
+struct CellContainer {
+ Cell* cell;
+ CellContainer() {
+ asm("");
+ }
+};
+
+void loopy()
+{
+ Cell cell;
+
+ // No hazard: haz1 is not live during call to copy_and_gc.
+ Cell* haz1;
+ for (int i = 0; i < 10; i++) {
+ haz1 = copy_and_gc(haz1);
+ }
+
+ // No hazard: haz2 is live up to just before the GC, and starting at the
+ // next statement after it, but not across the GC.
+ Cell* haz2 = &cell;
+ for (int j = 0; j < 10; j++) {
+ use(haz2);
+ GC();
+ haz2 = &cell;
+ }
+
+ // Hazard: haz3 is live from the final statement in one iteration, across
+ // the GC in the next, to the use in the 2nd statement.
+ Cell* haz3;
+ for (int k = 0; k < 10; k++) {
+ GC();
+ use(haz3);
+ haz3 = &cell;
+ }
+
+ // Hazard: haz4 is live across a GC hidden in a loop.
+ Cell* haz4 = &cell;
+ for (int i2 = 0; i2 < 10; i2++) {
+ GC();
+ }
+ use(haz4);
+
+ // Hazard: haz5 is live from within a loop across a GC.
+ Cell* haz5;
+ for (int i3 = 0; i3 < 10; i3++) {
+ haz5 = &cell;
+ }
+ GC();
+ use(haz5);
+
+ // No hazard: similar to the haz3 case, but verifying that we do not get
+ // into an infinite loop.
+ Cell* haz6;
+ for (int i4 = 0; i4 < 10; i4++) {
+ GC();
+ haz6 = &cell;
+ }
+
+ // No hazard: haz7 is constructed within the body, so it can't make a
+ // hazard across iterations. Note that this requires CellContainer to have
+ // a constructor, because otherwise the analysis doesn't see where
+ // variables are declared. (With the constructor, it knows that
+ // construction of haz7 obliterates any previous value it might have had.
+ // Not that that's possible given its scope, but the analysis doesn't get
+ // that information.)
+ for (int i5 = 0; i5 < 10; i5++) {
+ GC();
+ CellContainer haz7;
+ use(haz7.cell);
+ haz7.cell = &cell;
+ }
+
+ // Hazard: make sure we *can* see hazards across iterations involving
+ // CellContainer.
+ CellContainer haz8;
+ for (int i6 = 0; i6 < 10; i6++) {
+ GC();
+ use(haz8.cell);
+ haz8.cell = &cell;
+ }
+}
diff --git a/js/src/devtools/rootAnalysis/t/hazards/test.py b/js/src/devtools/rootAnalysis/t/hazards/test.py
new file mode 100644
index 000000000..3eb08aa09
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/hazards/test.py
@@ -0,0 +1,47 @@
+test.compile("source.cpp")
+test.run_analysis_script('gcTypes')
+
+# gcFunctions should contain every function that can GC; we get to rely on unmangled names here.
+gcFunctions = test.load_gcFunctions()
+print(gcFunctions)
+assert('void GC()' in gcFunctions)
+assert('void suppressedFunction()' not in gcFunctions)
+assert('void halfSuppressedFunction()' in gcFunctions)
+assert('void unsuppressedFunction()' in gcFunctions)
+assert('Cell* f()' in gcFunctions)
+
+hazards = test.load_hazards()
+hazmap = {haz.variable: haz for haz in hazards}
+assert('cell1' not in hazmap)
+assert('cell2' in hazmap)
+assert('cell3' in hazmap)
+assert('cell4' not in hazmap)
+assert('cell5' not in hazmap)
+assert('cell6' not in hazmap)
+assert('<returnvalue>' in hazmap)
+
+# All hazards should be in f() and loopy()
+assert(hazmap['cell2'].function == 'Cell* f()')
+print(len(set(haz.function for haz in hazards)))
+assert(len(set(haz.function for haz in hazards)) == 2)
+
+# Check that the correct GC call is reported for each hazard. (cell3 has a
+# hazard from two different GC calls; it doesn't really matter which is
+# reported.)
+assert(hazmap['cell2'].GCFunction == 'void halfSuppressedFunction()')
+assert(hazmap['cell3'].GCFunction in ('void halfSuppressedFunction()', 'void unsuppressedFunction()'))
+assert(hazmap['<returnvalue>'].GCFunction == 'void GCInDestructor::~GCInDestructor()')
+
+# Type names are handy to have in the report.
+assert(hazmap['cell2'].type == 'Cell*')
+assert(hazmap['<returnvalue>'].type == 'Cell*')
+
+# loopy hazards. See comments in source.
+assert('haz1' not in hazmap)
+assert('haz2' not in hazmap)
+assert('haz3' in hazmap)
+assert('haz4' in hazmap)
+assert('haz5' in hazmap)
+assert('haz6' not in hazmap)
+assert('haz7' not in hazmap)
+assert('haz8' in hazmap)
diff --git a/js/src/devtools/rootAnalysis/t/sixgill-tree/source.cpp b/js/src/devtools/rootAnalysis/t/sixgill-tree/source.cpp
new file mode 100644
index 000000000..2de9ef4bb
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/sixgill-tree/source.cpp
@@ -0,0 +1,70 @@
+#define ANNOTATE(property) __attribute__((tag(property)))
+
+namespace js {
+namespace gc {
+struct Cell { int f; } ANNOTATE("GC Thing");
+}
+}
+
+struct Bogon {
+};
+
+struct JustACell : public js::gc::Cell {
+ bool iHaveNoDataMembers() { return true; }
+};
+
+struct JSObject : public js::gc::Cell, public Bogon {
+ int g;
+};
+
+struct SpecialObject : public JSObject {
+ int z;
+};
+
+struct ErrorResult {
+ bool hasObj;
+ JSObject *obj;
+ void trace() {}
+} ANNOTATE("Suppressed GC Pointer");
+
+struct OkContainer {
+ ErrorResult res;
+ bool happy;
+};
+
+struct UnrootedPointer {
+ JSObject *obj;
+};
+
+template <typename T>
+class Rooted {
+ T data;
+} ANNOTATE("Rooted Pointer");
+
+extern void js_GC() ANNOTATE("GC Call") ANNOTATE("Slow");
+
+void js_GC() {}
+
+void root_arg(JSObject *obj, JSObject *random)
+{
+ // Use all these types so they get included in the output.
+ SpecialObject so;
+ UnrootedPointer up;
+ Bogon b;
+ OkContainer okc;
+ Rooted<JSObject*> ro;
+ Rooted<SpecialObject*> rso;
+
+ obj = random;
+
+ JSObject *other1 = obj;
+ js_GC();
+
+ float MARKER1 = 0;
+ JSObject *other2 = obj;
+ other1->f = 1;
+ other2->f = -1;
+
+ unsigned int u1 = 1;
+ unsigned int u2 = -1;
+}
diff --git a/js/src/devtools/rootAnalysis/t/sixgill-tree/test.py b/js/src/devtools/rootAnalysis/t/sixgill-tree/test.py
new file mode 100644
index 000000000..c0c0263cd
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/sixgill-tree/test.py
@@ -0,0 +1,60 @@
+import re
+
+test.compile("source.cpp")
+test.computeGCTypes()
+body = test.process_body(test.load_db_entry("src_body", re.compile(r'root_arg'))[0])
+
+# Rendering positive and negative integers
+marker1 = body.assignment_line('MARKER1')
+equal(body.edge_from_line(marker1 + 2)['Exp'][1]['String'], '1')
+equal(body.edge_from_line(marker1 + 3)['Exp'][1]['String'], '-1')
+
+equal(body.edge_from_point(body.assignment_point('u1'))['Exp'][1]['String'], '1')
+equal(body.edge_from_point(body.assignment_point('u2'))['Exp'][1]['String'], '4294967295')
+
+assert('obj' in body['Variables'])
+assert('random' in body['Variables'])
+assert('other1' in body['Variables'])
+assert('other2' in body['Variables'])
+
+# Test function annotations
+js_GC = test.process_body(test.load_db_entry("src_body", re.compile(r'js_GC'))[0])
+annotations = js_GC['Variables']['void js_GC()']['Annotation']
+assert(annotations)
+found_call_tag = False
+for annotation in annotations:
+ (annType, value) = annotation['Name']
+ if annType == 'Tag' and value == 'GC Call':
+ found_call_tag = True
+assert(found_call_tag)
+
+# Test type annotations
+
+# js::gc::Cell first
+cell = test.load_db_entry("src_comp", 'js::gc::Cell')[0]
+assert(cell['Kind'] == 'Struct')
+annotations = cell['Annotation']
+assert(len(annotations) == 1)
+(tag, value) = annotations[0]['Name']
+assert(tag == 'Tag')
+assert(value == 'GC Thing')
+
+# Check JSObject inheritance.
+JSObject = test.load_db_entry("src_comp", 'JSObject')[0]
+bases = [ b['Base'] for b in JSObject['CSUBaseClass'] ]
+assert('js::gc::Cell' in bases)
+assert('Bogon' in bases)
+assert(len(bases) == 2)
+
+# Check type analysis
+gctypes = test.load_gcTypes()
+assert('js::gc::Cell' in gctypes['GCThings'])
+assert('JustACell' in gctypes['GCThings'])
+assert('JSObject' in gctypes['GCThings'])
+assert('SpecialObject' in gctypes['GCThings'])
+assert('UnrootedPointer' in gctypes['GCPointers'])
+assert('Bogon' not in gctypes['GCThings'])
+assert('Bogon' not in gctypes['GCPointers'])
+assert('ErrorResult' not in gctypes['GCPointers'])
+assert('OkContainer' not in gctypes['GCPointers'])
+assert('class Rooted<JSObject*>' not in gctypes['GCPointers'])
diff --git a/js/src/devtools/rootAnalysis/t/sixgill.py b/js/src/devtools/rootAnalysis/t/sixgill.py
new file mode 100644
index 000000000..2bdf76a49
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/sixgill.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import defaultdict
+
+# Simplified version of the body info.
+class Body(dict):
+ def __init__(self, body):
+ self['BlockIdKind'] = body['BlockId']['Kind']
+ if 'Variable' in body['BlockId']:
+ self['BlockName'] = body['BlockId']['Variable']['Name'][0].split("$")[-1]
+ loc = body['Location']
+ self['LineRange'] = (loc[0]['Line'], loc[1]['Line'])
+ self['Filename'] = loc[0]['CacheString']
+ self['Edges'] = body.get('PEdge', [])
+ self['Points'] = { i: p['Location']['Line'] for i, p in enumerate(body['PPoint'], 1) }
+ self['Index'] = body['Index']
+ self['Variables'] = { x['Variable']['Name'][0].split("$")[-1]: x['Type'] for x in body['DefineVariable'] }
+
+ # Indexes
+ self['Line2Points'] = defaultdict(list)
+ for point, line in self['Points'].items():
+ self['Line2Points'][line].append(point)
+ self['SrcPoint2Edges'] = defaultdict(list)
+ for edge in self['Edges']:
+ src, dst = edge['Index']
+ self['SrcPoint2Edges'][src].append(edge)
+ self['Line2Edges'] = defaultdict(list)
+ for (src, edges) in self['SrcPoint2Edges'].items():
+ line = self['Points'][src]
+ self['Line2Edges'][line].extend(edges)
+
+ def edges_from_line(self, line):
+ return self['Line2Edges'][line]
+
+ def edge_from_line(self, line):
+ edges = self.edges_from_line(line)
+ assert(len(edges) == 1)
+ return edges[0]
+
+ def edges_from_point(self, point):
+ return self['SrcPoint2Edges'][point]
+
+ def edge_from_point(self, point):
+ edges = self.edges_from_point(point)
+ assert(len(edges) == 1)
+ return edges[0]
+
+ def assignment_point(self, varname):
+ for edge in self['Edges']:
+ if edge['Kind'] != 'Assign':
+ continue
+ dst = edge['Exp'][0]
+ if dst['Kind'] != 'Var':
+ continue
+ if dst['Variable']['Name'][0] == varname:
+ return edge['Index'][0]
+ raise Exception("assignment to variable %s not found" % varname)
+
+ def assignment_line(self, varname):
+ return self['Points'][self.assignment_point(varname)]
diff --git a/js/src/devtools/rootAnalysis/t/suppression/source.cpp b/js/src/devtools/rootAnalysis/t/suppression/source.cpp
new file mode 100644
index 000000000..e7b41b4cb
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/suppression/source.cpp
@@ -0,0 +1,64 @@
+#define ANNOTATE(property) __attribute__((tag(property)))
+
+struct Cell { int f; } ANNOTATE("GC Thing");
+
+class AutoSuppressGC_Base {
+ public:
+ AutoSuppressGC_Base() {}
+ ~AutoSuppressGC_Base() {}
+} ANNOTATE("Suppress GC");
+
+class AutoSuppressGC_Child : public AutoSuppressGC_Base {
+ public:
+ AutoSuppressGC_Child() : AutoSuppressGC_Base() {}
+};
+
+class AutoSuppressGC {
+ AutoSuppressGC_Child helpImBeingSuppressed;
+
+ public:
+ AutoSuppressGC() {}
+};
+
+extern void GC() ANNOTATE("GC Call");
+
+void GC()
+{
+ // If the implementation is too trivial, the function body won't be emitted at all.
+ asm("");
+}
+
+extern void foo(Cell*);
+
+void suppressedFunction() {
+ GC(); // Calls GC, but is always called within AutoSuppressGC
+}
+
+void halfSuppressedFunction() {
+ GC(); // Calls GC, but is sometimes called within AutoSuppressGC
+}
+
+void unsuppressedFunction() {
+ GC(); // Calls GC, never within AutoSuppressGC
+}
+
+void f() {
+ Cell* cell1 = nullptr;
+ Cell* cell2 = nullptr;
+ Cell* cell3 = nullptr;
+ {
+ AutoSuppressGC nogc;
+ suppressedFunction();
+ halfSuppressedFunction();
+ }
+ foo(cell1);
+ halfSuppressedFunction();
+ foo(cell2);
+ unsuppressedFunction();
+ {
+ // Old bug: it would look from the first AutoSuppressGC constructor it
+ // found to the last destructor. This statement *should* have no effect.
+ AutoSuppressGC nogc;
+ }
+ foo(cell3);
+}
diff --git a/js/src/devtools/rootAnalysis/t/suppression/test.py b/js/src/devtools/rootAnalysis/t/suppression/test.py
new file mode 100644
index 000000000..65974cc33
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/suppression/test.py
@@ -0,0 +1,23 @@
+test.compile("source.cpp")
+test.run_analysis_script('gcTypes', upto='gcFunctions')
+
+# The suppressions file uses only mangled names since it's for internal use,
+# though I may change that soon given (1) the unfortunate non-uniqueness of
+# mangled constructor names, and (2) the usefulness of this file for
+# mrgiggles's reporting.
+suppressed = test.load_suppressed_functions()
+
+# Only one of these is fully suppressed (ie, *always* called within the scope
+# of an AutoSuppressGC).
+assert(len(filter(lambda f: 'suppressedFunction' in f, suppressed)) == 1)
+assert(len(filter(lambda f: 'halfSuppressedFunction' in f, suppressed)) == 0)
+assert(len(filter(lambda f: 'unsuppressedFunction' in f, suppressed)) == 0)
+
+# gcFunctions should be the inverse, but we get to rely on unmangled names here.
+gcFunctions = test.load_gcFunctions()
+print(gcFunctions)
+assert('void GC()' in gcFunctions)
+assert('void suppressedFunction()' not in gcFunctions)
+assert('void halfSuppressedFunction()' in gcFunctions)
+assert('void unsuppressedFunction()' in gcFunctions)
+assert('void f()' in gcFunctions)
diff --git a/js/src/devtools/rootAnalysis/t/testlib.py b/js/src/devtools/rootAnalysis/t/testlib.py
new file mode 100644
index 000000000..438398f1e
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/t/testlib.py
@@ -0,0 +1,120 @@
+import json
+import os
+import re
+import subprocess
+
+from sixgill import Body
+from collections import defaultdict, namedtuple
+
+scriptdir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+
+HazardSummary = namedtuple('HazardSummary', ['function', 'variable', 'type', 'GCFunction', 'location'])
+
+
+def equal(got, expected):
+ if got != expected:
+ print("Got '%s', expected '%s'" % (got, expected))
+
+def extract_unmangled(func):
+ return func.split('$')[-1]
+
+
+class Test(object):
+ def __init__(self, indir, outdir, cfg):
+ self.indir = indir
+ self.outdir = outdir
+ self.cfg = cfg
+
+ def infile(self, path):
+ return os.path.join(self.indir, path)
+
+ def binpath(self, prog):
+ return os.path.join(self.cfg.sixgill_bin, prog)
+
+ def compile(self, source, options = ''):
+ cmd = "{CXX} -c {source} -O3 -std=c++11 -fplugin={sixgill} -fplugin-arg-xgill-mangle=1 {options}".format(
+ source=self.infile(source),
+ CXX=self.cfg.cxx, sixgill=self.cfg.sixgill_plugin,
+ options=options)
+ if self.cfg.verbose:
+ print("Running %s" % cmd)
+ subprocess.check_call(["sh", "-c", cmd])
+
+ def load_db_entry(self, dbname, pattern):
+ '''Look up an entry from an XDB database file, 'pattern' may be an exact
+ matching string, or an re pattern object matching a single entry.'''
+
+ if not isinstance(pattern, basestring):
+ output = subprocess.check_output([self.binpath("xdbkeys"), dbname + ".xdb"])
+ matches = filter(lambda _: re.search(pattern, _), output.splitlines())
+ if len(matches) == 0:
+ raise Exception("entry not found")
+ if len(matches) > 1:
+ raise Exception("multiple entries found")
+ pattern = matches[0]
+
+ output = subprocess.check_output([self.binpath("xdbfind"), "-json", dbname + ".xdb", pattern])
+ return json.loads(output)
+
+ def run_analysis_script(self, phase, upto=None):
+ file("defaults.py", "w").write('''\
+analysis_scriptdir = '{scriptdir}'
+sixgill_bin = '{bindir}'
+'''.format(scriptdir=scriptdir, bindir=self.cfg.sixgill_bin))
+ cmd = [os.path.join(scriptdir, "analyze.py"), phase]
+ if upto:
+ cmd += ["--upto", upto]
+ cmd.append("--source=%s" % self.indir)
+ cmd.append("--objdir=%s" % self.outdir)
+ cmd.append("--js=%s" % self.cfg.js)
+ if self.cfg.verbose:
+ cmd.append("--verbose")
+ print("Running " + " ".join(cmd))
+ subprocess.check_call(cmd)
+
+ def computeGCTypes(self):
+ self.run_analysis_script("gcTypes", upto="gcTypes")
+
+ def computeHazards(self):
+ self.run_analysis_script("gcTypes")
+
+ def load_text_file(self, filename, extract=lambda l: l):
+ fullpath = os.path.join(self.outdir, filename)
+ values = (extract(line.strip()) for line in file(fullpath))
+ return filter(lambda _: _ is not None, values)
+
+ def load_suppressed_functions(self):
+ return set(self.load_text_file("suppressedFunctions.lst"))
+
+ def load_gcTypes(self):
+ def grab_type(line):
+ m = re.match(r'^(GC\w+): (.*)', line)
+ if m:
+ return (m.group(1) + 's', m.group(2))
+ return None
+
+ gctypes = defaultdict(list)
+ for collection, typename in self.load_text_file('gcTypes.txt', extract=grab_type):
+ gctypes[collection].append(typename)
+ return gctypes
+
+ def load_gcFunctions(self):
+ return self.load_text_file('gcFunctions.lst', extract=extract_unmangled)
+
+ def load_hazards(self):
+ def grab_hazard(line):
+ m = re.match(r"Function '(.*?)' has unrooted '(.*?)' of type '(.*?)' live across GC call '(.*?)' at (.*)", line)
+ if m:
+ info = list(m.groups())
+ info[0] = info[0].split("$")[-1]
+ info[3] = info[3].split("$")[-1]
+ return HazardSummary(*info)
+ return None
+
+ return self.load_text_file('rootingHazards.txt', extract=grab_hazard)
+
+ def process_body(self, body):
+ return Body(body)
+
+ def process_bodies(self, bodies):
+ return [self.process_body(b) for b in bodies]
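
load_hazards above expects one hazard per line of rootingHazards.txt, in the format matched by its regex; here is an illustration with a made-up line (real lines carry mangled$unmangled names, which load_hazards strips):

    import re

    line = ("Function 'void f()' has unrooted 'thing' of type 'Cell*' "
            "live across GC call 'void GC()' at source.cpp:50")
    m = re.match(r"Function '(.*?)' has unrooted '(.*?)' of type '(.*?)'"
                 r" live across GC call '(.*?)' at (.*)", line)
    print(m.groups())
    # ('void f()', 'thing', 'Cell*', 'void GC()', 'source.cpp:50')
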
diff --git a/js/src/devtools/rootAnalysis/utility.js b/js/src/devtools/rootAnalysis/utility.js
new file mode 100644
index 000000000..06c18804f
--- /dev/null
+++ b/js/src/devtools/rootAnalysis/utility.js
@@ -0,0 +1,211 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
+
+"use strict";
+
+// gcc appends this to mangled function names for "not in charge"
+// constructors/destructors.
+var internalMarker = " *INTERNAL* ";
+
+if (! Set.prototype.hasOwnProperty("update")) {
+ Object.defineProperty(Set.prototype, "update", {
+ value: function (collection) {
+ for (let elt of collection)
+ this.add(elt);
+ }
+ });
+}
+
+function assert(x, msg)
+{
+ if (x)
+ return;
+ debugger;
+ if (msg)
+ throw "assertion failed: " + msg + "\n" + (Error().stack);
+ else
+ throw "assertion failed: " + (Error().stack);
+}
+
+function defined(x) {
+ return x !== undefined;
+}
+
+function xprint(x, padding)
+{
+ if (!padding)
+ padding = "";
+ if (x instanceof Array) {
+ print(padding + "[");
+ for (var elem of x)
+ xprint(elem, padding + " ");
+ print(padding + "]");
+ } else if (x instanceof Object) {
+ print(padding + "{");
+ for (var prop in x) {
+ print(padding + " " + prop + ":");
+ xprint(x[prop], padding + " ");
+ }
+ print(padding + "}");
+ } else {
+ print(padding + x);
+ }
+}
+
+function sameBlockId(id0, id1)
+{
+ if (id0.Kind != id1.Kind)
+ return false;
+ if (!sameVariable(id0.Variable, id1.Variable))
+ return false;
+ if (id0.Kind == "Loop" && id0.Loop != id1.Loop)
+ return false;
+ return true;
+}
+
+function sameVariable(var0, var1)
+{
+ assert("Name" in var0 || var0.Kind == "This" || var0.Kind == "Return");
+ assert("Name" in var1 || var1.Kind == "This" || var1.Kind == "Return");
+ if ("Name" in var0)
+ return "Name" in var1 && var0.Name[0] == var1.Name[0];
+ return var0.Kind == var1.Kind;
+}
+
+function blockIdentifier(body)
+{
+ if (body.BlockId.Kind == "Loop")
+ return body.BlockId.Loop;
+ assert(body.BlockId.Kind == "Function", "body.Kind should be Function, not " + body.BlockId.Kind);
+ return body.BlockId.Variable.Name[0];
+}
+
+function collectBodyEdges(body)
+{
+ body.predecessors = [];
+ body.successors = [];
+ if (!("PEdge" in body))
+ return;
+
+ for (var edge of body.PEdge) {
+ var [ source, target ] = edge.Index;
+ if (!(target in body.predecessors))
+ body.predecessors[target] = [];
+ body.predecessors[target].push(edge);
+ if (!(source in body.successors))
+ body.successors[source] = [];
+ body.successors[source].push(edge);
+ }
+}
+
+function getPredecessors(body)
+{
+ try {
+ if (!('predecessors' in body))
+ collectBodyEdges(body);
+ } catch (e) {
+ debugger;
+ printErr("body is " + body);
+ }
+ return body.predecessors;
+}
+
+function getSuccessors(body)
+{
+ if (!('successors' in body))
+ collectBodyEdges(body);
+ return body.successors;
+}
+
+// Split apart a function from sixgill into its mangled and unmangled name. If
+// no mangled name was given, use the unmangled name as its mangled name
+function splitFunction(func)
+{
+ var split = func.indexOf("$");
+ if (split != -1)
+ return [ func.substr(0, split), func.substr(split+1) ];
+ split = func.indexOf("|");
+ if (split != -1)
+ return [ func.substr(0, split), func.substr(split+1) ];
+ return [ func, func ];
+}
+
+function mangled(fullname)
+{
+ var [ mangled, unmangled ] = splitFunction(fullname);
+ return mangled;
+}
+
+function readable(fullname)
+{
+ var [ mangled, unmangled ] = splitFunction(fullname);
+ return unmangled;
+}
+
+function xdbLibrary()
+{
+ var lib = ctypes.open(os.getenv('XDB'));
+ var api = {
+ open: lib.declare("xdb_open", ctypes.default_abi, ctypes.void_t, ctypes.char.ptr),
+ min_data_stream: lib.declare("xdb_min_data_stream", ctypes.default_abi, ctypes.int),
+ max_data_stream: lib.declare("xdb_max_data_stream", ctypes.default_abi, ctypes.int),
+ read_key: lib.declare("xdb_read_key", ctypes.default_abi, ctypes.char.ptr, ctypes.int),
+ read_entry: lib.declare("xdb_read_entry", ctypes.default_abi, ctypes.char.ptr, ctypes.char.ptr),
+ free_string: lib.declare("xdb_free", ctypes.default_abi, ctypes.void_t, ctypes.char.ptr)
+ };
+ try {
+ api.lookup_key = lib.declare("xdb_lookup_key", ctypes.default_abi, ctypes.int, ctypes.char.ptr);
+ } catch (e) {
+ // lookup_key is for development use only and is not strictly necessary.
+ }
+ return api;
+}
+
+function cLibrary()
+{
+ var lib;
+ try {
+ lib = ctypes.open("libc.so.6");
+ } catch(e) {
+ lib = ctypes.open("libc.so");
+ }
+
+ return {
+ fopen: lib.declare("fopen", ctypes.default_abi, ctypes.void_t.ptr, ctypes.char.ptr, ctypes.char.ptr),
+ getline: lib.declare("getline", ctypes.default_abi, ctypes.ssize_t, ctypes.char.ptr.ptr, ctypes.size_t.ptr, ctypes.void_t.ptr),
+ fclose: lib.declare("fclose", ctypes.default_abi, ctypes.int, ctypes.void_t.ptr),
+ free: lib.declare("free", ctypes.default_abi, ctypes.void_t, ctypes.void_t.ptr),
+ };
+}
+
+function* readFileLines_gen(filename)
+{
+ var libc = cLibrary();
+ var linebuf = ctypes.char.ptr();
+ var bufsize = ctypes.size_t(0);
+ var fp = libc.fopen(filename, "r");
+ if (fp.isNull())
+ throw "Unable to open '" + filename + "'"
+
+ while (libc.getline(linebuf.address(), bufsize.address(), fp) > 0)
+ yield linebuf.readString();
+ libc.fclose(fp);
+ libc.free(ctypes.void_t.ptr(linebuf));
+}
+
+function addToKeyedList(collection, key, entry)
+{
+ if (!(key in collection))
+ collection[key] = [];
+ collection[key].push(entry);
+}
+
+function loadTypeInfo(filename)
+{
+ var info = {};
+ for (var line of readFileLines_gen(filename)) {
+ line = line.replace(/\n/, "");
+ let [property, name] = line.split("$$");
+ addToKeyedList(info, property, name);
+ }
+ return info;
+}
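
Two small text conventions recur in utility.js above: function identifiers are "mangled$readable" (with "|" as an alternate separator), and the type-info file read by loadTypeInfo has one "property$$name" pair per line. A Python rendering of both, with made-up sample inputs:

    def split_function(func):
        for sep in ("$", "|"):
            if sep in func:
                mangled, readable = func.split(sep, 1)
                return mangled, readable
        return func, func                      # no mangled name given

    print(split_function("_Z2fv$void f()"))    # ('_Z2fv', 'void f()')

    info = {}
    for line in ["GC Thing$$js::gc::Cell"]:    # one property$$name pair per line
        prop, name = line.rstrip("\n").split("$$", 1)
        info.setdefault(prop, []).append(name)
    print(info)                                # {'GC Thing': ['js::gc::Cell']}
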
diff --git a/js/src/devtools/sharkctl.cpp b/js/src/devtools/sharkctl.cpp
new file mode 100644
index 000000000..7b2dc02ae
--- /dev/null
+++ b/js/src/devtools/sharkctl.cpp
@@ -0,0 +1,207 @@
+/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifdef __APPLE__
+
+#include "sharkctl.h"
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include "jsutil.h"
+
+#define SHARK_MSG_ACQUIRE 0x29a
+#define SHARK_MSG_RELEASE 0x29b
+#define SHARK_MSG_STOP 0x29c
+#define SHARK_MSG_START 0x29d
+
+#define RECV_SIZEOF(ty) offsetof(ty, out)
+
+// Private API in libSystem.dylib
+extern "C" void bootstrap_look_up(mach_port_t special_port, const char* name,
+ mach_port_t* dest_port);
+
+struct chud_client_acquire_msg {
+ mach_msg_header_t hdr;
+ uint32_t unk0; // always 0
+ uint32_t unk1; // always 1
+ uint32_t pid;
+ uint32_t out[2];
+};
+
+struct chud_client_start_msg {
+ mach_msg_header_t hdr;
+ uint32_t unk0; // always 1
+ uint32_t name0;
+ uint32_t arg2; // always 6
+ uint8_t unk1; // always 0
+ uint8_t unk2; // always 1
+ uint8_t unk3; // uninitialized
+ uint8_t unk4; // always 1
+ uint32_t unk5; // always 0
+ uint32_t unk6; // always 1
+ uint32_t name1; // same as name0
+};
+
+struct chud_client_stop_msg {
+ mach_msg_header_t hdr;
+ uint32_t out[5];
+};
+
+struct chud_client_release_msg {
+ mach_msg_header_t hdr;
+ uint32_t unk0; // always 0
+ uint32_t unk1; // always 1
+ uint32_t pid;
+ uint32_t out[2];
+};
+
+static mach_port_t
+CreatePort(void)
+{
+ mach_port_t bootstrap_port, shark_port = 0;
+ task_get_special_port(mach_task_self(), TASK_BOOTSTRAP_PORT,
+ &bootstrap_port);
+ bootstrap_look_up(bootstrap_port, "CHUD_IPC", &shark_port);
+ return shark_port;
+}
+
+static mach_msg_return_t
+Connect(mach_port_t shark_port)
+{
+ mach_port_t reply_port = mig_get_reply_port();
+
+ struct chud_client_acquire_msg msg;
+ msg.hdr.msgh_bits = 0x1513;
+ msg.hdr.msgh_size = sizeof(mach_msg_header_t);
+ msg.hdr.msgh_remote_port = shark_port;
+ msg.hdr.msgh_local_port = reply_port;
+ msg.hdr.msgh_reserved = 0;
+ msg.hdr.msgh_id = SHARK_MSG_ACQUIRE;
+ msg.unk0 = 0;
+ msg.unk1 = 1;
+ msg.pid = getpid();
+
+ MOZ_ASSERT(RECV_SIZEOF(struct chud_client_acquire_msg) == 0x24);
+ MOZ_ASSERT(sizeof(msg) == 0x2c);
+ mach_msg_return_t result = mach_msg(&msg.hdr, MACH_SEND_MSG | MACH_RCV_MSG,
+ RECV_SIZEOF(struct chud_client_acquire_msg),
+ sizeof(msg), reply_port, 0, 0);
+ mig_dealloc_reply_port(reply_port);
+ return result;
+}
+
+static mach_msg_return_t
+Start(mach_port_t shark_port, uint32_t name)
+{
+ mach_port_t reply_port = mig_get_reply_port();
+
+ struct chud_client_start_msg msg;
+ msg.hdr.msgh_bits = 0x80001513;
+ msg.hdr.msgh_size = sizeof(mach_msg_header_t);
+ msg.hdr.msgh_remote_port = shark_port;
+ msg.hdr.msgh_local_port = reply_port;
+ msg.hdr.msgh_reserved = 0;
+ msg.hdr.msgh_id = SHARK_MSG_START;
+ msg.unk0 = 1;
+ msg.name0 = name;
+ msg.arg2 = 6;
+ msg.unk1 = 0;
+ msg.unk2 = 1;
+ msg.unk3 = 0;
+ msg.unk4 = 1;
+ msg.unk5 = 0;
+ msg.unk6 = 1;
+ msg.name1 = name;
+
+ MOZ_ASSERT(sizeof(msg) == 0x34);
+ mach_msg_return_t result = mach_msg(&msg.hdr, MACH_SEND_MSG | MACH_RCV_MSG,
+ sizeof(msg), 0x30, reply_port, 0, 0);
+ mig_dealloc_reply_port(reply_port);
+ return result;
+}
+
+mach_msg_return_t
+Stop(mach_port_t shark_port)
+{
+ mach_port_t reply_port = mig_get_reply_port();
+
+ struct chud_client_stop_msg msg;
+ msg.hdr.msgh_bits = 0x1513;
+ msg.hdr.msgh_size = sizeof(mach_msg_header_t);
+ msg.hdr.msgh_remote_port = shark_port;
+ msg.hdr.msgh_local_port = reply_port;
+ msg.hdr.msgh_reserved = 0;
+ msg.hdr.msgh_id = SHARK_MSG_STOP;
+
+ MOZ_ASSERT(RECV_SIZEOF(struct chud_client_stop_msg) == 0x18);
+ MOZ_ASSERT(sizeof(msg) == 0x2c);
+ mach_msg_return_t result = mach_msg(&msg.hdr, MACH_SEND_MSG | MACH_RCV_MSG,
+ RECV_SIZEOF(struct chud_client_stop_msg),
+ sizeof(msg), reply_port, 0, 0);
+ mig_dealloc_reply_port(reply_port);
+ return result;
+}
+
+static mach_msg_return_t
+Disconnect(mach_port_t shark_port)
+{
+ mach_port_t reply_port = mig_get_reply_port();
+
+ struct chud_client_release_msg msg;
+ msg.hdr.msgh_bits = 0x1513;
+ msg.hdr.msgh_size = sizeof(mach_msg_header_t);
+ msg.hdr.msgh_remote_port = shark_port;
+ msg.hdr.msgh_local_port = reply_port;
+ msg.hdr.msgh_reserved = 0;
+ msg.hdr.msgh_id = SHARK_MSG_RELEASE;
+ msg.unk0 = 0;
+ msg.unk1 = 1;
+ msg.pid = getpid();
+
+ MOZ_ASSERT(RECV_SIZEOF(struct chud_client_release_msg) == 0x24);
+ MOZ_ASSERT(sizeof(msg) == 0x2c);
+ mach_msg_return_t result = mach_msg(&msg.hdr, MACH_SEND_MSG | MACH_RCV_MSG,
+ RECV_SIZEOF(struct chud_client_release_msg),
+ sizeof(msg), reply_port, 0, 0);
+ mig_dealloc_reply_port(reply_port);
+ return result;
+}
+
+static mach_port_t shark_port = 0;
+static bool connected = false;
+static bool running = false;
+
+namespace Shark {
+
+bool
+Start()
+{
+ if (!shark_port && !(shark_port = CreatePort()))
+ return false;
+ if (!connected && Connect(shark_port))
+ return false;
+ connected = true;
+ if (!running && ::Start(shark_port, 0xdeadbeef))
+ return false;
+ return running = true;
+}
+
+void
+Stop()
+{
+ if (!shark_port || !connected)
+ return;
+ ::Stop(shark_port);
+ running = false;
+ Disconnect(shark_port);
+ connected = false;
+}
+
+} // namespace Shark
+
+#endif
diff --git a/js/src/devtools/sharkctl.h b/js/src/devtools/sharkctl.h
new file mode 100644
index 000000000..f09ed8be3
--- /dev/null
+++ b/js/src/devtools/sharkctl.h
@@ -0,0 +1,24 @@
+/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef devtools_sharkctl_h
+#define devtools_sharkctl_h
+
+#ifdef __APPLE__
+
+#include <mach/mach.h>
+#include <stdint.h>
+
+namespace Shark {
+
+bool Start();
+void Stop();
+
+} // namespace Shark
+
+#endif /* __APPLE__ */
+
+#endif /* devtools_sharkctl_h */
diff --git a/js/src/devtools/vprof/manifest.mk b/js/src/devtools/vprof/manifest.mk
new file mode 100644
index 000000000..e18a17fb5
--- /dev/null
+++ b/js/src/devtools/vprof/manifest.mk
@@ -0,0 +1,7 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+avmplus_CXXSRCS := $(avmplus_CXXSRCS) \
+ $(curdir)/vprof.cpp \
+ $(NULL)
diff --git a/js/src/devtools/vprof/readme.txt b/js/src/devtools/vprof/readme.txt
new file mode 100644
index 000000000..f84bfc27e
--- /dev/null
+++ b/js/src/devtools/vprof/readme.txt
@@ -0,0 +1,97 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+The two files vprof.h and vprof.cpp implement a simple value-profiling mechanism. By including these two files in avmplus (or any other project), you can value-profile whatever data you wish (currently integer values).
+
+Usage:
+#include "vprof.h" // in the source file you want to use it
+
+_vprof (value);
+
+At the end of the execution, for each probe you'll get the data associated with the probe, such as:
+
+File line avg [min : max] total count
+..\..\pcre\pcre_valid_utf8.cpp 182 50222.75916 [0 : 104947] 4036955604 80381
+
+The probe is defined at line 182 of file pcre_valid_utf8.cpp. It was called 80381 times. The minimum value of the probe was 0, its maximum was 104947, and its average was 50222.75916. The total sum of all values of the probe is 4036955604. Later, I plan to add more reporting options, such as the distribution (spectrum) of the data.
+
+A few typical uses
+------------------
+
+To see how many times a given function gets executed do:
+
+void f()
+{
+ _vprof(1);
+ ...
+}
+
+To see how often each branch of a condition is taken, put a probe in each arm:
+
+void f()
+{
+ _vprof(1);
+ ...
+ if (...) {
+ _vprof(1);
+ ...
+ } else {
+ _vprof(1);
+ ...
+ }
+}
+
+Here are a few examples of using the value-profiling utility:
+
+ _vprof (e);
+ at the end of program execution, you'll get a dump of the source location of this probe,
+ its min, max, average, the total sum of all instances of e, and the total number of times this probe was called.
+
+ _vprof (x > 0);
+ shows how many times and in what percentage of the cases x was > 0,
+ that is, the probability that x > 0.
+
+ _vprof (n % 2 == 0);
+ shows how many times n was an even number
+ as well as the probability of n being an even number.
+
+ _hprof (n, 4, 1000, 5000, 5001, 10000);
+ gives you the histogram of n over the given 4 bucket boundaries:
+ # cases < 1000
+ # cases >= 1000 and < 5000
+ # cases >= 5000 and < 5001
+ # cases >= 5001 and < 10000
+ # cases >= 10000
+
+ _nvprof ("event name", value);
+ all instances with the same name are merged,
+ so you can call _nvprof with the same event name at different places
+
+ _vprof (e, myProbe);
+ value profile e and call myProbe (void* vprofID) at the profiling point.
+ inside the probe, the client has the predefined variables:
+ _VAL, _COUNT, _SUM, _MIN, _MAX, and the general purpose registers
+ _IVAR1, ..., _IVAR4 general integer registers
+ _I64VAR1, ..., _I64VAR4 general 64-bit integer registers
+ _DVAR1, ..., _DVAR4 general double registers
+ _GENPTR a generic pointer that can be used by the client
+ the number of registers can be changed in vprof.h
+
+Named Events
+------------
+_nvprof ("event name", value);
+ all instances with the same name are merged,
+ so you can call _nvprof with the same event name at different places
+
+
+Custom Probes
+--------------
+You can call your own custom probe at the profiling point.
+_vprof (v, myProbe);
+ value profile v and call myProbe (void* vprofID) at the profiling point
+ inside the probe, the client has the predefined variables:
+ _VAL, _COUNT, _SUM, _MIN, _MAX, and the general purpose registers
+ _IVAR1, ..., _IVAR4 general integer registers
+ _I64VAR1, ..., _I64VAR4 general 64-bit integer registers
+ _DVAR1, ..., _DVAR4 general double registers
+ the number of registers can be changed in vprof.h
+ _GENPTR a generic pointer that can be used for almost anything
diff --git a/js/src/devtools/vprof/testVprofMT.c b/js/src/devtools/vprof/testVprofMT.c
new file mode 100644
index 000000000..b6041d636
--- /dev/null
+++ b/js/src/devtools/vprof/testVprofMT.c
@@ -0,0 +1,92 @@
+/* -*- Mode: C++; c-basic-offset: 4; indent-tabs-mode: t; tab-width: 4 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <windows.h>
+#include <stdio.h>
+#include <time.h>
+
+#include "vprof.h"
+
+static void cProbe (void* vprofID)
+{
+ if (_VAL == _IVAR1) _I64VAR1 ++;
+ _IVAR1 = _IVAR0;
+
+ if (_VAL == _IVAR0) _I64VAR0 ++;
+ _IVAR0 = (int) _VAL;
+
+ _DVAR0 = ((double)_I64VAR0) / _COUNT;
+ _DVAR1 = ((double)_I64VAR1) / _COUNT;
+}
+
+//__declspec (thread) boolean cv;
+//#define if(c) cv = (c); _vprof (cv); if (cv)
+//#define if(c) cv = (c); _vprof (cv, cProbe); if (cv)
+
+#define THREADS 1
+#define COUNT 100000
+#define SLEEPTIME 0
+
+static int64_t evens = 0;
+static int64_t odds = 0;
+
+void sub(int val)
+{
+ int i;
+ //_vprof (1);
+ for (i = 0; i < COUNT; i++) {
+ //_nvprof ("Iteration", 1);
+ //_nvprof ("Iteration", 1);
+ _vprof (i);
+ //_vprof (i);
+ //_hprof(i, 3, (int64_t) 1000, (int64_t)2000, (int64_t)3000);
+ //_hprof(i, 3, 10000, 10001, 3000000);
+ //_nhprof("Event", i, 3, 10000, 10001, 3000000);
+ //_nhprof("Event", i, 3, 10000, 10001, 3000000);
+ //Sleep(SLEEPTIME);
+ if (i % 2 == 0) {
+ //_vprof (i);
+ ////_hprof(i, 3, 10000, 10001, 3000000);
+ //_nvprof ("Iteration", i);
+ evens ++;
+ } else {
+ //_vprof (1);
+ _vprof (i, cProbe);
+ odds ++;
+ }
+ //_nvprof ("Iterate", 1);
+ }
+ //printf("sub %d done.\n", val);
+}
+
+HANDLE array[THREADS];
+
+static int run (void)
+{
+ int i;
+
+ time_t start_time = time(0);
+
+ for (i = 0; i < THREADS; i++) {
+ array[i] = CreateThread(0, 0, (LPTHREAD_START_ROUTINE)sub, (LPVOID)i, 0, 0);
+ }
+
+ for (i = 0; i < THREADS; i++) {
+ WaitForSingleObject(array[i], INFINITE);
+ }
+
+ return 0;
+}
+
+int main ()
+{
+ DWORD start, end;
+
+ start = GetTickCount ();
+ run ();
+ end = GetTickCount ();
+
+ printf ("\nRun took %d msecs\n\n", end-start);
+}
diff --git a/js/src/devtools/vprof/vprof.cpp b/js/src/devtools/vprof/vprof.cpp
new file mode 100644
index 000000000..e208d0405
--- /dev/null
+++ b/js/src/devtools/vprof/vprof.cpp
@@ -0,0 +1,354 @@
+/* -*- Mode: C++; c-basic-offset: 4; indent-tabs-mode: t; tab-width: 4 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "VMPI.h"
+
+// Note, this is not supported in configurations with more than one AvmCore running
+// in the same process.
+
+#ifdef WIN32
+#include "windows.h"
+#else
+#define __cdecl
+#include <stdarg.h>
+#include <string.h>
+#endif
+
+#include "vprof.h"
+
+#ifndef MIN
+#define MIN(x,y) ((x) <= (y) ? x : y)
+#endif
+#ifndef MAX
+#define MAX(x,y) ((x) >= (y) ? x : y)
+#endif
+
+#ifndef MAXINT
+#define MAXINT int(unsigned(-1)>>1)
+#endif
+
+#ifndef MAXINT64
+#define MAXINT64 int64_t(uint64_t(-1)>>1)
+#endif
+
+#ifndef __STDC_WANT_SECURE_LIB__
+#define sprintf_s(b,size,fmt,...) sprintf((b),(fmt),__VA_ARGS__)
+#endif
+
+#if THREADED
+#define DO_LOCK(lock) Lock(lock); {
+#define DO_UNLOCK(lock) }; Unlock(lock)
+#else
+#define DO_LOCK(lock) { (void)(lock);
+#define DO_UNLOCK(lock) }
+#endif
+
+#if THREAD_SAFE
+#define LOCK(lock) DO_LOCK(lock)
+#define UNLOCK(lock) DO_UNLOCK(lock)
+#else
+#define LOCK(lock) { (void)(lock);
+#define UNLOCK(lock) }
+#endif
+
+static entry* entries = nullptr;
+static bool notInitialized = true;
+static long glock = LOCK_IS_FREE;
+
+#define Lock(lock) while (_InterlockedCompareExchange(lock, LOCK_IS_TAKEN, LOCK_IS_FREE) == LOCK_IS_TAKEN){};
+#define Unlock(lock) _InterlockedCompareExchange(lock, LOCK_IS_FREE, LOCK_IS_TAKEN);
+
+#if defined(WIN32)
+ static void vprof_printf(const char* format, ...)
+ {
+ va_list args;
+ va_start(args, format);
+
+ char buf[1024];
+ vsnprintf(buf, sizeof(buf), format, args);
+
+ va_end(args);
+
+ printf("%s", buf);
+ ::OutputDebugStringA(buf);
+ }
+#else
+ #define vprof_printf printf
+#endif
+
+static inline entry* reverse (entry* s)
+{
+ entry_t e, n, p;
+
+ p = nullptr;
+ for (e = s; e; e = n) {
+ n = e->next;
+ e->next = p;
+ p = e;
+ }
+
+ return p;
+}
+
+static char* f (double d)
+{
+ static char s[80];
+ char* p;
+ sprintf_s (s, sizeof(s), "%lf", d);
+ p = s+VMPI_strlen(s)-1;
+ while (*p == '0') {
+ *p = '\0';
+ p--;
+ if (p == s) break;
+ }
+ if (*p == '.') *p = '\0';
+ return s;
+}
+
+static void dumpProfile (void)
+{
+ entry_t e;
+
+ entries = reverse(entries);
+ vprof_printf ("event avg [min : max] total count\n");
+ for (e = entries; e; e = e->next) {
+ if (e->count == 0) continue; // ignore entries with zero count.
+ vprof_printf ("%s", e->file);
+ if (e->line >= 0) {
+ vprof_printf (":%d", e->line);
+ }
+ vprof_printf (" %s [%lld : %lld] %lld %lld ",
+ f(((double)e->sum)/((double)e->count)), (long long int)e->min, (long long int)e->max, (long long int)e->sum, (long long int)e->count);
+ if (e->h) {
+ for (int j = 0; j < e->h->nbins; j ++) {
+ vprof_printf ("(%lld < %lld) ", (long long int)e->h->count[j], (long long int)e->h->lb[j]);
+ }
+ vprof_printf ("(%lld >= %lld) ", (long long int)e->h->count[e->h->nbins], (long long int)e->h->lb[e->h->nbins-1]);
+ }
+ if (e->func) {
+ int j;
+ for (j = 0; j < NUM_EVARS; j++) {
+ if (e->ivar[j] != 0) {
+ vprof_printf ("IVAR%d %d ", j, e->ivar[j]);
+ }
+ }
+ for (j = 0; j < NUM_EVARS; j++) {
+ if (e->i64var[j] != 0) {
+ vprof_printf ("I64VAR%d %lld ", j, (long long int)e->i64var[j]);
+ }
+ }
+ for (j = 0; j < NUM_EVARS; j++) {
+ if (e->dvar[j] != 0) {
+ vprof_printf ("DVAR%d %lf ", j, e->dvar[j]);
+ }
+ }
+ }
+ vprof_printf ("\n");
+ }
+ entries = reverse(entries);
+}
+
+static inline entry_t findEntry (char* file, int line)
+{
+ for (entry_t e = entries; e; e = e->next) {
+ if ((e->line == line) && (VMPI_strcmp (e->file, file) == 0)) {
+ return e;
+ }
+ }
+ return nullptr;
+}
+
+// Initialize the location pointed to by 'id' to a new value profile entry
+// associated with 'file' and 'line', or do nothing if already initialized.
+// An optional final argument provides a user-defined probe function.
+
+int initValueProfile(void** id, char* file, int line, ...)
+{
+ DO_LOCK (&glock);
+ entry_t e = (entry_t) *id;
+ if (notInitialized) {
+ atexit (dumpProfile);
+ notInitialized = false;
+ }
+
+ if (e == nullptr) {
+ e = findEntry (file, line);
+ if (e) {
+ *id = e;
+ }
+ }
+
+ if (e == nullptr) {
+ va_list va;
+ e = (entry_t) malloc (sizeof(entry));
+ e->lock = LOCK_IS_FREE;
+ e->file = file;
+ e->line = line;
+ e->value = 0;
+ e->sum = 0;
+ e->count = 0;
+ e->min = 0;
+ e->max = 0;
+ // optional probe function argument
+ va_start (va, line);
+ e->func = (void (__cdecl*)(void*)) va_arg (va, void*);
+ va_end (va);
+ e->h = nullptr;
+ e->genptr = nullptr;
+ VMPI_memset (&e->ivar, 0, sizeof(e->ivar));
+ VMPI_memset (&e->i64var, 0, sizeof(e->i64var));
+ VMPI_memset (&e->dvar, 0, sizeof(e->dvar));
+ e->next = entries;
+ entries = e;
+ *id = e;
+ }
+ DO_UNLOCK (&glock);
+
+ return 0;
+}
+
+// Record a value profile event.
+
+int profileValue(void* id, int64_t value)
+{
+ entry_t e = (entry_t) id;
+ long* lock = &(e->lock);
+ LOCK (lock);
+ e->value = value;
+ if (e->count == 0) {
+ e->sum = value;
+ e->count = 1;
+ e->min = value;
+ e->max = value;
+ } else {
+ e->sum += value;
+ e->count ++;
+ e->min = MIN (e->min, value);
+ e->max = MAX (e->max, value);
+ }
+ if (e->func) e->func (e);
+ UNLOCK (lock);
+
+ return 0;
+}
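+
+// Illustrative call protocol (a sketch of what the _vprof macro in vprof.h
+// expands to; 'someValue' stands for any caller-supplied expression):
+//
+//     static void* id = 0;                                 // one slot per probe site
+//     if (id == 0)
+//         initValueProfile(&id, __FILE__, __LINE__, NULL); // NULL: no probe function
+//     profileValue(id, (int64_t) someValue);               // record one sample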
+
+// Initialize the location pointed to by 'id' to a new histogram profile entry
+// associated with 'file' and 'line', or do nothing if already initialized.
+
+int initHistProfile(void** id, char* file, int line, int nbins, ...)
+{
+ DO_LOCK (&glock);
+ entry_t e = (entry_t) *id;
+ if (notInitialized) {
+ atexit (dumpProfile);
+ notInitialized = false;
+ }
+
+ if (e == nullptr) {
+ e = findEntry (file, line);
+ if (e) {
+ *id = e;
+ }
+ }
+
+ if (e == nullptr) {
+ va_list va;
+ hist_t h;
+ int b, n, s;
+ int64_t* lb;
+
+ e = (entry_t) malloc (sizeof(entry));
+ e->lock = LOCK_IS_FREE;
+ e->file = file;
+ e->line = line;
+ e->value = 0;
+ e->sum = 0;
+ e->count = 0;
+ e->min = 0;
+ e->max = 0;
+ e->func = nullptr;
+ e->h = h = (hist_t) malloc (sizeof(hist));
+ n = 1+MAX(nbins,0);
+ h->nbins = n-1;
+ s = n*sizeof(int64_t);
+ lb = (int64_t*) malloc (s);
+ h->lb = lb;
+ VMPI_memset (h->lb, 0, s);
+ h->count = (int64_t*) malloc (s);
+ VMPI_memset (h->count, 0, s);
+
+ va_start (va, nbins);
+ for (b = 0; b < nbins; b++) {
+ //lb[b] = va_arg (va, int64_t);
+ lb[b] = va_arg (va, int);
+ }
+ lb[b] = MAXINT64;
+ va_end (va);
+
+ e->genptr = nullptr;
+ VMPI_memset (&e->ivar, 0, sizeof(e->ivar));
+ VMPI_memset (&e->i64var, 0, sizeof(e->i64var));
+ VMPI_memset (&e->dvar, 0, sizeof(e->dvar));
+ e->next = entries;
+ entries = e;
+ *id = e;
+ }
+ DO_UNLOCK (&glock);
+
+ return 0;
+}
+
+// Record a histogram profile event.
+
+int histValue(void* id, int64_t value)
+{
+ entry_t e = (entry_t) id;
+ long* lock = &(e->lock);
+ hist_t h = e->h;
+ int nbins = h->nbins;
+ int64_t* lb = h->lb;
+ int b;
+
+ LOCK (lock);
+ e->value = value;
+ if (e->count == 0) {
+ e->sum = value;
+ e->count = 1;
+ e->min = value;
+ e->max = value;
+ } else {
+ e->sum += value;
+ e->count ++;
+ e->min = MIN (e->min, value);
+ e->max = MAX (e->max, value);
+ }
+ for (b = 0; b < nbins; b ++) {
+ if (value < lb[b]) break;
+ }
+ h->count[b] ++;
+ UNLOCK (lock);
+
+ return 0;
+}
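+
+// Illustrative call protocol for histogram probes (a sketch of what the _hprof
+// macro in vprof.h expands to; the bucket boundaries below are just examples):
+//
+//     static void* id = 0;
+//     if (id == 0)
+//         initHistProfile(&id, __FILE__, __LINE__, 3, 10, 100, 1000); // 3 boundaries -> 4 buckets
+//     histValue(id, (int64_t) someValue); // counted into <10, [10,100), [100,1000), or >=1000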
+
+#if defined(_MSC_VER) && defined(_M_IX86)
+uint64_t readTimestampCounter()
+{
+ // read the cpu cycle counter. 1 tick = 1 cycle on IA32
+ _asm rdtsc;
+}
+#elif defined(__GNUC__) && (__i386__ || __x86_64__)
+uint64_t readTimestampCounter()
+{
+ uint32_t lo, hi;
+ __asm__ __volatile__ ("rdtsc" : "=a" (lo), "=d" (hi));
+ return (uint64_t(hi) << 32) | lo;
+}
+#else
+// add stub for platforms without it, so fat builds don't fail
+uint64_t readTimestampCounter() { return 0; }
+#endif
+
diff --git a/js/src/devtools/vprof/vprof.h b/js/src/devtools/vprof/vprof.h
new file mode 100644
index 000000000..88a3391a2
--- /dev/null
+++ b/js/src/devtools/vprof/vprof.h
@@ -0,0 +1,275 @@
+/* -*- Mode: C++; c-basic-offset: 4; indent-tabs-mode: t; tab-width: 4 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+//
+// Here are a few examples of using the value-profiling utility:
+//
+// _vprof (e);
+// at the end of program execution, you'll get a dump of the source location of this probe,
+// its min, max, average, the total sum of all instances of e, and the total number of times this probe was called.
+//
+// _vprof (x > 0);
+// shows how many times and what percentage of the cases x was > 0,
+// that is, the probability that x > 0.
+//
+// _vprof (n % 2 == 0);
+// shows how many times n was an even number
+// as well as the probability of n being an even number.
+//
+// _hprof (n, 4, 1000, 5000, 5001, 10000);
+// gives you the histogram of n over the given 4 bucket boundaries:
+// # cases < 1000
+// # cases >= 1000 and < 5000
+// # cases >= 5000 and < 5001
+// # cases >= 5001 and < 10000
+// # cases >= 10000
+//
+// _nvprof ("event name", value);
+// all instances with the same name are merged
+// so you can call _nvprof with the same event name at different places.
+//
+// _vprof (e, myProbe);
+// value profile e and call myProbe (void* vprofID) at the profiling point.
+// inside the probe, the client has the predefined variables:
+// _VAL, _COUNT, _SUM, _MIN, _MAX, and the general purpose registers
+// _IVAR0, ..., _IVAR3 general integer registers
+// _I64VAR0, ..., _I64VAR3 general int64 registers
+// _DVAR0, ..., _DVAR3 general double registers
+// _GENPTR a generic pointer that can be used by the client
+// the number of registers can be changed in vprof.h
+//
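+// For example, a probe passed to _vprof might look like this (a sketch; the
+// probe name and the policy it implements are purely illustrative):
+//
+//     void myProbe (void* vprofID)
+//     {
+//         if (_VAL > _MAX / 2)   // _VAL/_MAX expand to fields of the entry behind vprofID
+//             _IVAR0 ++;         // count samples larger than half the max seen so far
+//     }
+//
+//     _vprof (e, myProbe);
+//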
+
+#ifndef devtools_vprof_vprof_h
+#define devtools_vprof_vprof_h
+//
+// If the application for which you want to use vprof is threaded, define THREADED as 1; otherwise define it as 0.
+//
+// If your application is not threaded, define THREAD_SAFE as 0.
+// Otherwise, set THREAD_SAFE to 1 for exact counts, or to 0 for much cheaper but possibly inexact counts.
+//
+#define THREADED 0
+#define THREAD_SAFE 0
+
+#include "VMPI.h"
+
+// Note, this is not supported in configurations with more than one AvmCore running
+// in the same process.
+
+// portable align macro
+#if defined(_MSC_VER)
+ #define vprof_align8(t) __declspec(align(8)) t
+#elif defined(__GNUC__)
+ #define vprof_align8(t) t __attribute__ ((aligned (8)))
+#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
+ #define vprof_align8(t) t __attribute__ ((aligned (8)))
+#elif defined(VMCFG_SYMBIAN)
+ #define vprof_align8(t) t __attribute__ ((aligned (8)))
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int initValueProfile(void** id, char* file, int line, ...);
+int profileValue(void* id, int64_t value);
+int initHistProfile(void** id, char* file, int line, int nbins, ...);
+int histValue(void* id, int64_t value);
+uint64_t readTimestampCounter();
+
+#ifdef __cplusplus
+}
+#endif
+
+//#define DOPROF
+
+#ifndef DOPROF
+#define _nvprof(e,v)
+#ifndef VMCFG_SYMBIAN
+#define _vprof(v,...)
+#define _hprof(v,n,...)
+#define _nhprof(e,v,n,...)
+#define _ntprof_begin(e)
+#define _ntprof_end(e)
+#define _jvprof_init(id,...)
+#define _jnvprof_init(id,e,...)
+#define _jhprof_init(id,n,...)
+#define _jnhprof_init(id,e,n,...)
+#define _jvprof(id,v)
+#define _jhprof(id,v)
+#endif // ! VMCFG_SYMBIAN
+#else
+
+// Historical/compatibility note:
+// The macros below were originally written using conditional expressions, not if/else. The original author
+// said that this was done to allow _vprof and _nvprof to be used in an expression context, but the old code
+// had already wrapped the macro bodies in { }, so it is not clear how this could have worked. At present,
+// the profiling macros must appear in a statement context only.
+
+#define _vprof(v,...) \
+do { \
+ static void* id = 0; \
+ if (id == 0) \
+ initValueProfile(&id, __FILE__, __LINE__, ##__VA_ARGS__, NULL); \
+ profileValue(id, (int64_t) (v)); \
+} while (0)
+
+#define _nvprof(e,v) \
+do { \
+ static void* id = 0; \
+ if (id == 0) \
+ initValueProfile(&id, (char*) (e), -1, NULL); \
+ profileValue(id, (int64_t) (v)); \
+} while (0)
+
+#define _hprof(v,n,...) \
+do { \
+ static void* id = 0; \
+ if (id == 0) \
+ initHistProfile(&id, __FILE__, __LINE__, (int) (n), ##__VA_ARGS__); \
+ histValue(id, (int64_t) (v)); \
+} while (0)
+
+#define _nhprof(e,v,n,...) \
+do { \
+ static void* id = 0; \
+ if (id == 0) \
+ initHistProfile(&id, (char*) (e), -1, (int) (n), ##__VA_ARGS__); \
+ histValue(id, (int64_t) (v)); \
+} while (0)
+
+// Profile execution time between _ntprof_begin(e) and _ntprof_end(e).
+// The tag 'e' must match at the beginning and end of the region to
+// be timed. Regions may be nested or overlap arbitrarily, as it is
+// the tag alone that defines the begin/end correspondence.
+
+#define _ntprof_begin(e) \
+do { \
+ static void* id = 0; \
+ if (id == 0) \
+ initValueProfile(&id, (char*)(e), -1, NULL); \
+ ((entry_t)id)->i64var[0] = readTimestampCounter(); \
+} while (0)
+
+// Assume a 2.6 GHz CPU
+#define TICKS_PER_USEC 2600
+
+#define _ntprof_end(e) \
+do { \
+ static void* id = 0; \
+ uint64_t stop = readTimestampCounter(); \
+ if (id == 0) \
+ initValueProfile(&id, (char*)(e), -1, NULL); \
+ uint64_t start = ((entry_t)id)->i64var[0]; \
+ uint64_t usecs = (stop - start) / TICKS_PER_USEC; \
+ profileValue(id, usecs); \
+} while (0)
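+
+// Illustrative usage (sketch; the tag and the timed call are hypothetical):
+//
+//     _ntprof_begin ("parse");
+//     parseDocument ();
+//     _ntprof_end ("parse");
+//
+// At exit, the "parse" entry reports the per-region time in microseconds
+// (min/avg/max/total), scaled by the TICKS_PER_USEC assumption above.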
+
+// These macros separate the creation of a profile record from its later usage.
+// They are intended for profiling JIT-generated code. Once created, the JIT can
+// bind a pointer to the profile record into the generated code, which can then
+// record profile events during execution.
+
+#define _jvprof_init(id,...) \
+ if (*(id) == 0) \
+ initValueProfile((id), __FILE__, __LINE__, ##__VA_ARGS__, NULL)
+
+#define _jnvprof_init(id,e,...) \
+ if (*(id) == 0) \
+ initValueProfile((id), (char*) (e), -1, ##__VA_ARGS__, NULL)
+
+#define _jhprof_init(id,n,...) \
+ if (*(id) == 0) \
+ initHistProfile((id), __FILE__, __LINE__, (int) (n), ##__VA_ARGS__)
+
+#define _jnhprof_init(id,e,n,...) \
+ if (*(id) == 0) \
+ initHistProfile((id), (char*) (e), -1, (int) (n), ##__VA_ARGS__)
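+
+// Illustrative setup (sketch; 'slot' and the event name are hypothetical, and
+// how the pointer reaches generated code is up to the JIT):
+//
+//     static void* slot = 0;
+//     _jnvprof_init (&slot, "jit: ic misses");  // create or find the record once
+//     // ... bake 'slot' into the generated code as an immediate pointer ...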
+
+// Calls to the _jvprof and _jhprof macros must be wrapped in a non-inline
+// function in order to be invoked from JIT-compiled code.
+
+#define _jvprof(id,v) \
+ profileValue((id), (int64_t) (v))
+
+#define _jhprof(id,v) \
+ histValue((id), (int64_t) (v))
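+
+// A sketch of such a wrapper (the name is illustrative; it must not be
+// inlined, so that generated code has a stable address to call):
+//
+//     extern "C" void recordJitValue (void* vprofRecord, int64_t v)
+//     {
+//         _jvprof (vprofRecord, v);
+//     }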
+
+#endif
+
+#define NUM_EVARS 4
+
+enum {
+ LOCK_IS_FREE = 0,
+ LOCK_IS_TAKEN = 1
+};
+
+extern
+#ifdef __cplusplus
+"C"
+#endif
+long _InterlockedCompareExchange (
+ long volatile * Destination,
+ long Exchange,
+ long Comperand
+);
+
+typedef struct hist hist;
+
+typedef struct hist {
+ int nbins;
+ int64_t* lb;
+ int64_t* count;
+} *hist_t;
+
+typedef struct entry entry;
+
+typedef struct entry {
+ long lock;
+ char* file;
+ int line;
+ int64_t value;
+ int64_t count;
+ int64_t sum;
+ int64_t min;
+ int64_t max;
+ void (*func)(void*);
+ hist* h;
+
+ entry* next;
+
+ // exposed to the clients
+ void* genptr;
+ int ivar[NUM_EVARS];
+ vprof_align8(int64_t) i64var[NUM_EVARS];
+ vprof_align8(double) dvar[NUM_EVARS];
+ //
+
+ char pad[128]; // avoid false sharing
+} *entry_t;
+
+#define _VAL ((entry_t)vprofID)->value
+#define _COUNT ((entry_t)vprofID)->count
+#define _SUM ((entry_t)vprofID)->sum
+#define _MIN ((entry_t)vprofID)->min
+#define _MAX ((entry_t)vprofID)->max
+
+#define _GENPTR ((entry_t)vprofID)->genptr
+
+#define _IVAR0 ((entry_t)vprofID)->ivar[0]
+#define _IVAR1 ((entry_t)vprofID)->ivar[1]
+#define _IVAR2 ((entry_t)vprofID)->ivar[2]
+#define _IVAR3 ((entry_t)vprofID)->ivar[3]
+
+#define _I64VAR0 ((entry_t)vprofID)->i64var[0]
+#define _I64VAR1 ((entry_t)vprofID)->i64var[1]
+#define _I64VAR2 ((entry_t)vprofID)->i64var[2]
+#define _I64VAR3 ((entry_t)vprofID)->i64var[3]
+
+#define _DVAR0 ((entry_t)vprofID)->dvar[0]
+#define _DVAR1 ((entry_t)vprofID)->dvar[1]
+#define _DVAR2 ((entry_t)vprofID)->dvar[2]
+#define _DVAR3 ((entry_t)vprofID)->dvar[3]
+
+#endif /* devtools_vprof_vprof_h */