diff options
Diffstat (limited to 'python/mozbuild/mozpack')
36 files changed, 10136 insertions, 0 deletions
diff --git a/python/mozbuild/mozpack/__init__.py b/python/mozbuild/mozpack/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/python/mozbuild/mozpack/__init__.py diff --git a/python/mozbuild/mozpack/archive.py b/python/mozbuild/mozpack/archive.py new file mode 100644 index 000000000..f3015ff21 --- /dev/null +++ b/python/mozbuild/mozpack/archive.py @@ -0,0 +1,107 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import bz2 +import gzip +import stat +import tarfile + + +# 2016-01-01T00:00:00+0000 +DEFAULT_MTIME = 1451606400 + + +def create_tar_from_files(fp, files): + """Create a tar file deterministically. + + Receives a dict mapping names of files in the archive to local filesystem + paths. + + The files will be archived and written to the passed file handle opened + for writing. + + Only regular files can be written. + + FUTURE accept mozpack.files classes for writing + FUTURE accept a filename argument (or create APIs to write files) + """ + with tarfile.open(name='', mode='w', fileobj=fp, dereference=True) as tf: + for archive_path, fs_path in sorted(files.items()): + ti = tf.gettarinfo(fs_path, archive_path) + + if not ti.isreg(): + raise ValueError('not a regular file: %s' % fs_path) + + # Disallow setuid and setgid bits. This is an arbitrary restriction. + # However, since we set uid/gid to root:root, setuid and setgid + # would be a glaring security hole if the archive were + # uncompressed as root. + if ti.mode & (stat.S_ISUID | stat.S_ISGID): + raise ValueError('cannot add file with setuid or setgid set: ' + '%s' % fs_path) + + # Set uid, gid, username, and group as deterministic values. + ti.uid = 0 + ti.gid = 0 + ti.uname = '' + ti.gname = '' + + # Set mtime to a constant value. 
+ ti.mtime = DEFAULT_MTIME + + with open(fs_path, 'rb') as fh: + tf.addfile(ti, fh) + + +def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9): + """Create a tar.gz file deterministically from files. + + This is a glorified wrapper around ``create_tar_from_files`` that + adds gzip compression. + + The passed file handle should be opened for writing in binary mode. + When the function returns, all data has been written to the handle. + """ + # Offset 3-7 in the gzip header contains an mtime. Pin it to a known + # value so output is deterministic. + gf = gzip.GzipFile(filename=filename or '', mode='wb', fileobj=fp, + compresslevel=compresslevel, mtime=DEFAULT_MTIME) + with gf: + create_tar_from_files(gf, files) + + +class _BZ2Proxy(object): + """File object that proxies writes to a bz2 compressor.""" + def __init__(self, fp, compresslevel=9): + self.fp = fp + self.compressor = bz2.BZ2Compressor(compresslevel=compresslevel) + self.pos = 0 + + def tell(self): + return self.pos + + def write(self, data): + data = self.compressor.compress(data) + self.pos += len(data) + self.fp.write(data) + + def close(self): + data = self.compressor.flush() + self.pos += len(data) + self.fp.write(data) + + +def create_tar_bz2_from_files(fp, files, compresslevel=9): + """Create a tar.bz2 file deterministically from files. + + This is a glorified wrapper around ``create_tar_from_files`` that + adds bzip2 compression. + + This function is similar to ``create_tar_gzip_from_files()``. 
+ """ + proxy = _BZ2Proxy(fp, compresslevel=compresslevel) + create_tar_from_files(proxy, files) + proxy.close() diff --git a/python/mozbuild/mozpack/chrome/__init__.py b/python/mozbuild/mozpack/chrome/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/python/mozbuild/mozpack/chrome/__init__.py diff --git a/python/mozbuild/mozpack/chrome/flags.py b/python/mozbuild/mozpack/chrome/flags.py new file mode 100644 index 000000000..8c5c9a54c --- /dev/null +++ b/python/mozbuild/mozpack/chrome/flags.py @@ -0,0 +1,258 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import re +from distutils.version import LooseVersion +from mozpack.errors import errors +from collections import OrderedDict + + +class Flag(object): + ''' + Class for flags in manifest entries in the form: + "flag" (same as "flag=true") + "flag=yes|true|1" + "flag=no|false|0" + ''' + def __init__(self, name): + ''' + Initialize a Flag with the given name. + ''' + self.name = name + self.value = None + + def add_definition(self, definition): + ''' + Add a flag value definition. Replaces any previously set value. + ''' + if definition == self.name: + self.value = True + return + assert(definition.startswith(self.name)) + if definition[len(self.name)] != '=': + return errors.fatal('Malformed flag: %s' % definition) + value = definition[len(self.name) + 1:] + if value in ('yes', 'true', '1', 'no', 'false', '0'): + self.value = value + else: + return errors.fatal('Unknown value in: %s' % definition) + + def matches(self, value): + ''' + Return whether the flag value matches the given value. The values + are canonicalized for comparison. 
+ ''' + if value in ('yes', 'true', '1', True): + return self.value in ('yes', 'true', '1', True) + if value in ('no', 'false', '0', False): + return self.value in ('no', 'false', '0', False, None) + raise RuntimeError('Invalid value: %s' % value) + + def __str__(self): + ''' + Serialize the flag value in the same form given to the last + add_definition() call. + ''' + if self.value is None: + return '' + if self.value is True: + return self.name + return '%s=%s' % (self.name, self.value) + + +class StringFlag(object): + ''' + Class for string flags in manifest entries in the form: + "flag=string" + "flag!=string" + ''' + def __init__(self, name): + ''' + Initialize a StringFlag with the given name. + ''' + self.name = name + self.values = [] + + def add_definition(self, definition): + ''' + Add a string flag definition. + ''' + assert(definition.startswith(self.name)) + value = definition[len(self.name):] + if value.startswith('='): + self.values.append(('==', value[1:])) + elif value.startswith('!='): + self.values.append(('!=', value[2:])) + else: + return errors.fatal('Malformed flag: %s' % definition) + + def matches(self, value): + ''' + Return whether one of the string flag definitions matches the given + value. + For example, + flag = StringFlag('foo') + flag.add_definition('foo!=bar') + flag.matches('bar') returns False + flag.matches('qux') returns True + flag = StringFlag('foo') + flag.add_definition('foo=bar') + flag.add_definition('foo=baz') + flag.matches('bar') returns True + flag.matches('baz') returns True + flag.matches('qux') returns False + ''' + if not self.values: + return True + for comparison, val in self.values: + if eval('value %s val' % comparison): + return True + return False + + def __str__(self): + ''' + Serialize the flag definitions in the same form given to each + add_definition() call. 
+ ''' + res = [] + for comparison, val in self.values: + if comparison == '==': + res.append('%s=%s' % (self.name, val)) + else: + res.append('%s!=%s' % (self.name, val)) + return ' '.join(res) + + +class VersionFlag(object): + ''' + Class for version flags in manifest entries in the form: + "flag=version" + "flag<=version" + "flag<version" + "flag>=version" + "flag>version" + ''' + def __init__(self, name): + ''' + Initialize a VersionFlag with the given name. + ''' + self.name = name + self.values = [] + + def add_definition(self, definition): + ''' + Add a version flag definition. + ''' + assert(definition.startswith(self.name)) + value = definition[len(self.name):] + if value.startswith('='): + self.values.append(('==', LooseVersion(value[1:]))) + elif len(value) > 1 and value[0] in ['<', '>']: + if value[1] == '=': + if len(value) < 3: + return errors.fatal('Malformed flag: %s' % definition) + self.values.append((value[0:2], LooseVersion(value[2:]))) + else: + self.values.append((value[0], LooseVersion(value[1:]))) + else: + return errors.fatal('Malformed flag: %s' % definition) + + def matches(self, value): + ''' + Return whether one of the version flag definitions matches the given + value. + For example, + flag = VersionFlag('foo') + flag.add_definition('foo>=1.0') + flag.matches('1.0') returns True + flag.matches('1.1') returns True + flag.matches('0.9') returns False + flag = VersionFlag('foo') + flag.add_definition('foo>=1.0') + flag.add_definition('foo<0.5') + flag.matches('0.4') returns True + flag.matches('1.0') returns True + flag.matches('0.6') returns False + ''' + value = LooseVersion(value) + if not self.values: + return True + for comparison, val in self.values: + if eval('value %s val' % comparison): + return True + return False + + def __str__(self): + ''' + Serialize the flag definitions in the same form given to each + add_definition() call. 
+ ''' + res = [] + for comparison, val in self.values: + if comparison == '==': + res.append('%s=%s' % (self.name, val)) + else: + res.append('%s%s%s' % (self.name, comparison, val)) + return ' '.join(res) + + +class Flags(OrderedDict): + ''' + Class to handle a set of flags definitions given on a single manifest + entry. + ''' + FLAGS = { + 'application': StringFlag, + 'appversion': VersionFlag, + 'platformversion': VersionFlag, + 'contentaccessible': Flag, + 'os': StringFlag, + 'osversion': VersionFlag, + 'abi': StringFlag, + 'platform': Flag, + 'xpcnativewrappers': Flag, + 'tablet': Flag, + 'process': StringFlag, + } + RE = re.compile(r'([!<>=]+)') + + def __init__(self, *flags): + ''' + Initialize a set of flags given in string form. + flags = Flags('contentaccessible=yes', 'appversion>=3.5') + ''' + OrderedDict.__init__(self) + for f in flags: + name = self.RE.split(f) + name = name[0] + if not name in self.FLAGS: + errors.fatal('Unknown flag: %s' % name) + continue + if not name in self: + self[name] = self.FLAGS[name](name) + self[name].add_definition(f) + + def __str__(self): + ''' + Serialize the set of flags. + ''' + return ' '.join(str(self[k]) for k in self) + + def match(self, **filter): + ''' + Return whether the set of flags match the set of given filters. 
+ flags = Flags('contentaccessible=yes', 'appversion>=3.5', + 'application=foo') + flags.match(application='foo') returns True + flags.match(application='foo', appversion='3.5') returns True + flags.match(application='foo', appversion='3.0') returns False + ''' + for name, value in filter.iteritems(): + if not name in self: + continue + if not self[name].matches(value): + return False + return True diff --git a/python/mozbuild/mozpack/chrome/manifest.py b/python/mozbuild/mozpack/chrome/manifest.py new file mode 100644 index 000000000..71241764d --- /dev/null +++ b/python/mozbuild/mozpack/chrome/manifest.py @@ -0,0 +1,368 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import re +import os +from urlparse import urlparse +import mozpack.path as mozpath +from mozpack.chrome.flags import Flags +from mozpack.errors import errors + + +class ManifestEntry(object): + ''' + Base class for all manifest entry types. + Subclasses may define the following class or member variables: + - localized: indicates whether the manifest entry is used for localized + data. + - type: the manifest entry type (e.g. 'content' in + 'content global content/global/') + - allowed_flags: a set of flags allowed to be defined for the given + manifest entry type. + + A manifest entry is attached to a base path, defining where the manifest + entry is bound to, and that is used to find relative paths defined in + entries. + ''' + localized = False + type = None + allowed_flags = [ + 'application', + 'platformversion', + 'os', + 'osversion', + 'abi', + 'xpcnativewrappers', + 'tablet', + 'process', + ] + + def __init__(self, base, *flags): + ''' + Initialize a manifest entry with the given base path and flags. 
+ ''' + self.base = base + self.flags = Flags(*flags) + if not all(f in self.allowed_flags for f in self.flags): + errors.fatal('%s unsupported for %s manifest entries' % + (','.join(f for f in self.flags + if not f in self.allowed_flags), self.type)) + + def serialize(self, *args): + ''' + Serialize the manifest entry. + ''' + entry = [self.type] + list(args) + flags = str(self.flags) + if flags: + entry.append(flags) + return ' '.join(entry) + + def __eq__(self, other): + return self.base == other.base and str(self) == str(other) + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return '<%s@%s>' % (str(self), self.base) + + def move(self, base): + ''' + Return a new manifest entry with a different base path. + ''' + return parse_manifest_line(base, str(self)) + + def rebase(self, base): + ''' + Return a new manifest entry with all relative paths defined in the + entry relative to a new base directory. + The base class doesn't define relative paths, so it is equivalent to + move(). + ''' + return self.move(base) + + +class ManifestEntryWithRelPath(ManifestEntry): + ''' + Abstract manifest entry type with a relative path definition. + ''' + def __init__(self, base, relpath, *flags): + ManifestEntry.__init__(self, base, *flags) + self.relpath = relpath + + def __str__(self): + return self.serialize(self.relpath) + + def rebase(self, base): + ''' + Return a new manifest entry with all relative paths defined in the + entry relative to a new base directory. + ''' + clone = ManifestEntry.rebase(self, base) + clone.relpath = mozpath.rebase(self.base, base, self.relpath) + return clone + + @property + def path(self): + return mozpath.normpath(mozpath.join(self.base, + self.relpath)) + + +class Manifest(ManifestEntryWithRelPath): + ''' + Class for 'manifest' entries. + manifest some/path/to/another.manifest + ''' + type = 'manifest' + + +class ManifestChrome(ManifestEntryWithRelPath): + ''' + Abstract class for chrome entries. 
+ ''' + def __init__(self, base, name, relpath, *flags): + ManifestEntryWithRelPath.__init__(self, base, relpath, *flags) + self.name = name + + @property + def location(self): + return mozpath.join(self.base, self.relpath) + + +class ManifestContent(ManifestChrome): + ''' + Class for 'content' entries. + content global content/global/ + ''' + type = 'content' + allowed_flags = ManifestChrome.allowed_flags + [ + 'contentaccessible', + 'platform', + ] + + def __str__(self): + return self.serialize(self.name, self.relpath) + + +class ManifestMultiContent(ManifestChrome): + ''' + Abstract class for chrome entries with multiple definitions. + Used for locale and skin entries. + ''' + type = None + + def __init__(self, base, name, id, relpath, *flags): + ManifestChrome.__init__(self, base, name, relpath, *flags) + self.id = id + + def __str__(self): + return self.serialize(self.name, self.id, self.relpath) + + +class ManifestLocale(ManifestMultiContent): + ''' + Class for 'locale' entries. + locale global en-US content/en-US/ + locale global fr content/fr/ + ''' + localized = True + type = 'locale' + + +class ManifestSkin(ManifestMultiContent): + ''' + Class for 'skin' entries. + skin global classic/1.0 content/skin/classic/ + ''' + type = 'skin' + + +class ManifestOverload(ManifestEntry): + ''' + Abstract class for chrome entries defining some kind of overloading. + Used for overlay, override or style entries. + ''' + type = None + + def __init__(self, base, overloaded, overload, *flags): + ManifestEntry.__init__(self, base, *flags) + self.overloaded = overloaded + self.overload = overload + + def __str__(self): + return self.serialize(self.overloaded, self.overload) + + @property + def localized(self): + u = urlparse(self.overload) + return u.scheme == 'chrome' and \ + u.path.split('/')[0:2] == ['', 'locale'] + + +class ManifestOverlay(ManifestOverload): + ''' + Class for 'overlay' entries. 
+ overlay chrome://global/content/viewSource.xul \ + chrome://browser/content/viewSourceOverlay.xul + ''' + type = 'overlay' + + +class ManifestStyle(ManifestOverload): + ''' + Class for 'style' entries. + style chrome://global/content/customizeToolbar.xul \ + chrome://browser/skin/ + ''' + type = 'style' + + +class ManifestOverride(ManifestOverload): + ''' + Class for 'override' entries. + override chrome://global/locale/netError.dtd \ + chrome://browser/locale/netError.dtd + ''' + type = 'override' + + +class ManifestResource(ManifestEntry): + ''' + Class for 'resource' entries. + resource gre-resources toolkit/res/ + resource services-sync resource://gre/modules/services-sync/ + + The target may be a relative path or a resource or chrome url. + ''' + type = 'resource' + + def __init__(self, base, name, target, *flags): + ManifestEntry.__init__(self, base, *flags) + self.name = name + self.target = target + + def __str__(self): + return self.serialize(self.name, self.target) + + def rebase(self, base): + u = urlparse(self.target) + if u.scheme and u.scheme != 'jar': + return ManifestEntry.rebase(self, base) + clone = ManifestEntry.rebase(self, base) + clone.target = mozpath.rebase(self.base, base, self.target) + return clone + + +class ManifestBinaryComponent(ManifestEntryWithRelPath): + ''' + Class for 'binary-component' entries. + binary-component some/path/to/a/component.dll + ''' + type = 'binary-component' + + +class ManifestComponent(ManifestEntryWithRelPath): + ''' + Class for 'component' entries. + component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js + ''' + type = 'component' + + def __init__(self, base, cid, file, *flags): + ManifestEntryWithRelPath.__init__(self, base, file, *flags) + self.cid = cid + + def __str__(self): + return self.serialize(self.cid, self.relpath) + + +class ManifestInterfaces(ManifestEntryWithRelPath): + ''' + Class for 'interfaces' entries. 
+ interfaces foo.xpt + ''' + type = 'interfaces' + + +class ManifestCategory(ManifestEntry): + ''' + Class for 'category' entries. + category command-line-handler m-browser @mozilla.org/browser/clh; + ''' + type = 'category' + + def __init__(self, base, category, name, value, *flags): + ManifestEntry.__init__(self, base, *flags) + self.category = category + self.name = name + self.value = value + + def __str__(self): + return self.serialize(self.category, self.name, self.value) + + +class ManifestContract(ManifestEntry): + ''' + Class for 'contract' entries. + contract @mozilla.org/foo;1 {b2bba4df-057d-41ea-b6b1-94a10a8ede68} + ''' + type = 'contract' + + def __init__(self, base, contractID, cid, *flags): + ManifestEntry.__init__(self, base, *flags) + self.contractID = contractID + self.cid = cid + + def __str__(self): + return self.serialize(self.contractID, self.cid) + +# All manifest classes by their type name. +MANIFESTS_TYPES = dict([(c.type, c) for c in globals().values() + if type(c) == type and issubclass(c, ManifestEntry) + and hasattr(c, 'type') and c.type]) + +MANIFEST_RE = re.compile(r'^#.*$') + + +def parse_manifest_line(base, line): + ''' + Parse a line from a manifest file with the given base directory and + return the corresponding ManifestEntry instance. + ''' + # Remove comments + cmd = MANIFEST_RE.sub('', line).strip().split() + if not cmd: + return None + if not cmd[0] in MANIFESTS_TYPES: + return errors.fatal('Unknown manifest directive: %s' % cmd[0]) + return MANIFESTS_TYPES[cmd[0]](base, *cmd[1:]) + + +def parse_manifest(root, path, fileobj=None): + ''' + Parse a manifest file. 
+ ''' + base = mozpath.dirname(path) + if root: + path = os.path.normpath(os.path.abspath(os.path.join(root, path))) + if not fileobj: + fileobj = open(path) + linenum = 0 + for line in fileobj: + linenum += 1 + with errors.context(path, linenum): + e = parse_manifest_line(base, line) + if e: + yield e + + +def is_manifest(path): + ''' + Return whether the given path is that of a manifest file. + ''' + return path.endswith('.manifest') and not path.endswith('.CRT.manifest') \ + and not path.endswith('.exe.manifest') diff --git a/python/mozbuild/mozpack/copier.py b/python/mozbuild/mozpack/copier.py new file mode 100644 index 000000000..386930fe7 --- /dev/null +++ b/python/mozbuild/mozpack/copier.py @@ -0,0 +1,568 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import os +import stat +import sys + +from mozpack.errors import errors +from mozpack.files import ( + BaseFile, + Dest, +) +import mozpack.path as mozpath +import errno +from collections import ( + Counter, + OrderedDict, +) +import concurrent.futures as futures + + +class FileRegistry(object): + ''' + Generic container to keep track of a set of BaseFile instances. It + preserves the order under which the files are added, but doesn't keep + track of empty directories (directories are not stored at all). + The paths associated with the BaseFile instances are relative to an + unspecified (virtual) root directory. + + registry = FileRegistry() + registry.add('foo/bar', file_instance) + ''' + + def __init__(self): + self._files = OrderedDict() + self._required_directories = Counter() + self._partial_paths_cache = {} + + def _partial_paths(self, path): + ''' + Turn "foo/bar/baz/zot" into ["foo/bar/baz", "foo/bar", "foo"]. 
+ ''' + dir_name = path.rpartition('/')[0] + if not dir_name: + return [] + + partial_paths = self._partial_paths_cache.get(dir_name) + if partial_paths: + return partial_paths + + partial_paths = [dir_name] + self._partial_paths(dir_name) + + self._partial_paths_cache[dir_name] = partial_paths + return partial_paths + + def add(self, path, content): + ''' + Add a BaseFile instance to the container, under the given path. + ''' + assert isinstance(content, BaseFile) + if path in self._files: + return errors.error("%s already added" % path) + if self._required_directories[path] > 0: + return errors.error("Can't add %s: it is a required directory" % + path) + # Check whether any parent of the given path is already stored + partial_paths = self._partial_paths(path) + for partial_path in partial_paths: + if partial_path in self._files: + return errors.error("Can't add %s: %s is a file" % + (path, partial_path)) + self._files[path] = content + self._required_directories.update(partial_paths) + + def match(self, pattern): + ''' + Return the list of paths, stored in the container, matching the + given pattern. See the mozpack.path.match documentation for a + description of the handled patterns. + ''' + if '*' in pattern: + return [p for p in self.paths() + if mozpath.match(p, pattern)] + if pattern == '': + return self.paths() + if pattern in self._files: + return [pattern] + return [p for p in self.paths() + if mozpath.basedir(p, [pattern]) == pattern] + + def remove(self, pattern): + ''' + Remove paths matching the given pattern from the container. See the + mozpack.path.match documentation for a description of the handled + patterns. 
+ ''' + items = self.match(pattern) + if not items: + return errors.error("Can't remove %s: %s" % (pattern, + "not matching anything previously added")) + for i in items: + del self._files[i] + self._required_directories.subtract(self._partial_paths(i)) + + def paths(self): + ''' + Return all paths stored in the container, in the order they were added. + ''' + return self._files.keys() + + def __len__(self): + ''' + Return number of paths stored in the container. + ''' + return len(self._files) + + def __contains__(self, pattern): + raise RuntimeError("'in' operator forbidden for %s. Use contains()." % + self.__class__.__name__) + + def contains(self, pattern): + ''' + Return whether the container contains paths matching the given + pattern. See the mozpack.path.match documentation for a description of + the handled patterns. + ''' + return len(self.match(pattern)) > 0 + + def __getitem__(self, path): + ''' + Return the BaseFile instance stored in the container for the given + path. + ''' + return self._files[path] + + def __iter__(self): + ''' + Iterate over all (path, BaseFile instance) pairs from the container. + for path, file in registry: + (...) + ''' + return self._files.iteritems() + + def required_directories(self): + ''' + Return the set of directories required by the paths in the container, + in no particular order. The returned directories are relative to an + unspecified (virtual) root directory (and do not include said root + directory). + ''' + return set(k for k, v in self._required_directories.items() if v > 0) + + +class FileRegistrySubtree(object): + '''A proxy class to give access to a subtree of an existing FileRegistry. 
+ + Note this doesn't implement the whole FileRegistry interface.''' + def __new__(cls, base, registry): + if not base: + return registry + return object.__new__(cls) + + def __init__(self, base, registry): + self._base = base + self._registry = registry + + def _get_path(self, path): + # mozpath.join will return a trailing slash if path is empty, and we + # don't want that. + return mozpath.join(self._base, path) if path else self._base + + def add(self, path, content): + return self._registry.add(self._get_path(path), content) + + def match(self, pattern): + return [mozpath.relpath(p, self._base) + for p in self._registry.match(self._get_path(pattern))] + + def remove(self, pattern): + return self._registry.remove(self._get_path(pattern)) + + def paths(self): + return [p for p, f in self] + + def __len__(self): + return len(self.paths()) + + def contains(self, pattern): + return self._registry.contains(self._get_path(pattern)) + + def __getitem__(self, path): + return self._registry[self._get_path(path)] + + def __iter__(self): + for p, f in self._registry: + if mozpath.basedir(p, [self._base]): + yield mozpath.relpath(p, self._base), f + + +class FileCopyResult(object): + """Represents results of a FileCopier.copy operation.""" + + def __init__(self): + self.updated_files = set() + self.existing_files = set() + self.removed_files = set() + self.removed_directories = set() + + @property + def updated_files_count(self): + return len(self.updated_files) + + @property + def existing_files_count(self): + return len(self.existing_files) + + @property + def removed_files_count(self): + return len(self.removed_files) + + @property + def removed_directories_count(self): + return len(self.removed_directories) + + +class FileCopier(FileRegistry): + ''' + FileRegistry with the ability to copy the registered files to a separate + directory. 
+ ''' + def copy(self, destination, skip_if_older=True, + remove_unaccounted=True, + remove_all_directory_symlinks=True, + remove_empty_directories=True): + ''' + Copy all registered files to the given destination path. The given + destination can be an existing directory, or not exist at all. It + can't be e.g. a file. + The copy process acts a bit like rsync: files are not copied when they + don't need to (see mozpack.files for details on file.copy). + + By default, files in the destination directory that aren't + registered are removed and empty directories are deleted. In + addition, all directory symlinks in the destination directory + are deleted: this is a conservative approach to ensure that we + never accidently write files into a directory that is not the + destination directory. In the worst case, we might have a + directory symlink in the object directory to the source + directory. + + To disable removing of unregistered files, pass + remove_unaccounted=False. To disable removing empty + directories, pass remove_empty_directories=False. In rare + cases, you might want to maintain directory symlinks in the + destination directory (at least those that are not required to + be regular directories): pass + remove_all_directory_symlinks=False. Exercise caution with + this flag: you almost certainly do not want to preserve + directory symlinks. + + Returns a FileCopyResult that details what changed. + ''' + assert isinstance(destination, basestring) + assert not os.path.exists(destination) or os.path.isdir(destination) + + result = FileCopyResult() + have_symlinks = hasattr(os, 'symlink') + destination = os.path.normpath(destination) + + # We create the destination directory specially. We can't do this as + # part of the loop doing mkdir() below because that loop munges + # symlinks and permissions and parent directories of the destination + # directory may have their own weird schema. 
The contract is we only + # manage children of destination, not its parents. + try: + os.makedirs(destination) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + # Because we could be handling thousands of files, code in this + # function is optimized to minimize system calls. We prefer CPU time + # in Python over possibly I/O bound filesystem calls to stat() and + # friends. + + required_dirs = set([destination]) + required_dirs |= set(os.path.normpath(os.path.join(destination, d)) + for d in self.required_directories()) + + # Ensure destination directories are in place and proper. + # + # The "proper" bit is important. We need to ensure that directories + # have appropriate permissions or we will be unable to discover + # and write files. Furthermore, we need to verify directories aren't + # symlinks. + # + # Symlinked directories (a symlink whose target is a directory) are + # incompatible with us because our manifest talks in terms of files, + # not directories. If we leave symlinked directories unchecked, we + # would blindly follow symlinks and this might confuse file + # installation. For example, if an existing directory is a symlink + # to directory X and we attempt to install a symlink in this directory + # to a file in directory X, we may create a recursive symlink! + for d in sorted(required_dirs, key=len): + try: + os.mkdir(d) + except OSError as error: + if error.errno != errno.EEXIST: + raise + + # We allow the destination to be a symlink because the caller + # is responsible for managing the destination and we assume + # they know what they are doing. + if have_symlinks and d != destination: + st = os.lstat(d) + if stat.S_ISLNK(st.st_mode): + # While we have remove_unaccounted, it doesn't apply + # to directory symlinks because if it did, our behavior + # could be very wrong. 
+ os.remove(d) + os.mkdir(d) + + if not os.access(d, os.W_OK): + umask = os.umask(0o077) + os.umask(umask) + os.chmod(d, 0o777 & ~umask) + + if isinstance(remove_unaccounted, FileRegistry): + existing_files = set(os.path.normpath(os.path.join(destination, p)) + for p in remove_unaccounted.paths()) + existing_dirs = set(os.path.normpath(os.path.join(destination, p)) + for p in remove_unaccounted + .required_directories()) + existing_dirs |= {os.path.normpath(destination)} + else: + # While we have remove_unaccounted, it doesn't apply to empty + # directories because it wouldn't make sense: an empty directory + # is empty, so removing it should have no effect. + existing_dirs = set() + existing_files = set() + for root, dirs, files in os.walk(destination): + # We need to perform the same symlink detection as above. + # os.walk() doesn't follow symlinks into directories by + # default, so we need to check dirs (we can't wait for root). + if have_symlinks: + filtered = [] + for d in dirs: + full = os.path.join(root, d) + st = os.lstat(full) + if stat.S_ISLNK(st.st_mode): + # This directory symlink is not a required + # directory: any such symlink would have been + # removed and a directory created above. + if remove_all_directory_symlinks: + os.remove(full) + result.removed_files.add( + os.path.normpath(full)) + else: + existing_files.add(os.path.normpath(full)) + else: + filtered.append(d) + + dirs[:] = filtered + + existing_dirs.add(os.path.normpath(root)) + + for d in dirs: + existing_dirs.add(os.path.normpath(os.path.join(root, d))) + + for f in files: + existing_files.add(os.path.normpath(os.path.join(root, f))) + + # Now we reconcile the state of the world against what we want. + dest_files = set() + + # Install files. + # + # Creating/appending new files on Windows/NTFS is slow. So we use a + # thread pool to speed it up significantly. 
The performance of this + # loop is so critical to common build operations on Linux that the + # overhead of the thread pool is worth avoiding, so we have 2 code + # paths. We also employ a low water mark to prevent thread pool + # creation if number of files is too small to benefit. + copy_results = [] + if sys.platform == 'win32' and len(self) > 100: + with futures.ThreadPoolExecutor(4) as e: + fs = [] + for p, f in self: + destfile = os.path.normpath(os.path.join(destination, p)) + fs.append((destfile, e.submit(f.copy, destfile, skip_if_older))) + + copy_results = [(destfile, f.result) for destfile, f in fs] + else: + for p, f in self: + destfile = os.path.normpath(os.path.join(destination, p)) + copy_results.append((destfile, f.copy(destfile, skip_if_older))) + + for destfile, copy_result in copy_results: + dest_files.add(destfile) + if copy_result: + result.updated_files.add(destfile) + else: + result.existing_files.add(destfile) + + # Remove files no longer accounted for. + if remove_unaccounted: + for f in existing_files - dest_files: + # Windows requires write access to remove files. + if os.name == 'nt' and not os.access(f, os.W_OK): + # It doesn't matter what we set permissions to since we + # will remove this file shortly. + os.chmod(f, 0o600) + + os.remove(f) + result.removed_files.add(f) + + if not remove_empty_directories: + return result + + # Figure out which directories can be removed. This is complicated + # by the fact we optionally remove existing files. This would be easy + # if we walked the directory tree after installing files. But, we're + # trying to minimize system calls. + + # Start with the ideal set. + remove_dirs = existing_dirs - required_dirs + + # Then don't remove directories if we didn't remove unaccounted files + # and one of those files exists. 
class Jarrer(FileRegistry, BaseFile):
    '''
    FileRegistry with the ability to copy and pack the registered files as a
    jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
    '''
    def __init__(self, compress=True, optimize=True):
        '''
        Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
        for details on the compress and optimize arguments.
        '''
        self.compress = compress
        self.optimize = optimize
        # Paths to pass to JarWriter.preload() at copy() time.
        self._preload = []
        self._compress_options = {}  # Map path to compress boolean option.
        FileRegistry.__init__(self)

    def add(self, path, content, compress=None):
        # compress=None means "use the Jarrer-wide default" set in __init__.
        FileRegistry.add(self, path, content)
        if compress is not None:
            self._compress_options[path] = compress

    def copy(self, dest, skip_if_older=True):
        '''
        Pack all registered files in the given destination jar. The given
        destination jar may be a path to jar file, or a Dest instance for
        a jar file.
        If the destination jar file exists, its (compressed) contents are used
        instead of the registered BaseFile instances when appropriate.
        '''
        class DeflaterDest(Dest):
            '''
            Dest-like class, reading from a file-like object initially, but
            switching to a Deflater object if written to.

                dest = DeflaterDest(original_file)
                dest.read()      # Reads original_file
                dest.write(data) # Creates a Deflater and write data there
                dest.read()      # Re-opens the Deflater and reads from it
            '''
            def __init__(self, orig=None, compress=True):
                self.mode = None
                self.deflater = orig
                self.compress = compress

            def read(self, length=-1):
                if self.mode != 'r':
                    assert self.mode is None
                    self.mode = 'r'
                return self.deflater.read(length)

            def write(self, data):
                # First write after a read (or from scratch) switches to a
                # fresh Deflater; the original compressed data is discarded.
                if self.mode != 'w':
                    from mozpack.mozjar import Deflater
                    self.deflater = Deflater(self.compress)
                    self.mode = 'w'
                self.deflater.write(data)

            def exists(self):
                return self.deflater is not None

        if isinstance(dest, basestring):
            dest = Dest(dest)
        assert isinstance(dest, Dest)

        from mozpack.mozjar import JarWriter, JarReader
        try:
            old_jar = JarReader(fileobj=dest)
        except Exception:
            # Not an existing/readable jar; build from scratch.
            old_jar = []

        old_contents = dict([(f.filename, f) for f in old_jar])

        with JarWriter(fileobj=dest, compress=self.compress,
                       optimize=self.optimize) as jar:
            for path, file in self:
                compress = self._compress_options.get(path, self.compress)

                # Seed the dest with the old jar's member so that, when
                # file.copy() decides nothing changed and doesn't write,
                # the already-compressed data is reused as-is.
                if path in old_contents:
                    deflater = DeflaterDest(old_contents[path], compress)
                else:
                    deflater = DeflaterDest(compress=compress)
                file.copy(deflater, skip_if_older)
                jar.add(path, deflater.deflater, mode=file.mode, compress=compress)
            if self._preload:
                jar.preload(self._preload)

    def open(self):
        raise RuntimeError('unsupported')

    def preload(self, paths):
        '''
        Add the given set of paths to the list of preloaded files. See
        mozpack.mozjar.JarWriter documentation for details on jar preloading.
        '''
        self._preload.extend(paths)
See + mozpack.mozjar.JarWriter documentation for details on jar preloading. + ''' + self._preload.extend(paths) diff --git a/python/mozbuild/mozpack/dmg.py b/python/mozbuild/mozpack/dmg.py new file mode 100644 index 000000000..036302214 --- /dev/null +++ b/python/mozbuild/mozpack/dmg.py @@ -0,0 +1,121 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import errno +import mozfile +import os +import platform +import shutil +import subprocess + +is_linux = platform.system() == 'Linux' + +def mkdir(dir): + if not os.path.isdir(dir): + try: + os.makedirs(dir) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + +def chmod(dir): + 'Set permissions of DMG contents correctly' + subprocess.check_call(['chmod', '-R', 'a+rX,a-st,u+w,go-w', dir]) + + +def rsync(source, dest): + 'rsync the contents of directory source into directory dest' + # Ensure a trailing slash so rsync copies the *contents* of source. + if not source.endswith('/'): + source += '/' + subprocess.check_call(['rsync', '-a', '--copy-unsafe-links', + source, dest]) + + +def set_folder_icon(dir): + 'Set HFS attributes of dir to use a custom icon' + if not is_linux: + #TODO: bug 1197325 - figure out how to support this on Linux + subprocess.check_call(['SetFile', '-a', 'C', dir]) + + +def create_dmg_from_staged(stagedir, output_dmg, tmpdir, volume_name): + 'Given a prepared directory stagedir, produce a DMG at output_dmg.' 
def create_dmg_from_staged(stagedir, output_dmg, tmpdir, volume_name):
    'Given a prepared directory stagedir, produce a DMG at output_dmg.'
    if not is_linux:
        # Running on OS X
        # Build an uncompressed hybrid HFS image first, then convert it
        # into the final bzip2-compressed (UDBZ) disk image.
        hybrid = os.path.join(tmpdir, 'hybrid.dmg')
        subprocess.check_call(['hdiutil', 'makehybrid', '-hfs',
                               '-hfs-volume-name', volume_name,
                               '-hfs-openfolder', stagedir,
                               '-ov', stagedir,
                               '-o', hybrid])
        subprocess.check_call(['hdiutil', 'convert', '-format', 'UDBZ',
                               '-imagekey', 'bzip2-level=9',
                               '-ov', hybrid, '-o', output_dmg])
    else:
        # Cross-building on Linux: lay the image out with GENISOIMAGE,
        # then wrap it with the external DMG_TOOL from the build config.
        import buildconfig
        uncompressed = os.path.join(tmpdir, 'uncompressed.dmg')
        subprocess.check_call([
            buildconfig.substs['GENISOIMAGE'],
            '-V', volume_name,
            '-D', '-R', '-apple', '-no-pad',
            '-o', uncompressed,
            stagedir
        ])
        subprocess.check_call([
            buildconfig.substs['DMG_TOOL'],
            'dmg',
            uncompressed,
            output_dmg
        ],
            # dmg is seriously chatty
            stdout=open(os.devnull, 'wb'))


def check_tools(*tools):
    '''
    Check that each tool named in tools exists in SUBSTS and is executable.

    Raises a plain Exception naming the first tool that is missing, not a
    file, or not executable.
    '''
    import buildconfig
    for tool in tools:
        path = buildconfig.substs[tool]
        if not path:
            raise Exception('Required tool "%s" not found' % tool)
        if not os.path.isfile(path):
            raise Exception('Required tool "%s" not found at path "%s"' % (tool, path))
        if not os.access(path, os.X_OK):
            raise Exception('Required tool "%s" at path "%s" is not executable' % (tool, path))
class ErrorMessage(Exception):
    '''Exception type raised from errors.error() and errors.fatal()'''


class AccumulatedErrors(Exception):
    '''Exception type raised from errors.accumulate()'''


class ErrorCollector(object):
    '''
    Error handling/logging class. A global instance, errors, is provided for
    convenience.

    Warnings, errors and fatal errors may be logged by calls to the following
    functions:
        errors.warn(message)
        errors.error(message)
        errors.fatal(message)

    Warnings only send the message on the logging output, while errors and
    fatal errors send the message and throw an ErrorMessage exception. The
    exception, however, may be deferred (see accumulate() below).

    Errors may be ignored by calling:
        errors.ignore_errors()
    After calling that function, only fatal errors throw an exception.

    Messages may be augmented with (filename, linenumber) context, set with
    errors.context() used as a context manager; arbitrary nesting is
    supported.

    Errors can have their exception deferred so several errors are reported
    at once, using errors.accumulate() as a context manager; a single
    AccumulatedErrors exception is then thrown at the end.
    '''
    out = sys.stderr
    WARN = 1
    ERROR = 2
    FATAL = 3
    _level = ERROR       # Messages at or above this level raise.
    _context = []        # Stack of (filename, linenumber) pairs.
    _count = None        # None: raise immediately; int: accumulating.

    def ignore_errors(self, ignore=True):
        '''Raise only for fatal errors (ignore=True) or for errors too.'''
        if ignore:
            self._level = self.FATAL
        else:
            self._level = self.ERROR

    def _full_message(self, level, msg):
        '''Prefix msg with its severity and the current file:line context.'''
        if level >= self._level:
            level = 'Error'
        else:
            level = 'Warning'
        if self._context:
            file, line = self._context[-1]
            return "%s: %s:%d: %s" % (level, file, line, msg)
        return "%s: %s" % (level, msg)

    def _handle(self, level, msg):
        '''Log msg; raise ErrorMessage when level triggers and not deferred.'''
        msg = self._full_message(level, msg)
        if level >= self._level:
            if self._count is None:
                raise ErrorMessage(msg)
            self._count += 1
        # write() instead of the Python 2 `print >>self.out` statement:
        # identical output, but works under both Python 2 and 3.
        self.out.write(msg + '\n')

    def fatal(self, msg):
        self._handle(self.FATAL, msg)

    def error(self, msg):
        self._handle(self.ERROR, msg)

    def warn(self, msg):
        self._handle(self.WARN, msg)

    def get_context(self):
        '''Return the innermost (file, line) context, or None.'''
        if self._context:
            return self._context[-1]

    @contextmanager
    def context(self, file, line):
        if file and line:
            self._context.append((file, line))
        # try/finally so the context is popped even when the body raises
        # (e.g. a fatal error); otherwise later messages would carry a
        # stale file:line prefix.
        try:
            yield
        finally:
            if file and line:
                self._context.pop()

    @contextmanager
    def accumulate(self):
        assert self._count is None
        self._count = 0
        # try/finally so _count is reset even when the body raises;
        # otherwise a later accumulate() would hit the assert above.
        try:
            yield
        finally:
            count = self._count
            self._count = None
        if count:
            raise AccumulatedErrors()

    @property
    def count(self):
        # _count can be None.
        return self._count if self._count else 0


errors = ErrorCollector()
# Magic numbers recognized at the start of executable images.
MACHO_SIGNATURES = [
    0xfeedface,  # mach-o 32-bits big endian
    0xcefaedfe,  # mach-o 32-bits little endian
    0xfeedfacf,  # mach-o 64-bits big endian
    0xcffaedfe,  # mach-o 64-bits little endian
]

FAT_SIGNATURE = 0xcafebabe  # mach-o FAT binary

ELF_SIGNATURE = 0x7f454c46  # Elf binary

UNKNOWN = 0
MACHO = 1
ELF = 2


def get_type(path):
    '''
    Check the signature of the given file and return what kind of executable
    it is: one of UNKNOWN, MACHO or ELF.
    '''
    with open(path, 'rb') as fh:
        header = fh.read(4)
        if len(header) < 4:
            return UNKNOWN
        magic = struct.unpack('>L', header)[0]
        if magic == ELF_SIGNATURE:
            return ELF
        if magic in MACHO_SIGNATURES:
            return MACHO
        if magic != FAT_SIGNATURE:
            return UNKNOWN
        # A FAT magic needs disambiguation because Java class files share
        # the 0xcafebabe magic. Following file(1): Mach-O stores the
        # architecture count in the next big-endian word (there are only
        # 18 labelled Mach-O architectures), while Java stores a version
        # number there, which started at 43.0.
        arch_count = fh.read(4)
        if len(arch_count) < 4:
            return UNKNOWN
        if struct.unpack('>L', arch_count)[0] < 20:
            return MACHO
        return UNKNOWN
def may_strip(path):
    '''
    Return whether strip() should be called
    '''
    # PKG_SKIP_STRIP comes from the build configuration (substs).
    from buildconfig import substs
    return not substs['PKG_SKIP_STRIP']


def strip(path):
    '''
    Execute the STRIP command with STRIP_FLAGS on the given path.

    Reports a fatal error through mozpack.errors if the command fails.
    '''
    from buildconfig import substs
    strip = substs['STRIP']
    flags = substs['STRIP_FLAGS'].split() if 'STRIP_FLAGS' in substs else []
    cmd = [strip] + flags + [path]
    if subprocess.call(cmd) != 0:
        errors.fatal('Error executing ' + ' '.join(cmd))


def may_elfhack(path):
    '''
    Return whether elfhack() should be called
    '''
    # elfhack only supports libraries. We should check the ELF header for
    # the right flag, but checking the file extension works too.
    from buildconfig import substs
    return ('USE_ELF_HACK' in substs and substs['USE_ELF_HACK'] and
            path.endswith(substs['DLL_SUFFIX']) and
            'COMPILE_ENVIRONMENT' in substs and substs['COMPILE_ENVIRONMENT'])


def elfhack(path):
    '''
    Execute the elfhack command on the given path.

    Reports a fatal error through mozpack.errors if the command fails.
    '''
    from buildconfig import topobjdir
    cmd = [os.path.join(topobjdir, 'build/unix/elfhack/elfhack'), path]
    if 'ELF_HACK_FLAGS' in os.environ:
        # Insert extra flags between the executable and the target path.
        cmd[1:0] = os.environ['ELF_HACK_FLAGS'].split()
    if subprocess.call(cmd) != 0:
        errors.fatal('Error executing ' + ' '.join(cmd))
+ +from __future__ import absolute_import + +import errno +import os +import platform +import shutil +import stat +import subprocess +import uuid +import mozbuild.makeutil as makeutil +from mozbuild.preprocessor import Preprocessor +from mozbuild.util import FileAvoidWrite +from mozpack.executables import ( + is_executable, + may_strip, + strip, + may_elfhack, + elfhack, +) +from mozpack.chrome.manifest import ManifestEntry +from io import BytesIO +from mozpack.errors import ( + ErrorMessage, + errors, +) +from mozpack.mozjar import JarReader +import mozpack.path as mozpath +from collections import OrderedDict +from jsmin import JavascriptMinify +from tempfile import ( + mkstemp, + NamedTemporaryFile, +) +from tarfile import ( + TarFile, + TarInfo, +) +try: + import hglib +except ImportError: + hglib = None + + +# For clean builds, copying files on win32 using CopyFile through ctypes is +# ~2x as fast as using shutil.copyfile. +if platform.system() != 'Windows': + _copyfile = shutil.copyfile +else: + import ctypes + _kernel32 = ctypes.windll.kernel32 + _CopyFileA = _kernel32.CopyFileA + _CopyFileW = _kernel32.CopyFileW + + def _copyfile(src, dest): + # False indicates `dest` should be overwritten if it exists already. + if isinstance(src, unicode) and isinstance(dest, unicode): + _CopyFileW(src, dest, False) + elif isinstance(src, str) and isinstance(dest, str): + _CopyFileA(src, dest, False) + else: + raise TypeError('mismatched path types!') + +class Dest(object): + ''' + Helper interface for BaseFile.copy. The interface works as follows: + - read() and write() can be used to sequentially read/write from the + underlying file. + - a call to read() after a write() will re-open the underlying file and + read from it. + - a call to write() after a read() will re-open the underlying file, + emptying it, and write to it. 
class Dest(object):
    '''
    Helper interface for BaseFile.copy that lazily (re)opens its backing
    file:
    - read() and write() sequentially read from / write to the file.
    - a read() following a write() reopens the file and reads it back.
    - a write() following a read() reopens the file, truncating it first.
    '''
    def __init__(self, path):
        self.path = path
        self.mode = None

    @property
    def name(self):
        return self.path

    def _reopen(self, mode, flags):
        # (Re)open the backing file when switching between read and write.
        if self.mode != mode:
            self.file = open(self.path, flags)
            self.mode = mode

    def read(self, length=-1):
        self._reopen('r', 'rb')
        return self.file.read(length)

    def write(self, data):
        self._reopen('w', 'wb')
        return self.file.write(data)

    def exists(self):
        return os.path.exists(self.path)

    def close(self):
        if self.mode:
            self.mode = None
            self.file.close()
class BaseFile(object):
    '''
    Base interface and helper for file copying. Derived class may implement
    their own copy function, or rely on BaseFile.copy using the open() member
    function and/or the path property.
    '''
    @staticmethod
    def is_older(first, second):
        '''
        Compares the modification time of two files, and returns whether the
        ``first`` file is older than the ``second`` file.
        '''
        # os.path.getmtime returns a result in seconds with precision up to
        # the microsecond. But microsecond is too precise because
        # shutil.copystat only copies milliseconds, and seconds is not
        # enough precision.
        return int(os.path.getmtime(first) * 1000) \
            <= int(os.path.getmtime(second) * 1000)

    @staticmethod
    def any_newer(dest, inputs):
        '''
        Compares the modification time of ``dest`` to multiple input files,
        and returns whether any of the ``inputs`` is newer (has a later
        mtime) than ``dest``.
        '''
        # Same millisecond rationale as in is_older() above.
        dest_mtime = int(os.path.getmtime(dest) * 1000)
        for input in inputs:
            if dest_mtime < int(os.path.getmtime(input) * 1000):
                return True
        return False

    @staticmethod
    def normalize_mode(mode):
        # Normalize file mode:
        # - keep file type (e.g. S_IFREG)
        ret = stat.S_IFMT(mode)
        # - expand user read and execute permissions to everyone
        if mode & 0400:
            ret |= 0444
        if mode & 0100:
            ret |= 0111
        # - keep user write permissions
        if mode & 0200:
            ret |= 0200
        # - leave away sticky bit, setuid, setgid
        return ret

    def copy(self, dest, skip_if_older=True):
        '''
        Copy the BaseFile content to the destination given as a string or a
        Dest instance. Avoids replacing existing files if the BaseFile content
        matches that of the destination, or in case of plain files, if the
        destination is newer than the original file. This latter behaviour is
        disabled when skip_if_older is False.
        Returns whether a copy was actually performed (True) or not (False).
        '''
        if isinstance(dest, basestring):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        can_skip_content_check = False
        if not dest.exists():
            can_skip_content_check = True
        elif getattr(self, 'path', None) and getattr(dest, 'path', None):
            if skip_if_older and BaseFile.is_older(self.path, dest.path):
                return False
            elif os.path.getsize(self.path) != os.path.getsize(dest.path):
                # Different sizes: contents necessarily differ, no need to
                # compare them chunk by chunk below.
                can_skip_content_check = True

        if can_skip_content_check:
            if getattr(self, 'path', None) and getattr(dest, 'path', None):
                _copyfile(self.path, dest.path)
                shutil.copystat(self.path, dest.path)
            else:
                # Ensure the file is always created
                if not dest.exists():
                    dest.write('')
                shutil.copyfileobj(self.open(), dest)
            return True

        # Same size (or no paths to compare): stream both sides and only
        # rewrite the destination from the first differing chunk onwards.
        src = self.open()
        copy_content = ''
        while True:
            dest_content = dest.read(32768)
            src_content = src.read(32768)
            copy_content += src_content
            if len(dest_content) == len(src_content) == 0:
                break
            # If the read content differs between origin and destination,
            # write what was read up to now, and copy the remainder.
            if dest_content != src_content:
                dest.write(copy_content)
                shutil.copyfileobj(src, dest)
                break
        if hasattr(self, 'path') and hasattr(dest, 'path'):
            shutil.copystat(self.path, dest.path)
        return True

    def open(self):
        '''
        Return a file-like object allowing to read() the content of the
        associated file. This is meant to be overloaded in subclasses to return
        a custom file-like object.
        '''
        assert self.path is not None
        return open(self.path, 'rb')

    def read(self):
        raise NotImplementedError('BaseFile.read() not implemented. Bug 1170329.')

    @property
    def mode(self):
        '''
        Return the file's unix mode, or None if it has no meaning.
        '''
        return None
class AbsoluteSymlinkFile(File):
    '''File class that is copied by symlinking (if available).

    This class only works if the target path is absolute.
    '''

    def __init__(self, path):
        if not os.path.isabs(path):
            raise ValueError('Symlink target not absolute: %s' % path)

        File.__init__(self, path)

    def copy(self, dest, skip_if_older=True):
        '''
        Ensure dest is a symlink to self.path, falling back to a plain file
        copy where symlinks aren't supported. Returns whether anything was
        changed.
        '''
        assert isinstance(dest, basestring)

        # The logic in this function is complicated by the fact that symlinks
        # aren't universally supported. So, where symlinks aren't supported, we
        # fall back to file copying. Keep in mind that symlink support is
        # per-filesystem, not per-OS.

        # Handle the simple case where symlinks are definitely not supported by
        # falling back to file copy.
        if not hasattr(os, 'symlink'):
            return File.copy(self, dest, skip_if_older=skip_if_older)

        # Always verify the symlink target path exists.
        if not os.path.exists(self.path):
            raise ErrorMessage('Symlink target path does not exist: %s' % self.path)

        st = None

        try:
            st = os.lstat(dest)
        except OSError as ose:
            # ENOENT simply means dest doesn't exist yet; anything else is
            # a real error.
            if ose.errno != errno.ENOENT:
                raise

        # If the dest is a symlink pointing to us, we have nothing to do.
        # If it's the wrong symlink, the filesystem must support symlinks,
        # so we replace with a proper symlink.
        if st and stat.S_ISLNK(st.st_mode):
            link = os.readlink(dest)
            if link == self.path:
                return False

            os.remove(dest)
            os.symlink(self.path, dest)
            return True

        # If the destination doesn't exist, we try to create a symlink. If
        # that fails, we fall back to copy code.
        if not st:
            try:
                os.symlink(self.path, dest)
                return True
            except OSError:
                return File.copy(self, dest, skip_if_older=skip_if_older)

        # Now the complicated part. If the destination exists, we could be
        # replacing a file with a symlink. Or, the filesystem may not support
        # symlinks. We want to minimize I/O overhead for performance reasons,
        # so we keep the existing destination file around as long as possible.
        # A lot of the system calls would be eliminated if we cached whether
        # symlinks are supported. However, even if we performed a single
        # up-front test of whether the root of the destination directory
        # supports symlinks, there's no guarantee that all operations for that
        # dest (or source) would be on the same filesystem and would support
        # symlinks.
        #
        # Our strategy is to attempt to create a new symlink with a random
        # name. If that fails, we fall back to copy mode. If that works, we
        # remove the old destination and move the newly-created symlink into
        # its place.

        temp_dest = os.path.join(os.path.dirname(dest), str(uuid.uuid4()))
        try:
            os.symlink(self.path, temp_dest)
        # TODO Figure out exactly how symlink creation fails and only trap
        # that.
        except EnvironmentError:
            return File.copy(self, dest, skip_if_older=skip_if_older)

        # If removing the original file fails, don't forget to clean up the
        # temporary symlink.
        try:
            os.remove(dest)
        except EnvironmentError:
            os.remove(temp_dest)
            raise

        os.rename(temp_dest, dest)
        return True
class PreprocessedFile(BaseFile):
    '''
    File class for a file that is preprocessed. PreprocessedFile.copy() runs
    the preprocessor on the file to create the output.
    '''
    def __init__(self, path, depfile_path, marker, defines, extra_depends=None,
                 silence_missing_directive_warnings=False):
        # path: source file to preprocess.
        # depfile_path: optional make-style dependency file written/read to
        #   decide whether reprocessing is needed.
        # marker: preprocessor directive marker (e.g. '#' or '%').
        # defines: dict of preprocessor defines.
        # extra_depends: additional paths whose mtimes force reprocessing.
        self.path = path
        self.depfile = depfile_path
        self.marker = marker
        self.defines = defines
        self.extra_depends = list(extra_depends or [])
        self.silence_missing_directive_warnings = \
            silence_missing_directive_warnings

    def copy(self, dest, skip_if_older=True):
        '''
        Invokes the preprocessor to create the destination file.
        Returns whether the destination was (re)generated.
        '''
        if isinstance(dest, basestring):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # We have to account for the case where the destination exists and is a
        # symlink to something. Since we know the preprocessor is certainly not
        # going to create a symlink, we can just remove the existing one. If the
        # destination is not a symlink, we leave it alone, since we're going to
        # overwrite its contents anyway.
        # If symlinks aren't supported at all, we can skip this step.
        if hasattr(os, 'symlink'):
            if os.path.islink(dest.path):
                os.remove(dest.path)

        pp_deps = set(self.extra_depends)

        # If a dependency file was specified, and it exists, add any
        # dependencies from that file to our list.
        if self.depfile and os.path.exists(self.depfile):
            target = mozpath.normpath(dest.name)
            with open(self.depfile, 'rb') as fileobj:
                for rule in makeutil.read_dep_makefile(fileobj):
                    if target in rule.targets():
                        pp_deps.update(rule.dependencies())

        skip = False
        if dest.exists() and skip_if_older:
            # If a dependency file was specified, and it doesn't exist,
            # assume that the preprocessor needs to be rerun. That will
            # regenerate the dependency file.
            if self.depfile and not os.path.exists(self.depfile):
                skip = False
            else:
                skip = not BaseFile.any_newer(dest.path, pp_deps)

        if skip:
            return False

        deps_out = None
        if self.depfile:
            deps_out = FileAvoidWrite(self.depfile)
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with open(self.path, 'rU') as input:
            pp.processFile(input=input, output=dest, depfile=deps_out)

        dest.close()
        if self.depfile:
            deps_out.close()

        return True
+ ''' + def __init__(self, tar, info): + assert isinstance(info, TarInfo) + assert isinstance(tar, TarFile) + GeneratedFile.__init__(self, tar.extractfile(info).read()) + self._mode = self.normalize_mode(info.mode) + + @property + def mode(self): + return self._mode + + def read(self): + return self.content + +class XPTFile(GeneratedFile): + ''' + File class for a linked XPT file. It takes several XPT files as input + (using the add() and remove() member functions), and links them at copy() + time. + ''' + def __init__(self): + self._files = set() + + def add(self, xpt): + ''' + Add the given XPT file (as a BaseFile instance) to the list of XPTs + to link. + ''' + assert isinstance(xpt, BaseFile) + self._files.add(xpt) + + def remove(self, xpt): + ''' + Remove the given XPT file (as a BaseFile instance) from the list of + XPTs to link. + ''' + assert isinstance(xpt, BaseFile) + self._files.remove(xpt) + + def copy(self, dest, skip_if_older=True): + ''' + Link the registered XPTs and place the resulting linked XPT at the + destination given as a string or a Dest instance. Avoids an expensive + XPT linking if the interfaces in an existing destination match those of + the individual XPTs to link. + skip_if_older is ignored. + ''' + if isinstance(dest, basestring): + dest = Dest(dest) + assert isinstance(dest, Dest) + + from xpt import xpt_link, Typelib, Interface + all_typelibs = [Typelib.read(f.open()) for f in self._files] + if dest.exists(): + # Typelib.read() needs to seek(), so use a BytesIO for dest + # content. 
+ dest_interfaces = \ + dict((i.name, i) + for i in Typelib.read(BytesIO(dest.read())).interfaces + if i.iid != Interface.UNRESOLVED_IID) + identical = True + for f in self._files: + typelib = Typelib.read(f.open()) + for i in typelib.interfaces: + if i.iid != Interface.UNRESOLVED_IID and \ + not (i.name in dest_interfaces and + i == dest_interfaces[i.name]): + identical = False + break + if identical: + return False + s = BytesIO() + xpt_link(all_typelibs).write(s) + dest.write(s.getvalue()) + return True + + def open(self): + raise RuntimeError("Unsupported") + + def isempty(self): + ''' + Return whether there are XPT files to link. + ''' + return len(self._files) == 0 + + +class ManifestFile(BaseFile): + ''' + File class for a manifest file. It takes individual manifest entries (using + the add() and remove() member functions), and adjusts them to be relative + to the base path for the manifest, given at creation. + Example: + There is a manifest entry "content foobar foobar/content/" relative + to "foobar/chrome". When packaging, the entry will be stored in + jar:foobar/omni.ja!/chrome/chrome.manifest, which means the entry + will have to be relative to "chrome" instead of "foobar/chrome". This + doesn't really matter when serializing the entry, since this base path + is not written out, but it matters when moving the entry at the same + time, e.g. to jar:foobar/omni.ja!/chrome.manifest, which we don't do + currently but could in the future. + ''' + def __init__(self, base, entries=None): + self._entries = entries if entries else [] + self._base = base + + def add(self, entry): + ''' + Add the given entry to the manifest. Entries are rebased at open() time + instead of add() time so that they can be more easily remove()d. + ''' + assert isinstance(entry, ManifestEntry) + self._entries.append(entry) + + def remove(self, entry): + ''' + Remove the given entry from the manifest. 
+ ''' + assert isinstance(entry, ManifestEntry) + self._entries.remove(entry) + + def open(self): + ''' + Return a file-like object allowing to read() the serialized content of + the manifest. + ''' + return BytesIO(''.join('%s\n' % e.rebase(self._base) + for e in self._entries)) + + def __iter__(self): + ''' + Iterate over entries in the manifest file. + ''' + return iter(self._entries) + + def isempty(self): + ''' + Return whether there are manifest entries to write + ''' + return len(self._entries) == 0 + + +class MinifiedProperties(BaseFile): + ''' + File class for minified properties. This wraps around a BaseFile instance, + and removes lines starting with a # from its content. + ''' + def __init__(self, file): + assert isinstance(file, BaseFile) + self._file = file + + def open(self): + ''' + Return a file-like object allowing to read() the minified content of + the properties file. + ''' + return BytesIO(''.join(l for l in self._file.open().readlines() + if not l.startswith('#'))) + + +class MinifiedJavaScript(BaseFile): + ''' + File class for minifying JavaScript files. 
+ ''' + def __init__(self, file, verify_command=None): + assert isinstance(file, BaseFile) + self._file = file + self._verify_command = verify_command + + def open(self): + output = BytesIO() + minify = JavascriptMinify(self._file.open(), output, quote_chars="'\"`") + minify.minify() + output.seek(0) + + if not self._verify_command: + return output + + input_source = self._file.open().read() + output_source = output.getvalue() + + with NamedTemporaryFile() as fh1, NamedTemporaryFile() as fh2: + fh1.write(input_source) + fh2.write(output_source) + fh1.flush() + fh2.flush() + + try: + args = list(self._verify_command) + args.extend([fh1.name, fh2.name]) + subprocess.check_output(args, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + errors.warn('JS minification verification failed for %s:' % + (getattr(self._file, 'path', '<unknown>'))) + # Prefix each line with "Warning:" so mozharness doesn't + # think these error messages are real errors. + for line in e.output.splitlines(): + errors.warn(line) + + return self._file.open() + + return output + + +class BaseFinder(object): + def __init__(self, base, minify=False, minify_js=False, + minify_js_verify_command=None): + ''' + Initializes the instance with a reference base directory. + + The optional minify argument specifies whether minification of code + should occur. minify_js is an additional option to control minification + of JavaScript. It requires minify to be True. + + minify_js_verify_command can be used to optionally verify the results + of JavaScript minification. If defined, it is expected to be an iterable + that will constitute the first arguments to a called process which will + receive the filenames of the original and minified JavaScript files. + The invoked process can then verify the results. If minification is + rejected, the process exits with a non-0 exit code and the original + JavaScript source is used. 
An example value for this argument is + ('/path/to/js', '/path/to/verify/script.js'). + ''' + if minify_js and not minify: + raise ValueError('minify_js requires minify.') + + self.base = base + self._minify = minify + self._minify_js = minify_js + self._minify_js_verify_command = minify_js_verify_command + + def find(self, pattern): + ''' + Yield path, BaseFile_instance pairs for all files under the base + directory and its subdirectories that match the given pattern. See the + mozpack.path.match documentation for a description of the handled + patterns. + ''' + while pattern.startswith('/'): + pattern = pattern[1:] + for p, f in self._find(pattern): + yield p, self._minify_file(p, f) + + def get(self, path): + """Obtain a single file. + + Where ``find`` is tailored towards matching multiple files, this method + is used for retrieving a single file. Use this method when performance + is critical. + + Returns a ``BaseFile`` if at most one file exists or ``None`` otherwise. + """ + files = list(self.find(path)) + if len(files) != 1: + return None + return files[0][1] + + def __iter__(self): + ''' + Iterates over all files under the base directory (excluding files + starting with a '.' and files at any level under a directory starting + with a '.'). + for path, file in finder: + ... + ''' + return self.find('') + + def __contains__(self, pattern): + raise RuntimeError("'in' operator forbidden for %s. Use contains()." % + self.__class__.__name__) + + def contains(self, pattern): + ''' + Return whether some files under the base directory match the given + pattern. See the mozpack.path.match documentation for a description of + the handled patterns. + ''' + return any(self.find(pattern)) + + def _minify_file(self, path, file): + ''' + Return an appropriate MinifiedSomething wrapper for the given BaseFile + instance (file), according to the file type (determined by the given + path), if the FileFinder was created with minification enabled. 
+ Otherwise, just return the given BaseFile instance. + ''' + if not self._minify or isinstance(file, ExecutableFile): + return file + + if path.endswith('.properties'): + return MinifiedProperties(file) + + if self._minify_js and path.endswith(('.js', '.jsm')): + return MinifiedJavaScript(file, self._minify_js_verify_command) + + return file + + def _find_helper(self, pattern, files, file_getter): + """Generic implementation of _find. + + A few *Finder implementations share logic for returning results. + This function implements the custom logic. + + The ``file_getter`` argument is a callable that receives a path + that is known to exist. The callable should return a ``BaseFile`` + instance. + """ + if '*' in pattern: + for p in files: + if mozpath.match(p, pattern): + yield p, file_getter(p) + elif pattern == '': + for p in files: + yield p, file_getter(p) + elif pattern in files: + yield pattern, file_getter(pattern) + else: + for p in files: + if mozpath.basedir(p, [pattern]) == pattern: + yield p, file_getter(p) + + +class FileFinder(BaseFinder): + ''' + Helper to get appropriate BaseFile instances from the file system. + ''' + def __init__(self, base, find_executables=True, ignore=(), + find_dotfiles=False, **kargs): + ''' + Create a FileFinder for files under the given base directory. + + The find_executables argument determines whether the finder needs to + try to guess whether files are executables. Disabling this guessing + when not necessary can speed up the finder significantly. + + ``ignore`` accepts an iterable of patterns to ignore. Entries are + strings that match paths relative to ``base`` using + ``mozpath.match()``. This means if an entry corresponds + to a directory, all files under that directory will be ignored. If + an entry corresponds to a file, that particular file will be ignored. 
+        '''
+        BaseFinder.__init__(self, base, **kargs)
+        self.find_dotfiles = find_dotfiles
+        self.find_executables = find_executables
+        self.ignore = ignore
+
+    def _find(self, pattern):
+        '''
+        Actual implementation of FileFinder.find(), dispatching to specialized
+        member functions depending on what kind of pattern was given.
+        Note all files with a name starting with a '.' are ignored when
+        scanning directories, but are not ignored when explicitly requested.
+        '''
+        if '*' in pattern:
+            return self._find_glob('', mozpath.split(pattern))
+        elif os.path.isdir(os.path.join(self.base, pattern)):
+            return self._find_dir(pattern)
+        else:
+            f = self.get(pattern)
+            return ((pattern, f),) if f else ()
+
+    def _find_dir(self, path):
+        '''
+        Actual implementation of FileFinder.find() when the given pattern
+        corresponds to an existing directory under the base directory.
+        Ignores file names starting with a '.' under the given path. If the
+        path itself has leaves starting with a '.', they are not ignored.
+        '''
+        for p in self.ignore:
+            if mozpath.match(path, p):
+                return
+
+        # The sorted makes the output idempotent. Otherwise, we are
+        # likely dependent on filesystem implementation details, such as
+        # inode ordering.
+        for p in sorted(os.listdir(os.path.join(self.base, path))):
+            if p.startswith('.'):
+                if p in ('.', '..'):
+                    continue
+                if not self.find_dotfiles:
+                    continue
+            for p_, f in self._find(mozpath.join(path, p)):
+                yield p_, f
+
+    def get(self, path):
+        srcpath = os.path.join(self.base, path)
+        if not os.path.exists(srcpath):
+            return None
+
+        for p in self.ignore:
+            if mozpath.match(path, p):
+                return None
+
+        if self.find_executables and is_executable(srcpath):
+            return ExecutableFile(srcpath)
+        else:
+            return File(srcpath)
+
+    def _find_glob(self, base, pattern):
+        '''
+        Actual implementation of FileFinder.find() when the given pattern
+        contains globbing patterns ('*' or '**'). 
This is meant to be an + equivalent of: + for p, f in self: + if mozpath.match(p, pattern): + yield p, f + but avoids scanning the entire tree. + ''' + if not pattern: + for p, f in self._find(base): + yield p, f + elif pattern[0] == '**': + for p, f in self._find(base): + if mozpath.match(p, mozpath.join(*pattern)): + yield p, f + elif '*' in pattern[0]: + if not os.path.exists(os.path.join(self.base, base)): + return + + for p in self.ignore: + if mozpath.match(base, p): + return + + # See above comment w.r.t. sorted() and idempotent behavior. + for p in sorted(os.listdir(os.path.join(self.base, base))): + if p.startswith('.') and not pattern[0].startswith('.'): + continue + if mozpath.match(p, pattern[0]): + for p_, f in self._find_glob(mozpath.join(base, p), + pattern[1:]): + yield p_, f + else: + for p, f in self._find_glob(mozpath.join(base, pattern[0]), + pattern[1:]): + yield p, f + + +class JarFinder(BaseFinder): + ''' + Helper to get appropriate DeflatedFile instances from a JarReader. + ''' + def __init__(self, base, reader, **kargs): + ''' + Create a JarFinder for files in the given JarReader. The base argument + is used as an indication of the Jar file location. + ''' + assert isinstance(reader, JarReader) + BaseFinder.__init__(self, base, **kargs) + self._files = OrderedDict((f.filename, f) for f in reader) + + def _find(self, pattern): + ''' + Actual implementation of JarFinder.find(), dispatching to specialized + member functions depending on what kind of pattern was given. + ''' + return self._find_helper(pattern, self._files, + lambda x: DeflatedFile(self._files[x])) + + +class TarFinder(BaseFinder): + ''' + Helper to get files from a TarFile. + ''' + def __init__(self, base, tar, **kargs): + ''' + Create a TarFinder for files in the given TarFile. The base argument + is used as an indication of the Tar file location. 
+ ''' + assert isinstance(tar, TarFile) + self._tar = tar + BaseFinder.__init__(self, base, **kargs) + self._files = OrderedDict((f.name, f) for f in tar if f.isfile()) + + def _find(self, pattern): + ''' + Actual implementation of TarFinder.find(), dispatching to specialized + member functions depending on what kind of pattern was given. + ''' + return self._find_helper(pattern, self._files, + lambda x: ExtractedTarFile(self._tar, + self._files[x])) + + +class ComposedFinder(BaseFinder): + ''' + Composes multiple File Finders in some sort of virtual file system. + + A ComposedFinder is initialized from a dictionary associating paths to + *Finder instances. + + Note this could be optimized to be smarter than getting all the files + in advance. + ''' + def __init__(self, finders): + # Can't import globally, because of the dependency of mozpack.copier + # on this module. + from mozpack.copier import FileRegistry + self.files = FileRegistry() + + for base, finder in sorted(finders.iteritems()): + if self.files.contains(base): + self.files.remove(base) + for p, f in finder.find(''): + self.files.add(mozpath.join(base, p), f) + + def find(self, pattern): + for p in self.files.match(pattern): + yield p, self.files[p] + + +class MercurialFile(BaseFile): + """File class for holding data from Mercurial.""" + def __init__(self, client, rev, path): + self._content = client.cat([path], rev=rev) + + def read(self): + return self._content + + +class MercurialRevisionFinder(BaseFinder): + """A finder that operates on a specific Mercurial revision.""" + + def __init__(self, repo, rev='.', recognize_repo_paths=False, **kwargs): + """Create a finder attached to a specific revision in a repository. + + If no revision is given, open the parent of the working directory. + + ``recognize_repo_paths`` will enable a mode where ``.get()`` will + recognize full paths that include the repo's path. 
Typically Finder + instances are "bound" to a base directory and paths are relative to + that directory. This mode changes that. When this mode is activated, + ``.find()`` will not work! This mode exists to support the moz.build + reader, which uses absolute paths instead of relative paths. The reader + should eventually be rewritten to use relative paths and this hack + should be removed (TODO bug 1171069). + """ + if not hglib: + raise Exception('hglib package not found') + + super(MercurialRevisionFinder, self).__init__(base=repo, **kwargs) + + self._root = mozpath.normpath(repo).rstrip('/') + self._recognize_repo_paths = recognize_repo_paths + + # We change directories here otherwise we have to deal with relative + # paths. + oldcwd = os.getcwd() + os.chdir(self._root) + try: + self._client = hglib.open(path=repo, encoding=b'utf-8') + finally: + os.chdir(oldcwd) + self._rev = rev if rev is not None else b'.' + self._files = OrderedDict() + + # Immediately populate the list of files in the repo since nearly every + # operation requires this list. + out = self._client.rawcommand([b'files', b'--rev', str(self._rev)]) + for relpath in out.splitlines(): + self._files[relpath] = None + + def _find(self, pattern): + if self._recognize_repo_paths: + raise NotImplementedError('cannot use find with recognize_repo_path') + + return self._find_helper(pattern, self._files, self._get) + + def get(self, path): + if self._recognize_repo_paths: + if not path.startswith(self._root): + raise ValueError('lookups in recognize_repo_paths mode must be ' + 'prefixed with repo path: %s' % path) + path = path[len(self._root) + 1:] + + try: + return self._get(path) + except KeyError: + return None + + def _get(self, path): + # We lazy populate self._files because potentially creating tens of + # thousands of MercurialFile instances for every file in the repo is + # inefficient. 
+ f = self._files[path] + if not f: + f = MercurialFile(self._client, self._rev, path) + self._files[path] = f + + return f diff --git a/python/mozbuild/mozpack/hg.py b/python/mozbuild/mozpack/hg.py new file mode 100644 index 000000000..79876061f --- /dev/null +++ b/python/mozbuild/mozpack/hg.py @@ -0,0 +1,95 @@ +# Copyright (C) 2015 Mozilla Contributors +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# As a special exception, the copyright holders of this code give you +# permission to combine this code with the software known as 'mozbuild', +# and to distribute those combinations without any restriction +# coming from the use of this file. (The General Public License +# restrictions do apply in other respects; for example, they cover +# modification of the file, and distribution when not combined with +# mozbuild.) +# +# If you modify this code, you may extend this exception to your +# version of the code, but you are not obliged to do so. If you +# do not wish to do so, delete this exception statement from your +# version. + +from __future__ import absolute_import + +import mercurial.error as error +import mercurial.hg as hg +import mercurial.ui as hgui + +from .files import ( + BaseFinder, + MercurialFile, +) +import mozpack.path as mozpath + + +# This isn't a complete implementation of BaseFile. 
But it is complete +# enough for moz.build reading. +class MercurialNativeFile(MercurialFile): + def __init__(self, data): + self.data = data + + def read(self): + return self.data + + +class MercurialNativeRevisionFinder(BaseFinder): + def __init__(self, repo, rev='.', recognize_repo_paths=False): + """Create a finder attached to a specific changeset. + + Accepts a Mercurial localrepo and changectx instance. + """ + if isinstance(repo, (str, unicode)): + path = repo + repo = hg.repository(hgui.ui(), repo) + else: + path = repo.root + + super(MercurialNativeRevisionFinder, self).__init__(base=repo.root) + + self._repo = repo + self._rev = rev + self._root = mozpath.normpath(path) + self._recognize_repo_paths = recognize_repo_paths + + def _find(self, pattern): + if self._recognize_repo_paths: + raise NotImplementedError('cannot use find with recognize_repo_path') + + return self._find_helper(pattern, self._repo[self._rev], self._get) + + def get(self, path): + if self._recognize_repo_paths: + if not path.startswith(self._root): + raise ValueError('lookups in recognize_repo_paths mode must be ' + 'prefixed with repo path: %s' % path) + path = path[len(self._root) + 1:] + + return self._get(path) + + def _get(self, path): + if isinstance(path, unicode): + path = path.encode('utf-8', 'replace') + + try: + fctx = self._repo.filectx(path, self._rev) + return MercurialNativeFile(fctx.data()) + except error.LookupError: + return None diff --git a/python/mozbuild/mozpack/manifests.py b/python/mozbuild/mozpack/manifests.py new file mode 100644 index 000000000..93bd6c2ca --- /dev/null +++ b/python/mozbuild/mozpack/manifests.py @@ -0,0 +1,419 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import, unicode_literals + +from contextlib import contextmanager +import json + +from .files import ( + AbsoluteSymlinkFile, + ExistingFile, + File, + FileFinder, + GeneratedFile, + PreprocessedFile, +) +import mozpack.path as mozpath + + +# This probably belongs in a more generic module. Where? +@contextmanager +def _auto_fileobj(path, fileobj, mode='r'): + if path and fileobj: + raise AssertionError('Only 1 of path or fileobj may be defined.') + + if not path and not fileobj: + raise AssertionError('Must specified 1 of path or fileobj.') + + if path: + fileobj = open(path, mode) + + try: + yield fileobj + finally: + if path: + fileobj.close() + + +class UnreadableInstallManifest(Exception): + """Raised when an invalid install manifest is parsed.""" + + +class InstallManifest(object): + """Describes actions to be used with a copier.FileCopier instance. + + This class facilitates serialization and deserialization of data used to + construct a copier.FileCopier and to perform copy operations. + + The manifest defines source paths, destination paths, and a mechanism by + which the destination file should come into existence. + + Entries in the manifest correspond to the following types: + + copy -- The file specified as the source path will be copied to the + destination path. + + symlink -- The destination path will be a symlink to the source path. + If symlinks are not supported, a copy will be performed. + + exists -- The destination path is accounted for and won't be deleted by + the FileCopier. If the destination path doesn't exist, an error is + raised. + + optional -- The destination path is accounted for and won't be deleted by + the FileCopier. No error is raised if the destination path does not + exist. + + patternsymlink -- Paths matched by the expression in the source path + will be symlinked to the destination directory. + + patterncopy -- Similar to patternsymlink except files are copied, not + symlinked. 
+ + preprocess -- The file specified at the source path will be run through + the preprocessor, and the output will be written to the destination + path. + + content -- The destination file will be created with the given content. + + Version 1 of the manifest was the initial version. + Version 2 added optional path support + Version 3 added support for pattern entries. + Version 4 added preprocessed file support. + Version 5 added content support. + """ + + CURRENT_VERSION = 5 + + FIELD_SEPARATOR = '\x1f' + + # Negative values are reserved for non-actionable items, that is, metadata + # that doesn't describe files in the destination. + SYMLINK = 1 + COPY = 2 + REQUIRED_EXISTS = 3 + OPTIONAL_EXISTS = 4 + PATTERN_SYMLINK = 5 + PATTERN_COPY = 6 + PREPROCESS = 7 + CONTENT = 8 + + def __init__(self, path=None, fileobj=None): + """Create a new InstallManifest entry. + + If path is defined, the manifest will be populated with data from the + file path. + + If fileobj is defined, the manifest will be populated with data read + from the specified file object. + + Both path and fileobj cannot be defined. 
+ """ + self._dests = {} + self._source_files = set() + + if path or fileobj: + with _auto_fileobj(path, fileobj, 'rb') as fh: + self._source_files.add(fh.name) + self._load_from_fileobj(fh) + + def _load_from_fileobj(self, fileobj): + version = fileobj.readline().rstrip() + if version not in ('1', '2', '3', '4', '5'): + raise UnreadableInstallManifest('Unknown manifest version: %s' % + version) + + for line in fileobj: + line = line.rstrip() + + fields = line.split(self.FIELD_SEPARATOR) + + record_type = int(fields[0]) + + if record_type == self.SYMLINK: + dest, source = fields[1:] + self.add_symlink(source, dest) + continue + + if record_type == self.COPY: + dest, source = fields[1:] + self.add_copy(source, dest) + continue + + if record_type == self.REQUIRED_EXISTS: + _, path = fields + self.add_required_exists(path) + continue + + if record_type == self.OPTIONAL_EXISTS: + _, path = fields + self.add_optional_exists(path) + continue + + if record_type == self.PATTERN_SYMLINK: + _, base, pattern, dest = fields[1:] + self.add_pattern_symlink(base, pattern, dest) + continue + + if record_type == self.PATTERN_COPY: + _, base, pattern, dest = fields[1:] + self.add_pattern_copy(base, pattern, dest) + continue + + if record_type == self.PREPROCESS: + dest, source, deps, marker, defines, warnings = fields[1:] + + self.add_preprocess(source, dest, deps, marker, + self._decode_field_entry(defines), + silence_missing_directive_warnings=bool(int(warnings))) + continue + + if record_type == self.CONTENT: + dest, content = fields[1:] + + self.add_content( + self._decode_field_entry(content).encode('utf-8'), dest) + continue + + # Don't fail for non-actionable items, allowing + # forward-compatibility with those we will add in the future. 
+ if record_type >= 0: + raise UnreadableInstallManifest('Unknown record type: %d' % + record_type) + + def __len__(self): + return len(self._dests) + + def __contains__(self, item): + return item in self._dests + + def __eq__(self, other): + return isinstance(other, InstallManifest) and self._dests == other._dests + + def __neq__(self, other): + return not self.__eq__(other) + + def __ior__(self, other): + if not isinstance(other, InstallManifest): + raise ValueError('Can only | with another instance of InstallManifest.') + + # We must copy source files to ourselves so extra dependencies from + # the preprocessor are taken into account. Ideally, we would track + # which source file each entry came from. However, this is more + # complicated and not yet implemented. The current implementation + # will result in over invalidation, possibly leading to performance + # loss. + self._source_files |= other._source_files + + for dest in sorted(other._dests): + self._add_entry(dest, other._dests[dest]) + + return self + + def _encode_field_entry(self, data): + """Converts an object into a format that can be stored in the manifest file. + + Complex data types, such as ``dict``, need to be converted into a text + representation before they can be written to a file. + """ + return json.dumps(data, sort_keys=True) + + def _decode_field_entry(self, data): + """Restores an object from a format that can be stored in the manifest file. + + Complex data types, such as ``dict``, need to be converted into a text + representation before they can be written to a file. + """ + return json.loads(data) + + def write(self, path=None, fileobj=None): + """Serialize this manifest to a file or file object. + + If path is specified, that file will be written to. If fileobj is specified, + the serialized content will be written to that file object. + + It is an error if both are specified. 
+ """ + with _auto_fileobj(path, fileobj, 'wb') as fh: + fh.write('%d\n' % self.CURRENT_VERSION) + + for dest in sorted(self._dests): + entry = self._dests[dest] + + parts = ['%d' % entry[0], dest] + parts.extend(entry[1:]) + fh.write('%s\n' % self.FIELD_SEPARATOR.join( + p.encode('utf-8') for p in parts)) + + def add_symlink(self, source, dest): + """Add a symlink to this manifest. + + dest will be a symlink to source. + """ + self._add_entry(dest, (self.SYMLINK, source)) + + def add_copy(self, source, dest): + """Add a copy to this manifest. + + source will be copied to dest. + """ + self._add_entry(dest, (self.COPY, source)) + + def add_required_exists(self, dest): + """Record that a destination file must exist. + + This effectively prevents the listed file from being deleted. + """ + self._add_entry(dest, (self.REQUIRED_EXISTS,)) + + def add_optional_exists(self, dest): + """Record that a destination file may exist. + + This effectively prevents the listed file from being deleted. Unlike a + "required exists" file, files of this type do not raise errors if the + destination file does not exist. + """ + self._add_entry(dest, (self.OPTIONAL_EXISTS,)) + + def add_pattern_symlink(self, base, pattern, dest): + """Add a pattern match that results in symlinks being created. + + A ``FileFinder`` will be created with its base set to ``base`` + and ``FileFinder.find()`` will be called with ``pattern`` to discover + source files. Each source file will be symlinked under ``dest``. + + Filenames under ``dest`` are constructed by taking the path fragment + after ``base`` and concatenating it with ``dest``. e.g. + + <base>/foo/bar.h -> <dest>/foo/bar.h + """ + self._add_entry(mozpath.join(base, pattern, dest), + (self.PATTERN_SYMLINK, base, pattern, dest)) + + def add_pattern_copy(self, base, pattern, dest): + """Add a pattern match that results in copies. + + See ``add_pattern_symlink()`` for usage. 
+ """ + self._add_entry(mozpath.join(base, pattern, dest), + (self.PATTERN_COPY, base, pattern, dest)) + + def add_preprocess(self, source, dest, deps, marker='#', defines={}, + silence_missing_directive_warnings=False): + """Add a preprocessed file to this manifest. + + ``source`` will be passed through preprocessor.py, and the output will be + written to ``dest``. + """ + self._add_entry(dest, ( + self.PREPROCESS, + source, + deps, + marker, + self._encode_field_entry(defines), + '1' if silence_missing_directive_warnings else '0', + )) + + def add_content(self, content, dest): + """Add a file with the given content.""" + self._add_entry(dest, ( + self.CONTENT, + self._encode_field_entry(content), + )) + + def _add_entry(self, dest, entry): + if dest in self._dests: + raise ValueError('Item already in manifest: %s' % dest) + + self._dests[dest] = entry + + def populate_registry(self, registry, defines_override={}): + """Populate a mozpack.copier.FileRegistry instance with data from us. + + The caller supplied a FileRegistry instance (or at least something that + conforms to its interface) and that instance is populated with data + from this manifest. + + Defines can be given to override the ones in the manifest for + preprocessing. 
+ """ + for dest in sorted(self._dests): + entry = self._dests[dest] + install_type = entry[0] + + if install_type == self.SYMLINK: + registry.add(dest, AbsoluteSymlinkFile(entry[1])) + continue + + if install_type == self.COPY: + registry.add(dest, File(entry[1])) + continue + + if install_type == self.REQUIRED_EXISTS: + registry.add(dest, ExistingFile(required=True)) + continue + + if install_type == self.OPTIONAL_EXISTS: + registry.add(dest, ExistingFile(required=False)) + continue + + if install_type in (self.PATTERN_SYMLINK, self.PATTERN_COPY): + _, base, pattern, dest = entry + finder = FileFinder(base, find_executables=False) + paths = [f[0] for f in finder.find(pattern)] + + if install_type == self.PATTERN_SYMLINK: + cls = AbsoluteSymlinkFile + else: + cls = File + + for path in paths: + source = mozpath.join(base, path) + registry.add(mozpath.join(dest, path), cls(source)) + + continue + + if install_type == self.PREPROCESS: + defines = self._decode_field_entry(entry[4]) + if defines_override: + defines.update(defines_override) + registry.add(dest, PreprocessedFile(entry[1], + depfile_path=entry[2], + marker=entry[3], + defines=defines, + extra_depends=self._source_files, + silence_missing_directive_warnings=bool(int(entry[5])))) + + continue + + if install_type == self.CONTENT: + # GeneratedFile expect the buffer interface, which the unicode + # type doesn't have, so encode to a str. + content = self._decode_field_entry(entry[1]).encode('utf-8') + registry.add(dest, GeneratedFile(content)) + continue + + raise Exception('Unknown install type defined in manifest: %d' % + install_type) + + +class InstallManifestNoSymlinks(InstallManifest): + """Like InstallManifest, but files are never installed as symbolic links. + Instead, they are always copied. + """ + + def add_symlink(self, source, dest): + """A wrapper that accept symlink entries and install file copies. + + source will be copied to dest. 
+ """ + self.add_copy(source, dest) + + def add_pattern_symlink(self, base, pattern, dest): + """A wrapper that accepts symlink patterns and installs file copies. + + Files discovered with ``pattern`` will be copied to ``dest``. + """ + self.add_pattern_copy(base, pattern, dest) diff --git a/python/mozbuild/mozpack/mozjar.py b/python/mozbuild/mozpack/mozjar.py new file mode 100644 index 000000000..a1ada8594 --- /dev/null +++ b/python/mozbuild/mozpack/mozjar.py @@ -0,0 +1,816 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +from io import BytesIO +import struct +import zlib +import os +from zipfile import ( + ZIP_STORED, + ZIP_DEFLATED, +) +from collections import OrderedDict +from urlparse import urlparse, ParseResult +import mozpack.path as mozpath + +JAR_STORED = ZIP_STORED +JAR_DEFLATED = ZIP_DEFLATED +MAX_WBITS = 15 + + +class JarReaderError(Exception): + '''Error type for Jar reader errors.''' + + +class JarWriterError(Exception): + '''Error type for Jar writer errors.''' + + +class JarStruct(object): + ''' + Helper used to define ZIP archive raw data structures. Data structures + handled by this helper all start with a magic number, defined in + subclasses MAGIC field as a 32-bits unsigned integer, followed by data + structured as described in subclasses STRUCT field. + + The STRUCT field contains a list of (name, type) pairs where name is a + field name, and the type can be one of 'uint32', 'uint16' or one of the + field names. In the latter case, the field is considered to be a string + buffer with a length given in that field. 
+ For example, + STRUCT = [ + ('version', 'uint32'), + ('filename_size', 'uint16'), + ('filename', 'filename_size') + ] + describes a structure with a 'version' 32-bits unsigned integer field, + followed by a 'filename_size' 16-bits unsigned integer field, followed by a + filename_size-long string buffer 'filename'. + + Fields that are used as other fields size are not stored in objects. In the + above example, an instance of such subclass would only have two attributes: + obj['version'] + obj['filename'] + filename_size would be obtained with len(obj['filename']). + + JarStruct subclasses instances can be either initialized from existing data + (deserialized), or with empty fields. + ''' + + TYPE_MAPPING = {'uint32': ('I', 4), 'uint16': ('H', 2)} + + def __init__(self, data=None): + ''' + Create an instance from the given data. Data may be omitted to create + an instance with empty fields. + ''' + assert self.MAGIC and isinstance(self.STRUCT, OrderedDict) + self.size_fields = set(t for t in self.STRUCT.itervalues() + if not t in JarStruct.TYPE_MAPPING) + self._values = {} + if data: + self._init_data(data) + else: + self._init_empty() + + def _init_data(self, data): + ''' + Initialize an instance from data, following the data structure + described in self.STRUCT. The self.MAGIC signature is expected at + data[:4]. + ''' + assert data is not None + self.signature, size = JarStruct.get_data('uint32', data) + if self.signature != self.MAGIC: + raise JarReaderError('Bad magic') + offset = size + # For all fields used as other fields sizes, keep track of their value + # separately. 
        # Size fields are not stored on the object; track their decoded
        # values here so the variable-length fields that follow can be read.
        sizes = dict((t, 0) for t in self.size_fields)
        for name, t in self.STRUCT.iteritems():
            if t in JarStruct.TYPE_MAPPING:
                value, size = JarStruct.get_data(t, data[offset:])
            else:
                # Variable-length field: its size was decoded earlier into
                # the field named `t`.
                size = sizes[t]
                value = data[offset:offset + size]
                if isinstance(value, memoryview):
                    value = value.tobytes()
            if not name in sizes:
                self._values[name] = value
            else:
                sizes[name] = value
            offset += size

    def _init_empty(self):
        '''
        Initialize an instance with empty fields.
        '''
        self.signature = self.MAGIC
        for name, t in self.STRUCT.iteritems():
            # Size fields are derived from their target field's length, so
            # they get no stored value of their own.
            if name in self.size_fields:
                continue
            self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ''

    @staticmethod
    def get_data(type, data):
        '''
        Deserialize a single field of given type (must be one of
        JarStruct.TYPE_MAPPING) at the given offset in the given data.

        Returns a (value, size) pair, where size is the number of bytes
        consumed.
        '''
        # NOTE: `type` and `format` shadow builtins; kept as-is here to
        # preserve the block byte-for-byte.
        assert type in JarStruct.TYPE_MAPPING
        assert data is not None
        format, size = JarStruct.TYPE_MAPPING[type]
        data = data[:size]
        if isinstance(data, memoryview):
            data = data.tobytes()
        # All ZIP structures are little-endian, hence '<'.
        return struct.unpack('<' + format, data)[0], size

    def serialize(self):
        '''
        Serialize the data structure according to the data structure definition
        from self.STRUCT.
        '''
        serialized = struct.pack('<I', self.signature)
        # Map each size field to the variable-length field it describes, so
        # its value can be recomputed from that field's current length.
        sizes = dict((t, name) for name, t in self.STRUCT.iteritems()
                     if not t in JarStruct.TYPE_MAPPING)
        for name, t in self.STRUCT.iteritems():
            if t in JarStruct.TYPE_MAPPING:
                format, size = JarStruct.TYPE_MAPPING[t]
                if name in sizes:
                    # Derived size field: emit the current length of the
                    # field it sizes, not a stored value.
                    value = len(self[sizes[name]])
                else:
                    value = self[name]
                serialized += struct.pack('<' + format, value)
            else:
                serialized += self[name]
        return serialized

    @property
    def size(self):
        '''
        Return the size of the data structure, given the current values of all
        variable length fields.
+ ''' + size = JarStruct.TYPE_MAPPING['uint32'][1] + for name, type in self.STRUCT.iteritems(): + if type in JarStruct.TYPE_MAPPING: + size += JarStruct.TYPE_MAPPING[type][1] + else: + size += len(self[name]) + return size + + def __getitem__(self, key): + return self._values[key] + + def __setitem__(self, key, value): + if not key in self.STRUCT: + raise KeyError(key) + if key in self.size_fields: + raise AttributeError("can't set attribute") + self._values[key] = value + + def __contains__(self, key): + return key in self._values + + def __iter__(self): + return self._values.iteritems() + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, + ' '.join('%s=%s' % (n, v) for n, v in self)) + + +class JarCdirEnd(JarStruct): + ''' + End of central directory record. + ''' + MAGIC = 0x06054b50 + STRUCT = OrderedDict([ + ('disk_num', 'uint16'), + ('cdir_disk', 'uint16'), + ('disk_entries', 'uint16'), + ('cdir_entries', 'uint16'), + ('cdir_size', 'uint32'), + ('cdir_offset', 'uint32'), + ('comment_size', 'uint16'), + ('comment', 'comment_size'), + ]) + +CDIR_END_SIZE = JarCdirEnd().size + + +class JarCdirEntry(JarStruct): + ''' + Central directory file header + ''' + MAGIC = 0x02014b50 + STRUCT = OrderedDict([ + ('creator_version', 'uint16'), + ('min_version', 'uint16'), + ('general_flag', 'uint16'), + ('compression', 'uint16'), + ('lastmod_time', 'uint16'), + ('lastmod_date', 'uint16'), + ('crc32', 'uint32'), + ('compressed_size', 'uint32'), + ('uncompressed_size', 'uint32'), + ('filename_size', 'uint16'), + ('extrafield_size', 'uint16'), + ('filecomment_size', 'uint16'), + ('disknum', 'uint16'), + ('internal_attr', 'uint16'), + ('external_attr', 'uint32'), + ('offset', 'uint32'), + ('filename', 'filename_size'), + ('extrafield', 'extrafield_size'), + ('filecomment', 'filecomment_size'), + ]) + + +class JarLocalFileHeader(JarStruct): + ''' + Local file header + ''' + MAGIC = 0x04034b50 + STRUCT = OrderedDict([ + ('min_version', 'uint16'), + 
('general_flag', 'uint16'), + ('compression', 'uint16'), + ('lastmod_time', 'uint16'), + ('lastmod_date', 'uint16'), + ('crc32', 'uint32'), + ('compressed_size', 'uint32'), + ('uncompressed_size', 'uint32'), + ('filename_size', 'uint16'), + ('extra_field_size', 'uint16'), + ('filename', 'filename_size'), + ('extra_field', 'extra_field_size'), + ]) + + +class JarFileReader(object): + ''' + File-like class for use by JarReader to give access to individual files + within a Jar archive. + ''' + def __init__(self, header, data): + ''' + Initialize a JarFileReader. header is the local file header + corresponding to the file in the jar archive, data a buffer containing + the file data. + ''' + assert header['compression'] in [JAR_DEFLATED, JAR_STORED] + self._data = data + # Copy some local file header fields. + for name in ['filename', 'compressed_size', + 'uncompressed_size', 'crc32']: + setattr(self, name, header[name]) + self.compressed = header['compression'] == JAR_DEFLATED + + def read(self, length=-1): + ''' + Read some amount of uncompressed data. + ''' + return self.uncompressed_data.read(length) + + def readlines(self): + ''' + Return a list containing all the lines of data in the uncompressed + data. + ''' + return self.read().splitlines(True) + + def __iter__(self): + ''' + Iterator, to support the "for line in fileobj" constructs. + ''' + return iter(self.readlines()) + + def seek(self, pos, whence=os.SEEK_SET): + ''' + Change the current position in the uncompressed data. Subsequent reads + will start from there. + ''' + return self.uncompressed_data.seek(pos, whence) + + def close(self): + ''' + Free the uncompressed data buffer. + ''' + self.uncompressed_data.close() + + @property + def compressed_data(self): + ''' + Return the raw compressed data. + ''' + return self._data[:self.compressed_size] + + @property + def uncompressed_data(self): + ''' + Return the uncompressed data. 
+ ''' + if hasattr(self, '_uncompressed_data'): + return self._uncompressed_data + data = self.compressed_data + if self.compressed: + data = zlib.decompress(data.tobytes(), -MAX_WBITS) + else: + data = data.tobytes() + if len(data) != self.uncompressed_size: + raise JarReaderError('Corrupted file? %s' % self.filename) + self._uncompressed_data = BytesIO(data) + return self._uncompressed_data + + +class JarReader(object): + ''' + Class with methods to read Jar files. Can open standard jar files as well + as Mozilla jar files (see further details in the JarWriter documentation). + ''' + def __init__(self, file=None, fileobj=None, data=None): + ''' + Opens the given file as a Jar archive. Use the given file-like object + if one is given instead of opening the given file name. + ''' + if fileobj: + data = fileobj.read() + elif file: + data = open(file, 'rb').read() + self._data = memoryview(data) + # The End of Central Directory Record has a variable size because of + # comments it may contain, so scan for it from the end of the file. + offset = -CDIR_END_SIZE + while True: + signature = JarStruct.get_data('uint32', self._data[offset:])[0] + if signature == JarCdirEnd.MAGIC: + break + if offset == -len(self._data): + raise JarReaderError('Not a jar?') + offset -= 1 + self._cdir_end = JarCdirEnd(self._data[offset:]) + + def close(self): + ''' + Free some resources associated with the Jar. + ''' + del self._data + + @property + def entries(self): + ''' + Return an ordered dict of central directory entries, indexed by + filename, in the order they appear in the Jar archive central + directory. Directory entries are skipped. 
+ ''' + if hasattr(self, '_entries'): + return self._entries + preload = 0 + if self.is_optimized: + preload = JarStruct.get_data('uint32', self._data)[0] + entries = OrderedDict() + offset = self._cdir_end['cdir_offset'] + for e in xrange(self._cdir_end['cdir_entries']): + entry = JarCdirEntry(self._data[offset:]) + offset += entry.size + # Creator host system. 0 is MSDOS, 3 is Unix + host = entry['creator_version'] >> 8 + # External attributes values depend on host above. On Unix the + # higher bits are the stat.st_mode value. On MSDOS, the lower bits + # are the FAT attributes. + xattr = entry['external_attr'] + # Skip directories + if (host == 0 and xattr & 0x10) or (host == 3 and + xattr & (040000 << 16)): + continue + entries[entry['filename']] = entry + if entry['offset'] < preload: + self._last_preloaded = entry['filename'] + self._entries = entries + return entries + + @property + def is_optimized(self): + ''' + Return whether the jar archive is optimized. + ''' + # In optimized jars, the central directory is at the beginning of the + # file, after a single 32-bits value, which is the length of data + # preloaded. + return self._cdir_end['cdir_offset'] == \ + JarStruct.TYPE_MAPPING['uint32'][1] + + @property + def last_preloaded(self): + ''' + Return the name of the last file that is set to be preloaded. + See JarWriter documentation for more details on preloading. + ''' + if hasattr(self, '_last_preloaded'): + return self._last_preloaded + self._last_preloaded = None + self.entries + return self._last_preloaded + + def _getreader(self, entry): + ''' + Helper to create a JarFileReader corresponding to the given central + directory entry. + ''' + header = JarLocalFileHeader(self._data[entry['offset']:]) + for key, value in entry: + if key in header and header[key] != value: + raise JarReaderError('Central directory and file header ' + + 'mismatch. 
Corrupted archive?') + return JarFileReader(header, + self._data[entry['offset'] + header.size:]) + + def __iter__(self): + ''' + Iterate over all files in the Jar archive, in the form of + JarFileReaders. + for file in jarReader: + ... + ''' + for entry in self.entries.itervalues(): + yield self._getreader(entry) + + def __getitem__(self, name): + ''' + Get a JarFileReader for the given file name. + ''' + return self._getreader(self.entries[name]) + + def __contains__(self, name): + ''' + Return whether the given file name appears in the Jar archive. + ''' + return name in self.entries + + +class JarWriter(object): + ''' + Class with methods to write Jar files. Can write more-or-less standard jar + archives as well as jar archives optimized for Gecko. See the documentation + for the close() member function for a description of both layouts. + ''' + def __init__(self, file=None, fileobj=None, compress=True, optimize=True, + compress_level=9): + ''' + Initialize a Jar archive in the given file. Use the given file-like + object if one is given instead of opening the given file name. + The compress option determines the default behavior for storing data + in the jar archive. The optimize options determines whether the jar + archive should be optimized for Gecko or not. ``compress_level`` + defines the zlib compression level. It must be a value between 0 and 9 + and defaults to 9, the highest and slowest level of compression. + ''' + if fileobj: + self._data = fileobj + else: + self._data = open(file, 'wb') + self._compress = compress + self._compress_level = compress_level + self._contents = OrderedDict() + self._last_preloaded = None + self._optimize = optimize + + def __enter__(self): + ''' + Context manager __enter__ method for JarWriter. + ''' + return self + + def __exit__(self, type, value, tb): + ''' + Context manager __exit__ method for JarWriter. + ''' + self.finish() + + def finish(self): + ''' + Flush and close the Jar archive. 
+ + Standard jar archives are laid out like the following: + - Local file header 1 + - File data 1 + - Local file header 2 + - File data 2 + - (...) + - Central directory entry pointing at Local file header 1 + - Central directory entry pointing at Local file header 2 + - (...) + - End of central directory, pointing at first central directory + entry. + + Jar archives optimized for Gecko are laid out like the following: + - 32-bits unsigned integer giving the amount of data to preload. + - Central directory entry pointing at Local file header 1 + - Central directory entry pointing at Local file header 2 + - (...) + - End of central directory, pointing at first central directory + entry. + - Local file header 1 + - File data 1 + - Local file header 2 + - File data 2 + - (...) + - End of central directory, pointing at first central directory + entry. + The duplication of the End of central directory is to accomodate some + Zip reading tools that want an end of central directory structure to + follow the central directory entries. + ''' + offset = 0 + headers = {} + preload_size = 0 + # Prepare central directory entries + for entry, content in self._contents.itervalues(): + header = JarLocalFileHeader() + for name in entry.STRUCT: + if name in header: + header[name] = entry[name] + entry['offset'] = offset + offset += len(content) + header.size + if entry['filename'] == self._last_preloaded: + preload_size = offset + headers[entry] = header + # Prepare end of central directory + end = JarCdirEnd() + end['disk_entries'] = len(self._contents) + end['cdir_entries'] = end['disk_entries'] + end['cdir_size'] = reduce(lambda x, y: x + y[0].size, + self._contents.values(), 0) + # On optimized archives, store the preloaded size and the central + # directory entries, followed by the first end of central directory. 
+ if self._optimize: + end['cdir_offset'] = 4 + offset = end['cdir_size'] + end['cdir_offset'] + end.size + if preload_size: + preload_size += offset + self._data.write(struct.pack('<I', preload_size)) + for entry, _ in self._contents.itervalues(): + entry['offset'] += offset + self._data.write(entry.serialize()) + self._data.write(end.serialize()) + # Store local file entries followed by compressed data + for entry, content in self._contents.itervalues(): + self._data.write(headers[entry].serialize()) + self._data.write(content) + # On non optimized archives, store the central directory entries. + if not self._optimize: + end['cdir_offset'] = offset + for entry, _ in self._contents.itervalues(): + self._data.write(entry.serialize()) + # Store the end of central directory. + self._data.write(end.serialize()) + self._data.close() + + def add(self, name, data, compress=None, mode=None, skip_duplicates=False): + ''' + Add a new member to the jar archive, with the given name and the given + data. + The compress option indicates if the given data should be compressed + (True), not compressed (False), or compressed according to the default + defined when creating the JarWriter (None). + When the data should be compressed (True or None with self.compress == + True), it is only really compressed if the compressed size is smaller + than the uncompressed size. + The mode option gives the unix permissions that should be stored + for the jar entry. + If a duplicated member is found skip_duplicates will prevent raising + an exception if set to True. + The given data may be a buffer, a file-like instance, a Deflater or a + JarFileReader instance. The latter two allow to avoid uncompressing + data to recompress it. 
+ ''' + name = mozpath.normsep(name) + + if name in self._contents and not skip_duplicates: + raise JarWriterError("File %s already in JarWriter" % name) + if compress is None: + compress = self._compress + if (isinstance(data, JarFileReader) and data.compressed == compress) \ + or (isinstance(data, Deflater) and data.compress == compress): + deflater = data + else: + deflater = Deflater(compress, compress_level=self._compress_level) + if isinstance(data, basestring): + deflater.write(data) + elif hasattr(data, 'read'): + if hasattr(data, 'seek'): + data.seek(0) + deflater.write(data.read()) + else: + raise JarWriterError("Don't know how to handle %s" % + type(data)) + # Fill a central directory entry for this new member. + entry = JarCdirEntry() + entry['creator_version'] = 20 + if mode is not None: + # Set creator host system (upper byte of creator_version) + # to 3 (Unix) so mode is honored when there is one. + entry['creator_version'] |= 3 << 8 + entry['external_attr'] = (mode & 0xFFFF) << 16L + if deflater.compressed: + entry['min_version'] = 20 # Version 2.0 supports deflated streams + entry['general_flag'] = 2 # Max compression + entry['compression'] = JAR_DEFLATED + else: + entry['min_version'] = 10 # Version 1.0 for stored streams + entry['general_flag'] = 0 + entry['compression'] = JAR_STORED + # January 1st, 2010. See bug 592369. + entry['lastmod_date'] = ((2010 - 1980) << 9) | (1 << 5) | 1 + entry['lastmod_time'] = 0 + entry['crc32'] = deflater.crc32 + entry['compressed_size'] = deflater.compressed_size + entry['uncompressed_size'] = deflater.uncompressed_size + entry['filename'] = name + self._contents[name] = entry, deflater.compressed_data + + def preload(self, files): + ''' + Set which members of the jar archive should be preloaded when opening + the archive in Gecko. This reorders the members according to the order + of given list. 
        '''
        # Rebuild the contents dict with the preloaded members first (in the
        # given order), then every remaining member in its original order.
        new_contents = OrderedDict()
        for f in files:
            if not f in self._contents:
                continue
            new_contents[f] = self._contents[f]
            self._last_preloaded = f
        for f in self._contents:
            if not f in new_contents:
                new_contents[f] = self._contents[f]
        self._contents = new_contents


class Deflater(object):
    '''
    File-like interface to zlib compression. The data is actually not
    compressed unless the compressed form is smaller than the uncompressed
    data.
    '''
    def __init__(self, compress=True, compress_level=9):
        '''
        Initialize a Deflater. The compress argument determines whether to
        try to compress at all.
        '''
        # Always keep the raw data around: it is needed for crc32, for the
        # uncompressed size, and as the output when compression isn't a win.
        self._data = BytesIO()
        self.compress = compress
        if compress:
            # Negative window bits: produce a raw deflate stream, as stored
            # in ZIP members (no zlib header/trailer).
            self._deflater = zlib.compressobj(compress_level, zlib.DEFLATED,
                                              -MAX_WBITS)
            self._deflated = BytesIO()
        else:
            self._deflater = None

    def write(self, data):
        '''
        Append a buffer to the Deflater.

        Raises JarWriterError when the Deflater was set to compress and has
        already been flushed (e.g. after one of the size/data properties
        triggered _flush()).
        '''
        self._data.write(data)
        if self.compress:
            if self._deflater:
                if isinstance(data, memoryview):
                    data = data.tobytes()
                self._deflated.write(self._deflater.compress(data))
            else:
                raise JarWriterError("Can't write after flush")

    def close(self):
        '''
        Close the Deflater.
        '''
        self._data.close()
        if self.compress:
            self._deflated.close()

    def _flush(self):
        '''
        Flush the underlying zlib compression object.

        After this, no further write() is possible on a compressing Deflater
        (the compressobj is discarded).
        '''
        if self.compress and self._deflater:
            self._deflated.write(self._deflater.flush())
            self._deflater = None

    @property
    def compressed(self):
        '''
        Return whether the data should be compressed.

        Note: querying this flushes the compressor (via _compressed_size),
        so subsequent writes will fail.
        '''
        return self._compressed_size < self.uncompressed_size

    @property
    def _compressed_size(self):
        '''
        Return the real compressed size of the data written to the Deflater. If
        the Deflater is set not to compress, the uncompressed size is returned.
        Otherwise, the actual compressed size is returned, whether or not it is
        a win over the uncompressed size.
+ ''' + if self.compress: + self._flush() + return self._deflated.tell() + return self.uncompressed_size + + @property + def compressed_size(self): + ''' + Return the compressed size of the data written to the Deflater. If the + Deflater is set not to compress, the uncompressed size is returned. + Otherwise, if the data should not be compressed (the real compressed + size is bigger than the uncompressed size), return the uncompressed + size. + ''' + if self.compressed: + return self._compressed_size + return self.uncompressed_size + + @property + def uncompressed_size(self): + ''' + Return the size of the data written to the Deflater. + ''' + return self._data.tell() + + @property + def crc32(self): + ''' + Return the crc32 of the data written to the Deflater. + ''' + return zlib.crc32(self._data.getvalue()) & 0xffffffff + + @property + def compressed_data(self): + ''' + Return the compressed data, if the data should be compressed (real + compressed size smaller than the uncompressed size), or the + uncompressed data otherwise. + ''' + if self.compressed: + return self._deflated.getvalue() + return self._data.getvalue() + + +class JarLog(dict): + ''' + Helper to read the file Gecko generates when setting MOZ_JAR_LOG_FILE. + The jar log is then available as a dict with the jar path as key (see + canonicalize for more details on the key value), and the corresponding + access log as a list value. Only the first access to a given member of + a jar is stored. + ''' + def __init__(self, file=None, fileobj=None): + if not fileobj: + fileobj = open(file, 'r') + urlmap = {} + for line in fileobj: + url, path = line.strip().split(None, 1) + if not url or not path: + continue + if url not in urlmap: + urlmap[url] = JarLog.canonicalize(url) + jar = urlmap[url] + entry = self.setdefault(jar, []) + if path not in entry: + entry.append(path) + + @staticmethod + def canonicalize(url): + ''' + The jar path is stored in a MOZ_JAR_LOG_FILE log as a url. 
This method + returns a unique value corresponding to such urls. + - file:///{path} becomes {path} + - jar:file:///{path}!/{subpath} becomes ({path}, {subpath}) + - jar:jar:file:///{path}!/{subpath}!/{subpath2} becomes + ({path}, {subpath}, {subpath2}) + ''' + if not isinstance(url, ParseResult): + # Assume that if it doesn't start with jar: or file:, it's a path. + if not url.startswith(('jar:', 'file:')): + url = 'file:///' + os.path.abspath(url) + url = urlparse(url) + assert url.scheme + assert url.scheme in ('jar', 'file') + if url.scheme == 'jar': + path = JarLog.canonicalize(url.path) + if isinstance(path, tuple): + return path[:-1] + tuple(path[-1].split('!/', 1)) + return tuple(path.split('!/', 1)) + if url.scheme == 'file': + assert os.path.isabs(url.path) + path = url.path + # On Windows, url.path will be /drive:/path ; on Unix systems, + # /path. As we want drive:/path instead of /drive:/path on Windows, + # remove the leading /. + if os.path.isabs(path[1:]): + path = path[1:] + path = os.path.realpath(path) + return mozpath.normsep(os.path.normcase(path)) diff --git a/python/mozbuild/mozpack/packager/__init__.py b/python/mozbuild/mozpack/packager/__init__.py new file mode 100644 index 000000000..4c98ec3d3 --- /dev/null +++ b/python/mozbuild/mozpack/packager/__init__.py @@ -0,0 +1,408 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +from mozbuild.preprocessor import Preprocessor +import re +import os +from mozpack.errors import errors +from mozpack.chrome.manifest import ( + Manifest, + ManifestBinaryComponent, + ManifestChrome, + ManifestInterfaces, + is_manifest, + parse_manifest, +) +import mozpack.path as mozpath +from collections import deque + + +class Component(object): + ''' + Class that represents a component in a package manifest. 
+ ''' + def __init__(self, name, destdir=''): + if name.find(' ') > 0: + errors.fatal('Malformed manifest: space in component name "%s"' + % component) + self._name = name + self._destdir = destdir + + def __repr__(self): + s = self.name + if self.destdir: + s += ' destdir="%s"' % self.destdir + return s + + @property + def name(self): + return self._name + + @property + def destdir(self): + return self._destdir + + @staticmethod + def _triples(lst): + ''' + Split [1, 2, 3, 4, 5, 6, 7] into [(1, 2, 3), (4, 5, 6)]. + ''' + return zip(*[iter(lst)] * 3) + + KEY_VALUE_RE = re.compile(r''' + \s* # optional whitespace. + ([a-zA-Z0-9_]+) # key. + \s*=\s* # optional space around =. + "([^"]*)" # value without surrounding quotes. + (?:\s+|$) + ''', re.VERBOSE) + + @staticmethod + def _split_options(string): + ''' + Split 'key1="value1" key2="value2"' into + {'key1':'value1', 'key2':'value2'}. + + Returned keys and values are all strings. + + Throws ValueError if the input is malformed. + ''' + options = {} + splits = Component.KEY_VALUE_RE.split(string) + if len(splits) % 3 != 1: + # This should never happen -- we expect to always split + # into ['', ('key', 'val', '')*]. + raise ValueError("Bad input") + if splits[0]: + raise ValueError('Unrecognized input ' + splits[0]) + for key, val, no_match in Component._triples(splits[1:]): + if no_match: + raise ValueError('Unrecognized input ' + no_match) + options[key] = val + return options + + @staticmethod + def _split_component_and_options(string): + ''' + Split 'name key1="value1" key2="value2"' into + ('name', {'key1':'value1', 'key2':'value2'}). + + Returned name, keys and values are all strings. + + Raises ValueError if the input is malformed. 
+ ''' + splits = string.strip().split(None, 1) + if not splits: + raise ValueError('No component found') + component = splits[0].strip() + if not component: + raise ValueError('No component found') + if not re.match('[a-zA-Z0-9_\-]+$', component): + raise ValueError('Bad component name ' + component) + options = Component._split_options(splits[1]) if len(splits) > 1 else {} + return component, options + + @staticmethod + def from_string(string): + ''' + Create a component from a string. + ''' + try: + name, options = Component._split_component_and_options(string) + except ValueError as e: + errors.fatal('Malformed manifest: %s' % e) + return + destdir = options.pop('destdir', '') + if options: + errors.fatal('Malformed manifest: options %s not recognized' + % options.keys()) + return Component(name, destdir=destdir) + + +class PackageManifestParser(object): + ''' + Class for parsing of a package manifest, after preprocessing. + + A package manifest is a list of file paths, with some syntaxic sugar: + [] designates a toplevel component. Example: [xpcom] + - in front of a file specifies it to be removed + * wildcard support + ** expands to all files and zero or more directories + ; file comment + + The parser takes input from the preprocessor line by line, and pushes + parsed information to a sink object. + + The add and remove methods of the sink object are called with the + current Component instance and a path. + ''' + def __init__(self, sink): + ''' + Initialize the package manifest parser with the given sink. + ''' + self._component = Component('') + self._sink = sink + + def handle_line(self, str): + ''' + Handle a line of input and push the parsed information to the sink + object. + ''' + # Remove comments. 
+ str = str.strip() + if not str or str.startswith(';'): + return + if str.startswith('[') and str.endswith(']'): + self._component = Component.from_string(str[1:-1]) + elif str.startswith('-'): + str = str[1:] + self._sink.remove(self._component, str) + elif ',' in str: + errors.fatal('Incompatible syntax') + else: + self._sink.add(self._component, str) + + +class PreprocessorOutputWrapper(object): + ''' + File-like helper to handle the preprocessor output and send it to a parser. + The parser's handle_line method is called in the relevant errors.context. + ''' + def __init__(self, preprocessor, parser): + self._parser = parser + self._pp = preprocessor + + def write(self, str): + file = os.path.normpath(os.path.abspath(self._pp.context['FILE'])) + with errors.context(file, self._pp.context['LINE']): + self._parser.handle_line(str) + + +def preprocess(input, parser, defines={}): + ''' + Preprocess the file-like input with the given defines, and send the + preprocessed output line by line to the given parser. + ''' + pp = Preprocessor() + pp.context.update(defines) + pp.do_filter('substitution') + pp.out = PreprocessorOutputWrapper(pp, parser) + pp.do_include(input) + + +def preprocess_manifest(sink, manifest, defines={}): + ''' + Preprocess the given file-like manifest with the given defines, and push + the parsed information to a sink. See PackageManifestParser documentation + for more details on the sink. + ''' + preprocess(manifest, PackageManifestParser(sink), defines) + + +class CallDeque(deque): + ''' + Queue of function calls to make. 
+ ''' + def append(self, function, *args): + deque.append(self, (errors.get_context(), function, args)) + + def execute(self): + while True: + try: + context, function, args = self.popleft() + except IndexError: + return + if context: + with errors.context(context[0], context[1]): + function(*args) + else: + function(*args) + + +class SimplePackager(object): + ''' + Helper used to translate and buffer instructions from the + SimpleManifestSink to a formatter. Formatters expect some information to be + given first that the simple manifest contents can't guarantee before the + end of the input. + ''' + def __init__(self, formatter): + self.formatter = formatter + # Queue for formatter.add_interfaces()/add_manifest() calls. + self._queue = CallDeque() + # Queue for formatter.add_manifest() calls for ManifestChrome. + self._chrome_queue = CallDeque() + # Queue for formatter.add() calls. + self._file_queue = CallDeque() + # All paths containing addons. (key is path, value is whether it + # should be packed or unpacked) + self._addons = {} + # All manifest paths imported. + self._manifests = set() + # All manifest paths included from some other manifest. + self._included_manifests = {} + self._closed = False + + # Parsing RDF is complex, and would require an external library to do + # properly. Just go with some hackish but probably sufficient regexp + UNPACK_ADDON_RE = re.compile(r'''(?: + <em:unpack>true</em:unpack> + |em:unpack=(?P<quote>["']?)true(?P=quote) + )''', re.VERBOSE) + + def add(self, path, file): + ''' + Add the given BaseFile instance with the given path. 
+ ''' + assert not self._closed + if is_manifest(path): + self._add_manifest_file(path, file) + elif path.endswith('.xpt'): + self._queue.append(self.formatter.add_interfaces, path, file) + else: + self._file_queue.append(self.formatter.add, path, file) + if mozpath.basename(path) == 'install.rdf': + addon = True + install_rdf = file.open().read() + if self.UNPACK_ADDON_RE.search(install_rdf): + addon = 'unpacked' + self._addons[mozpath.dirname(path)] = addon + + def _add_manifest_file(self, path, file): + ''' + Add the given BaseFile with manifest file contents with the given path. + ''' + self._manifests.add(path) + base = '' + if hasattr(file, 'path'): + # Find the directory the given path is relative to. + b = mozpath.normsep(file.path) + if b.endswith('/' + path) or b == path: + base = os.path.normpath(b[:-len(path)]) + for e in parse_manifest(base, path, file.open()): + # ManifestResources need to be given after ManifestChrome, so just + # put all ManifestChrome in a separate queue to make them first. + if isinstance(e, ManifestChrome): + # e.move(e.base) just returns a clone of the entry. + self._chrome_queue.append(self.formatter.add_manifest, + e.move(e.base)) + elif not isinstance(e, (Manifest, ManifestInterfaces)): + self._queue.append(self.formatter.add_manifest, e.move(e.base)) + # If a binary component is added to an addon, prevent the addon + # from being packed. + if isinstance(e, ManifestBinaryComponent): + addon = mozpath.basedir(e.base, self._addons) + if addon: + self._addons[addon] = 'unpacked' + if isinstance(e, Manifest): + if e.flags: + errors.fatal('Flags are not supported on ' + + '"manifest" entries') + self._included_manifests[e.path] = path + + def get_bases(self, addons=True): + ''' + Return all paths under which root manifests have been found. Root + manifests are manifests that are included in no other manifest. + `addons` indicates whether to include addon bases as well. 
+ ''' + all_bases = set(mozpath.dirname(m) + for m in self._manifests + - set(self._included_manifests)) + if not addons: + all_bases -= set(self._addons) + else: + # If for some reason some detected addon doesn't have a + # non-included manifest. + all_bases |= set(self._addons) + return all_bases + + def close(self): + ''' + Push all instructions to the formatter. + ''' + self._closed = True + + bases = self.get_bases() + broken_bases = sorted( + m for m, includer in self._included_manifests.iteritems() + if mozpath.basedir(m, bases) != mozpath.basedir(includer, bases)) + for m in broken_bases: + errors.fatal('"%s" is included from "%s", which is outside "%s"' % + (m, self._included_manifests[m], + mozpath.basedir(m, bases))) + for base in sorted(bases): + self.formatter.add_base(base, self._addons.get(base, False)) + self._chrome_queue.execute() + self._queue.execute() + self._file_queue.execute() + + +class SimpleManifestSink(object): + ''' + Parser sink for "simple" package manifests. Simple package manifests use + the format described in the PackageManifestParser documentation, but don't + support file removals, and require manifests, interfaces and chrome data to + be explicitely listed. + Entries starting with bin/ are searched under bin/ in the FileFinder, but + are packaged without the bin/ prefix. + ''' + def __init__(self, finder, formatter): + ''' + Initialize the SimpleManifestSink. The given FileFinder is used to + get files matching the patterns given in the manifest. The given + formatter does the packaging job. + ''' + self._finder = finder + self.packager = SimplePackager(formatter) + self._closed = False + self._manifests = set() + + @staticmethod + def normalize_path(path): + ''' + Remove any bin/ prefix. + ''' + if mozpath.basedir(path, ['bin']) == 'bin': + return mozpath.relpath(path, 'bin') + return path + + def add(self, component, pattern): + ''' + Add files with the given pattern in the given component. 
+ ''' + assert not self._closed + added = False + for p, f in self._finder.find(pattern): + added = True + if is_manifest(p): + self._manifests.add(p) + dest = mozpath.join(component.destdir, SimpleManifestSink.normalize_path(p)) + self.packager.add(dest, f) + if not added: + errors.error('Missing file(s): %s' % pattern) + + def remove(self, component, pattern): + ''' + Remove files with the given pattern in the given component. + ''' + assert not self._closed + errors.fatal('Removal is unsupported') + + def close(self, auto_root_manifest=True): + ''' + Add possibly missing bits and push all instructions to the formatter. + ''' + if auto_root_manifest: + # Simple package manifests don't contain the root manifests, so + # find and add them. + paths = [mozpath.dirname(m) for m in self._manifests] + path = mozpath.dirname(mozpath.commonprefix(paths)) + for p, f in self._finder.find(mozpath.join(path, + 'chrome.manifest')): + if not p in self._manifests: + self.packager.add(SimpleManifestSink.normalize_path(p), f) + self.packager.close() diff --git a/python/mozbuild/mozpack/packager/formats.py b/python/mozbuild/mozpack/packager/formats.py new file mode 100644 index 000000000..c4adabab0 --- /dev/null +++ b/python/mozbuild/mozpack/packager/formats.py @@ -0,0 +1,324 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import + +from mozpack.chrome.manifest import ( + Manifest, + ManifestInterfaces, + ManifestChrome, + ManifestBinaryComponent, + ManifestResource, +) +from urlparse import urlparse +import mozpack.path as mozpath +from mozpack.files import ( + ManifestFile, + XPTFile, +) +from mozpack.copier import ( + FileRegistry, + FileRegistrySubtree, + Jarrer, +) + +STARTUP_CACHE_PATHS = [ + 'jsloader', + 'jssubloader', +] + +''' +Formatters are classes receiving packaging instructions and creating the +appropriate package layout. + +There are three distinct formatters, each handling one of the different chrome +formats: + - flat: essentially, copies files from the source with the same file system + layout. Manifests entries are grouped in a single manifest per directory, + as well as XPT interfaces. + - jar: chrome content is packaged in jar files. + - omni: chrome content, modules, non-binary components, and many other + elements are packaged in an omnijar file for each base directory. + +The base interface provides the following methods: + - add_base(path [, addon]) + Register a base directory for an application or GRE, or an addon. + Base directories usually contain a root manifest (manifests not + included in any other manifest) named chrome.manifest. + The optional addon argument tells whether the base directory + is that of a packed addon (True), unpacked addon ('unpacked') or + otherwise (False). + - add(path, content) + Add the given content (BaseFile instance) at the given virtual path + - add_interfaces(path, content) + Add the given content (BaseFile instance) and link it to other + interfaces in the parent directory of the given virtual path. + - add_manifest(entry) + Add a ManifestEntry. + - contains(path) + Returns whether the given virtual path is known of the formatter. + +The virtual paths mentioned above are paths as they would be with a flat +chrome. 
+ +Formatters all take a FileCopier instance they will fill with the packaged +data. +''' + + +class PiecemealFormatter(object): + ''' + Generic formatter that dispatches across different sub-formatters + according to paths. + ''' + def __init__(self, copier): + assert isinstance(copier, (FileRegistry, FileRegistrySubtree)) + self.copier = copier + self._sub_formatter = {} + self._frozen_bases = False + + def add_base(self, base, addon=False): + # Only allow to add a base directory before calls to _get_base() + assert not self._frozen_bases + assert base not in self._sub_formatter + self._add_base(base, addon) + + def _get_base(self, path): + ''' + Return the deepest base directory containing the given path. + ''' + self._frozen_bases = True + base = mozpath.basedir(path, self._sub_formatter.keys()) + relpath = mozpath.relpath(path, base) if base else path + return base, relpath + + def add(self, path, content): + base, relpath = self._get_base(path) + if base is None: + return self.copier.add(relpath, content) + return self._sub_formatter[base].add(relpath, content) + + def add_manifest(self, entry): + base, relpath = self._get_base(entry.base) + assert base is not None + return self._sub_formatter[base].add_manifest(entry.move(relpath)) + + def add_interfaces(self, path, content): + base, relpath = self._get_base(path) + assert base is not None + return self._sub_formatter[base].add_interfaces(relpath, content) + + def contains(self, path): + assert '*' not in path + base, relpath = self._get_base(path) + if base is None: + return self.copier.contains(relpath) + return self._sub_formatter[base].contains(relpath) + + +class FlatFormatter(PiecemealFormatter): + ''' + Formatter for the flat package format. + ''' + def _add_base(self, base, addon=False): + self._sub_formatter[base] = FlatSubFormatter( + FileRegistrySubtree(base, self.copier)) + + +class FlatSubFormatter(object): + ''' + Sub-formatter for the flat package format. 
+ ''' + def __init__(self, copier): + assert isinstance(copier, (FileRegistry, FileRegistrySubtree)) + self.copier = copier + + def add(self, path, content): + self.copier.add(path, content) + + def add_manifest(self, entry): + # Store manifest entries in a single manifest per directory, named + # after their parent directory, except for root manifests, all named + # chrome.manifest. + if entry.base: + name = mozpath.basename(entry.base) + else: + name = 'chrome' + path = mozpath.normpath(mozpath.join(entry.base, '%s.manifest' % name)) + if not self.copier.contains(path): + # Add a reference to the manifest file in the parent manifest, if + # the manifest file is not a root manifest. + if entry.base: + parent = mozpath.dirname(entry.base) + relbase = mozpath.basename(entry.base) + relpath = mozpath.join(relbase, + mozpath.basename(path)) + self.add_manifest(Manifest(parent, relpath)) + self.copier.add(path, ManifestFile(entry.base)) + self.copier[path].add(entry) + + def add_interfaces(self, path, content): + # Interfaces in the same directory are all linked together in an + # interfaces.xpt file. + interfaces_path = mozpath.join(mozpath.dirname(path), + 'interfaces.xpt') + if not self.copier.contains(interfaces_path): + self.add_manifest(ManifestInterfaces(mozpath.dirname(path), + 'interfaces.xpt')) + self.copier.add(interfaces_path, XPTFile()) + self.copier[interfaces_path].add(content) + + def contains(self, path): + assert '*' not in path + return self.copier.contains(path) + + +class JarFormatter(PiecemealFormatter): + ''' + Formatter for the jar package format. Assumes manifest entries related to + chrome are registered before the chrome data files are added. Also assumes + manifest entries for resources are registered after chrome manifest + entries. 
+ ''' + def __init__(self, copier, compress=True, optimize=True): + PiecemealFormatter.__init__(self, copier) + self._compress=compress + self._optimize=optimize + + def _add_base(self, base, addon=False): + if addon is True: + jarrer = Jarrer(self._compress, self._optimize) + self.copier.add(base + '.xpi', jarrer) + self._sub_formatter[base] = FlatSubFormatter(jarrer) + else: + self._sub_formatter[base] = JarSubFormatter( + FileRegistrySubtree(base, self.copier), + self._compress, self._optimize) + + +class JarSubFormatter(PiecemealFormatter): + ''' + Sub-formatter for the jar package format. It is a PiecemealFormatter that + dispatches between further sub-formatter for each of the jar files it + dispatches the chrome data to, and a FlatSubFormatter for the non-chrome + files. + ''' + def __init__(self, copier, compress=True, optimize=True): + PiecemealFormatter.__init__(self, copier) + self._frozen_chrome = False + self._compress = compress + self._optimize = optimize + self._sub_formatter[''] = FlatSubFormatter(copier) + + def _jarize(self, entry, relpath): + ''' + Transform a manifest entry in one pointing to chrome data in a jar. + Return the corresponding chrome path and the new entry. + ''' + base = entry.base + basepath = mozpath.split(relpath)[0] + chromepath = mozpath.join(base, basepath) + entry = entry.rebase(chromepath) \ + .move(mozpath.join(base, 'jar:%s.jar!' 
% basepath)) \ + .rebase(base) + return chromepath, entry + + def add_manifest(self, entry): + if isinstance(entry, ManifestChrome) and \ + not urlparse(entry.relpath).scheme: + chromepath, entry = self._jarize(entry, entry.relpath) + assert not self._frozen_chrome + if chromepath not in self._sub_formatter: + jarrer = Jarrer(self._compress, self._optimize) + self.copier.add(chromepath + '.jar', jarrer) + self._sub_formatter[chromepath] = FlatSubFormatter(jarrer) + elif isinstance(entry, ManifestResource) and \ + not urlparse(entry.target).scheme: + chromepath, new_entry = self._jarize(entry, entry.target) + if chromepath in self._sub_formatter: + entry = new_entry + PiecemealFormatter.add_manifest(self, entry) + + +class OmniJarFormatter(JarFormatter): + ''' + Formatter for the omnijar package format. + ''' + def __init__(self, copier, omnijar_name, compress=True, optimize=True, + non_resources=()): + JarFormatter.__init__(self, copier, compress, optimize) + self._omnijar_name = omnijar_name + self._non_resources = non_resources + + def _add_base(self, base, addon=False): + if addon: + JarFormatter._add_base(self, base, addon) + else: + # Initialize a chrome.manifest next to the omnijar file so that + # there's always a chrome.manifest file, even an empty one. + path = mozpath.normpath(mozpath.join(base, 'chrome.manifest')) + if not self.copier.contains(path): + self.copier.add(path, ManifestFile('')) + self._sub_formatter[base] = OmniJarSubFormatter( + FileRegistrySubtree(base, self.copier), self._omnijar_name, + self._compress, self._optimize, self._non_resources) + + +class OmniJarSubFormatter(PiecemealFormatter): + ''' + Sub-formatter for the omnijar package format. It is a PiecemealFormatter + that dispatches between a FlatSubFormatter for the resources data and + another FlatSubFormatter for the other files. 
+ ''' + def __init__(self, copier, omnijar_name, compress=True, optimize=True, + non_resources=()): + PiecemealFormatter.__init__(self, copier) + self._omnijar_name = omnijar_name + self._compress = compress + self._optimize = optimize + self._non_resources = non_resources + self._sub_formatter[''] = FlatSubFormatter(copier) + jarrer = Jarrer(self._compress, self._optimize) + self._sub_formatter[omnijar_name] = FlatSubFormatter(jarrer) + + def _get_base(self, path): + base = self._omnijar_name if self.is_resource(path) else '' + # Only add the omnijar file if something ends up in it. + if base and not self.copier.contains(base): + self.copier.add(base, self._sub_formatter[base].copier) + return base, path + + def add_manifest(self, entry): + base = '' + if not isinstance(entry, ManifestBinaryComponent): + base = self._omnijar_name + formatter = self._sub_formatter[base] + return formatter.add_manifest(entry) + + def is_resource(self, path): + ''' + Return whether the given path corresponds to a resource to be put in an + omnijar archive. 
+ ''' + if any(mozpath.match(path, p.replace('*', '**')) + for p in self._non_resources): + return False + path = mozpath.split(path) + if path[0] == 'chrome': + return len(path) == 1 or path[1] != 'icons' + if path[0] == 'components': + return path[-1].endswith(('.js', '.xpt')) + if path[0] == 'res': + return len(path) == 1 or \ + (path[1] != 'cursors' and path[1] != 'MainMenu.nib') + if path[0] == 'defaults': + return len(path) != 3 or \ + not (path[2] == 'channel-prefs.js' and + path[1] in ['pref', 'preferences']) + return path[0] in [ + 'modules', + 'greprefs.js', + 'hyphenation', + 'update.locale', + ] or path[0] in STARTUP_CACHE_PATHS diff --git a/python/mozbuild/mozpack/packager/l10n.py b/python/mozbuild/mozpack/packager/l10n.py new file mode 100644 index 000000000..758064f59 --- /dev/null +++ b/python/mozbuild/mozpack/packager/l10n.py @@ -0,0 +1,259 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +''' +Replace localized parts of a packaged directory with data from a langpack +directory. 
+''' + +import os +import mozpack.path as mozpath +from mozpack.packager.formats import ( + FlatFormatter, + JarFormatter, + OmniJarFormatter, +) +from mozpack.packager import ( + Component, + SimplePackager, + SimpleManifestSink, +) +from mozpack.files import ( + ComposedFinder, + ManifestFile, +) +from mozpack.copier import ( + FileCopier, + Jarrer, +) +from mozpack.chrome.manifest import ( + ManifestLocale, + ManifestEntryWithRelPath, + is_manifest, + ManifestChrome, + Manifest, +) +from mozpack.errors import errors +from mozpack.packager.unpack import UnpackFinder +from createprecomplete import generate_precomplete + + +class LocaleManifestFinder(object): + def __init__(self, finder): + entries = self.entries = [] + bases = self.bases = [] + + class MockFormatter(object): + def add_interfaces(self, path, content): + pass + + def add(self, path, content): + pass + + def add_manifest(self, entry): + if entry.localized: + entries.append(entry) + + def add_base(self, base, addon=False): + bases.append(base) + + # SimplePackager rejects "manifest foo.manifest" entries with + # additional flags (such as "manifest foo.manifest application=bar"). + # Those type of entries are used by language packs to work as addons, + # but are not necessary for the purpose of l10n repacking. So we wrap + # the finder in order to remove those entries. + class WrapFinder(object): + def __init__(self, finder): + self._finder = finder + + def find(self, pattern): + for p, f in self._finder.find(pattern): + if isinstance(f, ManifestFile): + unwanted = [ + e for e in f._entries + if isinstance(e, Manifest) and e.flags + ] + if unwanted: + f = ManifestFile( + f._base, + [e for e in f._entries if e not in unwanted]) + yield p, f + + sink = SimpleManifestSink(WrapFinder(finder), MockFormatter()) + sink.add(Component(''), '*') + sink.close(False) + + # Find unique locales used in these manifest entries. 
+ self.locales = list(set(e.id for e in self.entries + if isinstance(e, ManifestLocale))) + + +def _repack(app_finder, l10n_finder, copier, formatter, non_chrome=set()): + app = LocaleManifestFinder(app_finder) + l10n = LocaleManifestFinder(l10n_finder) + + # The code further below assumes there's only one locale replaced with + # another one. + if len(app.locales) > 1: + errors.fatal("Multiple app locales aren't supported: " + + ",".join(app.locales)) + if len(l10n.locales) > 1: + errors.fatal("Multiple l10n locales aren't supported: " + + ",".join(l10n.locales)) + locale = app.locales[0] + l10n_locale = l10n.locales[0] + + # For each base directory, store what path a locale chrome package name + # corresponds to. + # e.g., for the following entry under app/chrome: + # locale foo en-US path/to/files + # keep track that the locale path for foo in app is + # app/chrome/path/to/files. + l10n_paths = {} + for e in l10n.entries: + if isinstance(e, ManifestChrome): + base = mozpath.basedir(e.path, app.bases) + l10n_paths.setdefault(base, {}) + l10n_paths[base][e.name] = e.path + + # For chrome and non chrome files or directories, store what langpack path + # corresponds to a package path. 
+ paths = {} + for e in app.entries: + if isinstance(e, ManifestEntryWithRelPath): + base = mozpath.basedir(e.path, app.bases) + if base not in l10n_paths: + errors.fatal("Locale doesn't contain %s/" % base) + # Allow errors to accumulate + continue + if e.name not in l10n_paths[base]: + errors.fatal("Locale doesn't have a manifest entry for '%s'" % + e.name) + # Allow errors to accumulate + continue + paths[e.path] = l10n_paths[base][e.name] + + for pattern in non_chrome: + for base in app.bases: + path = mozpath.join(base, pattern) + left = set(p for p, f in app_finder.find(path)) + right = set(p for p, f in l10n_finder.find(path)) + for p in right: + paths[p] = p + for p in left - right: + paths[p] = None + + # Create a new package, with non localized bits coming from the original + # package, and localized bits coming from the langpack. + packager = SimplePackager(formatter) + for p, f in app_finder: + if is_manifest(p): + # Remove localized manifest entries. + for e in [e for e in f if e.localized]: + f.remove(e) + # If the path is one that needs a locale replacement, use the + # corresponding file from the langpack. + path = None + if p in paths: + path = paths[p] + if not path: + continue + else: + base = mozpath.basedir(p, paths.keys()) + if base: + subpath = mozpath.relpath(p, base) + path = mozpath.normpath(mozpath.join(paths[base], + subpath)) + if path: + files = [f for p, f in l10n_finder.find(path)] + if not len(files): + if base not in non_chrome: + finderBase = "" + if hasattr(l10n_finder, 'base'): + finderBase = l10n_finder.base + errors.error("Missing file: %s" % + os.path.join(finderBase, path)) + else: + packager.add(path, files[0]) + else: + packager.add(p, f) + + # Add localized manifest entries from the langpack. 
+ l10n_manifests = [] + for base in set(e.base for e in l10n.entries): + m = ManifestFile(base, [e for e in l10n.entries if e.base == base]) + path = mozpath.join(base, 'chrome.%s.manifest' % l10n_locale) + l10n_manifests.append((path, m)) + bases = packager.get_bases() + for path, m in l10n_manifests: + base = mozpath.basedir(path, bases) + packager.add(path, m) + # Add a "manifest $path" entry in the top manifest under that base. + m = ManifestFile(base) + m.add(Manifest(base, mozpath.relpath(path, base))) + packager.add(mozpath.join(base, 'chrome.manifest'), m) + + packager.close() + + # Add any remaining non chrome files. + for pattern in non_chrome: + for base in bases: + for p, f in l10n_finder.find(mozpath.join(base, pattern)): + if not formatter.contains(p): + formatter.add(p, f) + + # Transplant jar preloading information. + for path, log in app_finder.jarlogs.iteritems(): + assert isinstance(copier[path], Jarrer) + copier[path].preload([l.replace(locale, l10n_locale) for l in log]) + + +def repack(source, l10n, extra_l10n={}, non_resources=[], non_chrome=set()): + ''' + Replace localized data from the `source` directory with localized data + from `l10n` and `extra_l10n`. + + The `source` argument points to a directory containing a packaged + application (in omnijar, jar or flat form). + The `l10n` argument points to a directory containing the main localized + data (usually in the form of a language pack addon) to use to replace + in the packaged application. + The `extra_l10n` argument contains a dict associating relative paths in + the source to separate directories containing localized data for them. + This can be used to point at different language pack addons for different + parts of the package application. + The `non_resources` argument gives a list of relative paths in the source + that should not be added in an omnijar in case the packaged application + is in that format. 
+ The `non_chrome` argument gives a list of file/directory patterns for + localized files that are not listed in a chrome.manifest. + ''' + app_finder = UnpackFinder(source) + l10n_finder = UnpackFinder(l10n) + if extra_l10n: + finders = { + '': l10n_finder, + } + for base, path in extra_l10n.iteritems(): + finders[base] = UnpackFinder(path) + l10n_finder = ComposedFinder(finders) + copier = FileCopier() + if app_finder.kind == 'flat': + formatter = FlatFormatter(copier) + elif app_finder.kind == 'jar': + formatter = JarFormatter(copier, + optimize=app_finder.optimizedjars, + compress=app_finder.compressed) + elif app_finder.kind == 'omni': + formatter = OmniJarFormatter(copier, app_finder.omnijar, + optimize=app_finder.optimizedjars, + compress=app_finder.compressed, + non_resources=non_resources) + + with errors.accumulate(): + _repack(app_finder, l10n_finder, copier, formatter, non_chrome) + copier.copy(source, skip_if_older=False) + generate_precomplete(source) diff --git a/python/mozbuild/mozpack/packager/unpack.py b/python/mozbuild/mozpack/packager/unpack.py new file mode 100644 index 000000000..fa2b474e7 --- /dev/null +++ b/python/mozbuild/mozpack/packager/unpack.py @@ -0,0 +1,202 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import + +import mozpack.path as mozpath +from mozpack.files import ( + BaseFinder, + FileFinder, + DeflatedFile, + ManifestFile, +) +from mozpack.chrome.manifest import ( + parse_manifest, + ManifestEntryWithRelPath, + ManifestResource, + is_manifest, +) +from mozpack.mozjar import JarReader +from mozpack.copier import ( + FileRegistry, + FileCopier, +) +from mozpack.packager import SimplePackager +from mozpack.packager.formats import ( + FlatFormatter, + STARTUP_CACHE_PATHS, +) +from urlparse import urlparse + + +class UnpackFinder(BaseFinder): + ''' + Special Finder object that treats the source package directory as if it + were in the flat chrome format, whatever chrome format it actually is in. + + This means that for example, paths like chrome/browser/content/... match + files under jar:chrome/browser.jar!/content/... in case of jar chrome + format. + + The only argument to the constructor is a Finder instance or a path. + The UnpackFinder is populated with files from this Finder instance, + or with files from a FileFinder using the given path as its root. + ''' + def __init__(self, source): + if isinstance(source, BaseFinder): + self._finder = source + else: + self._finder = FileFinder(source) + self.base = self._finder.base + self.files = FileRegistry() + self.kind = 'flat' + self.omnijar = None + self.jarlogs = {} + self.optimizedjars = False + self.compressed = True + + jars = set() + + for p, f in self._finder.find('*'): + # Skip the precomplete file, which is generated at packaging time. + if p == 'precomplete': + continue + base = mozpath.dirname(p) + # If the file is a zip/jar that is not a .xpi, and contains a + # chrome.manifest, it is an omnijar. All the files it contains + # go in the directory containing the omnijar. Manifests are merged + # if there is a corresponding manifest in the directory. 
+ if not p.endswith('.xpi') and self._maybe_zip(f) and \ + (mozpath.basename(p) == self.omnijar or + not self.omnijar): + jar = self._open_jar(p, f) + if 'chrome.manifest' in jar: + self.kind = 'omni' + self.omnijar = mozpath.basename(p) + self._fill_with_jar(base, jar) + continue + # If the file is a manifest, scan its entries for some referencing + # jar: urls. If there are some, the files contained in the jar they + # point to, go under a directory named after the jar. + if is_manifest(p): + m = self.files[p] if self.files.contains(p) \ + else ManifestFile(base) + for e in parse_manifest(self.base, p, f.open()): + m.add(self._handle_manifest_entry(e, jars)) + if self.files.contains(p): + continue + f = m + # If the file is a packed addon, unpack it under a directory named + # after the xpi. + if p.endswith('.xpi') and self._maybe_zip(f): + self._fill_with_jar(p[:-4], self._open_jar(p, f)) + continue + if not p in jars: + self.files.add(p, f) + + def _fill_with_jar(self, base, jar): + for j in jar: + path = mozpath.join(base, j.filename) + if is_manifest(j.filename): + m = self.files[path] if self.files.contains(path) \ + else ManifestFile(mozpath.dirname(path)) + for e in parse_manifest(None, path, j): + m.add(e) + if not self.files.contains(path): + self.files.add(path, m) + continue + else: + self.files.add(path, DeflatedFile(j)) + + def _handle_manifest_entry(self, entry, jars): + jarpath = None + if isinstance(entry, ManifestEntryWithRelPath) and \ + urlparse(entry.relpath).scheme == 'jar': + jarpath, entry = self._unjarize(entry, entry.relpath) + elif isinstance(entry, ManifestResource) and \ + urlparse(entry.target).scheme == 'jar': + jarpath, entry = self._unjarize(entry, entry.target) + if jarpath: + # Don't defer unpacking the jar file. If we already saw + # it, take (and remove) it from the registry. If we + # haven't, try to find it now. 
+ if self.files.contains(jarpath): + jar = self.files[jarpath] + self.files.remove(jarpath) + else: + jar = [f for p, f in self._finder.find(jarpath)] + assert len(jar) == 1 + jar = jar[0] + if not jarpath in jars: + base = mozpath.splitext(jarpath)[0] + for j in self._open_jar(jarpath, jar): + self.files.add(mozpath.join(base, + j.filename), + DeflatedFile(j)) + jars.add(jarpath) + self.kind = 'jar' + return entry + + def _open_jar(self, path, file): + ''' + Return a JarReader for the given BaseFile instance, keeping a log of + the preloaded entries it has. + ''' + jar = JarReader(fileobj=file.open()) + if jar.is_optimized: + self.optimizedjars = True + if not any(f.compressed for f in jar): + self.compressed = False + if jar.last_preloaded: + jarlog = jar.entries.keys() + self.jarlogs[path] = jarlog[:jarlog.index(jar.last_preloaded) + 1] + return jar + + def find(self, path): + for p in self.files.match(path): + yield p, self.files[p] + + def _maybe_zip(self, file): + ''' + Return whether the given BaseFile looks like a ZIP/Jar. + ''' + header = file.open().read(8) + return len(header) == 8 and (header[0:2] == 'PK' or + header[4:6] == 'PK') + + def _unjarize(self, entry, relpath): + ''' + Transform a manifest entry pointing to chrome data in a jar in one + pointing to the corresponding unpacked path. Return the jar path and + the new entry. + ''' + base = entry.base + jar, relpath = urlparse(relpath).path.split('!', 1) + entry = entry.rebase(mozpath.join(base, 'jar:%s!' % jar)) \ + .move(mozpath.join(base, mozpath.splitext(jar)[0])) \ + .rebase(base) + return mozpath.join(base, jar), entry + + +def unpack_to_registry(source, registry): + ''' + Transform a jar chrome or omnijar packaged directory into a flat package. + + The given registry is filled with the flat package. 
# NOTE(review): the opening `def` line of this function lies outside the
# visible chunk; the signature below is inferred from the call in unpack()
# and from the body — confirm against the full mozpack/unpack.py.
def unpack_to_registry(source, registry):
    '''
    Transform a jar chrome or omnijar packaged directory into a flat
    package, adding the resulting files to the given registry.
    '''
    finder = UnpackFinder(source)
    packager = SimplePackager(FlatFormatter(registry))
    for p, f in finder.find('*'):
        # Startup-cache entries are deliberately dropped from the
        # unpacked result.
        if mozpath.split(p)[0] not in STARTUP_CACHE_PATHS:
            packager.add(p, f)
    packager.close()


def unpack(source):
    '''
    Transform a jar chrome or omnijar packaged directory into a flat package.
    '''
    copier = FileCopier()
    unpack_to_registry(source, copier)
    # skip_if_older=False: always overwrite, since the unpacked content
    # replaces the packed content in place.
    copier.copy(source, skip_if_older=False)


# --- python/mozbuild/mozpack/path.py -------------------------------------
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import posixpath
import os
import re

'''
Like os.path, with a reduced set of functions, and with normalized path
separators (always use forward slashes).
Also contains a few additional utilities not found in os.path.
'''


def normsep(path):
    '''
    Normalize path separators, by using forward slashes instead of whatever
    os.sep is.
    '''
    if os.sep != '/':
        path = path.replace(os.sep, '/')
    if os.altsep and os.altsep != '/':
        path = path.replace(os.altsep, '/')
    return path


def relpath(path, start):
    '''
    Return a normalized relative path from start to path.
    Returns '' (not '.') when both point at the same location.
    '''
    rel = normsep(os.path.relpath(path, start))
    return '' if rel == '.' else rel


def realpath(path):
    '''Return the canonical path (symlinks resolved), normalized.'''
    return normsep(os.path.realpath(path))


def abspath(path):
    '''Return the absolute path, normalized.'''
    return normsep(os.path.abspath(path))


def join(*paths):
    '''Join path components, normalizing separators in the result.'''
    return normsep(os.path.join(*paths))


def normpath(path):
    '''Normalize the path ('a/../b' -> 'b'), using forward slashes.'''
    return posixpath.normpath(normsep(path))


def dirname(path):
    '''Return the directory part of the normalized path.'''
    return posixpath.dirname(normsep(path))


def commonprefix(paths):
    '''Return the longest common leading string of the normalized paths.'''
    return posixpath.commonprefix([normsep(path) for path in paths])


def basename(path):
    '''Return the final component of the path.'''
    return os.path.basename(path)


def splitext(path):
    '''Split the normalized path into (root, extension).'''
    return posixpath.splitext(normsep(path))


def split(path):
    '''
    Return the normalized path as a list of its components.
        split('foo/bar/baz') returns ['foo', 'bar', 'baz']
    '''
    return normsep(path).split('/')


def basedir(path, bases):
    '''
    Given a list of directories (bases), return which one contains the given
    path. If several matches are found, the deepest base directory is
    returned.
        basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar']) returns 'foo/bar'
        ('foo' and 'foo/bar' both match, but 'foo/bar' is the deepest match)
    Returns None when no base contains the path.
    '''
    path = normsep(path)
    bases = [normsep(b) for b in bases]
    if path in bases:
        return path
    # Sorting in reverse order guarantees the deepest (longest) matching
    # base is examined first.
    for b in sorted(bases, reverse=True):
        if b == '' or path.startswith(b + '/'):
            return b


# Cache of compiled patterns, keyed by the original glob-like pattern.
re_cache = {}


def match(path, pattern):
    '''
    Return whether the given path matches the given pattern.
    An asterisk can be used to match any string, including the null string,
    in one part of the path:
        'foo' matches '*', 'f*' or 'fo*o'
    However, an asterisk matching a subdirectory may not match the null
    string:
        'foo/bar' does *not* match 'foo/*/bar'
    If the pattern matches one of the ancestor directories of the path, the
    path is considered matching:
        'foo/bar' matches 'foo'
    Two adjacent asterisks can be used to match files and zero or more
    directories and subdirectories.
        'foo/bar' matches 'foo/**/bar', or '**/bar'
    '''
    if not pattern:
        return True
    if pattern not in re_cache:
        # NOTE(review): the '**' rewrites below assume re.escape() escapes
        # '/' as '\/' (true on Python 2 / <3.7); on Python 3.7+ '/' is not
        # escaped and '**' handling degrades — confirm the target runtime.
        p = re.escape(pattern)
        p = re.sub(r'(^|\\\/)\\\*\\\*\\\/', r'\1(?:.+/)?', p)
        p = re.sub(r'(^|\\\/)\\\*\\\*$', r'(?:\1.+)?', p)
        # A trailing '(?:/.*)?' makes an ancestor-directory match succeed.
        p = p.replace(r'\*', '[^/]*') + '(?:/.*)?$'
        re_cache[pattern] = re.compile(p)
    return re_cache[pattern].match(path) is not None


def rebase(oldbase, base, relativepath):
    '''
    Return relativepath relative to base instead of oldbase.
    '''
    if base == oldbase:
        return relativepath
    if len(base) < len(oldbase):
        # base is an ancestor of oldbase: prepend the difference.
        assert basedir(oldbase, [base]) == base
        relbase = relpath(oldbase, base)
        result = join(relbase, relativepath)
    else:
        # oldbase is an ancestor of base: strip the difference.
        assert basedir(base, [oldbase]) == oldbase
        relbase = relpath(base, oldbase)
        result = relpath(relativepath, relbase)
    result = normpath(result)
    # Preserve a trailing slash, which normpath drops.
    if relativepath.endswith('/') and not result.endswith('/'):
        result += '/'
    return result

# --- (diff artifacts) -----------------------------------------------------
# python/mozbuild/mozpack/test/__init__.py — new empty file.
# python/mozbuild/mozpack/test/data/test_data — begins here; contains the
# literal bytes "test_data".
\ No newline at end of file diff --git a/python/mozbuild/mozpack/test/support/minify_js_verify.py b/python/mozbuild/mozpack/test/support/minify_js_verify.py new file mode 100644 index 000000000..8e4e8b759 --- /dev/null +++ b/python/mozbuild/mozpack/test/support/minify_js_verify.py @@ -0,0 +1,17 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import print_function +import sys + + +if len(sys.argv) != 4: + raise Exception('Usage: minify_js_verify <exitcode> <orig> <minified>') + +retcode = int(sys.argv[1]) + +if retcode: + print('Error message', file=sys.stderr) + +sys.exit(retcode) diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py new file mode 100644 index 000000000..6f61f7eb7 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_archive.py @@ -0,0 +1,190 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import + +import hashlib +import os +import shutil +import stat +import tarfile +import tempfile +import unittest + +from mozpack.archive import ( + DEFAULT_MTIME, + create_tar_from_files, + create_tar_gz_from_files, + create_tar_bz2_from_files, +) + +from mozunit import main + + +MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH + + +def file_hash(path): + h = hashlib.sha1() + with open(path, 'rb') as fh: + while True: + data = fh.read(8192) + if not data: + break + h.update(data) + + return h.hexdigest() + + +class TestArchive(unittest.TestCase): + def _create_files(self, root): + files = {} + for i in range(10): + p = os.path.join(root, b'file%d' % i) + with open(p, 'wb') as fh: + fh.write(b'file%d' % i) + # Need to set permissions or umask may influence testing. + os.chmod(p, MODE_STANDARD) + files[b'file%d' % i] = p + + return files + + def _verify_basic_tarfile(self, tf): + self.assertEqual(len(tf.getmembers()), 10) + + names = ['file%d' % i for i in range(10)] + self.assertEqual(tf.getnames(), names) + + for ti in tf.getmembers(): + self.assertEqual(ti.uid, 0) + self.assertEqual(ti.gid, 0) + self.assertEqual(ti.uname, '') + self.assertEqual(ti.gname, '') + self.assertEqual(ti.mode, MODE_STANDARD) + self.assertEqual(ti.mtime, DEFAULT_MTIME) + + def test_dirs_refused(self): + d = tempfile.mkdtemp() + try: + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + with self.assertRaisesRegexp(ValueError, 'not a regular'): + create_tar_from_files(fh, {'test': d}) + finally: + shutil.rmtree(d) + + def test_setuid_setgid_refused(self): + d = tempfile.mkdtemp() + try: + uid = os.path.join(d, 'setuid') + gid = os.path.join(d, 'setgid') + with open(uid, 'a'): + pass + with open(gid, 'a'): + pass + + os.chmod(uid, MODE_STANDARD | stat.S_ISUID) + os.chmod(gid, MODE_STANDARD | stat.S_ISGID) + + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + with self.assertRaisesRegexp(ValueError, 'cannot 
add file with setuid'): + create_tar_from_files(fh, {'test': uid}) + with self.assertRaisesRegexp(ValueError, 'cannot add file with setuid'): + create_tar_from_files(fh, {'test': gid}) + finally: + shutil.rmtree(d) + + def test_create_tar_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + create_tar_from_files(fh, files) + + # Output should be deterministic. + self.assertEqual(file_hash(tp), 'cd16cee6f13391abd94dfa435d2633b61ed727f1') + + with tarfile.open(tp, 'r') as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + def test_executable_preserved(self): + d = tempfile.mkdtemp() + try: + p = os.path.join(d, 'exec') + with open(p, 'wb') as fh: + fh.write('#!/bin/bash\n') + os.chmod(p, MODE_STANDARD | stat.S_IXUSR) + + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + create_tar_from_files(fh, {'exec': p}) + + self.assertEqual(file_hash(tp), '357e1b81c0b6cfdfa5d2d118d420025c3c76ee93') + + with tarfile.open(tp, 'r') as tf: + m = tf.getmember('exec') + self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR) + + finally: + shutil.rmtree(d) + + def test_create_tar_gz_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + gp = os.path.join(d, 'test.tar.gz') + with open(gp, 'wb') as fh: + create_tar_gz_from_files(fh, files) + + self.assertEqual(file_hash(gp), 'acb602239c1aeb625da5e69336775609516d60f5') + + with tarfile.open(gp, 'r:gz') as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + def test_tar_gz_name(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + gp = os.path.join(d, 'test.tar.gz') + with open(gp, 'wb') as fh: + create_tar_gz_from_files(fh, files, filename='foobar', compresslevel=1) + + self.assertEqual(file_hash(gp), 'fd099f96480cc1100f37baa8e89a6b820dbbcbd3') + + with tarfile.open(gp, 'r:gz') as tf: + self._verify_basic_tarfile(tf) + + finally: + 
shutil.rmtree(d) + + def test_create_tar_bz2_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + bp = os.path.join(d, 'test.tar.bz2') + with open(bp, 'wb') as fh: + create_tar_bz2_from_files(fh, files) + + self.assertEqual(file_hash(bp), '1827ad00dfe7acf857b7a1c95ce100361e3f6eea') + + with tarfile.open(bp, 'r:bz2') as tf: + self._verify_basic_tarfile(tf) + finally: + shutil.rmtree(d) + + +if __name__ == '__main__': + main() diff --git a/python/mozbuild/mozpack/test/test_chrome_flags.py b/python/mozbuild/mozpack/test/test_chrome_flags.py new file mode 100644 index 000000000..e6a5257e9 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_chrome_flags.py @@ -0,0 +1,148 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest +import mozunit +from mozpack.chrome.flags import ( + Flag, + StringFlag, + VersionFlag, + Flags, +) +from mozpack.errors import ErrorMessage + + +class TestFlag(unittest.TestCase): + def test_flag(self): + flag = Flag('flag') + self.assertEqual(str(flag), '') + self.assertTrue(flag.matches(False)) + self.assertTrue(flag.matches('false')) + self.assertFalse(flag.matches('true')) + self.assertRaises(ErrorMessage, flag.add_definition, 'flag=') + self.assertRaises(ErrorMessage, flag.add_definition, 'flag=42') + self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=false') + + flag.add_definition('flag=1') + self.assertEqual(str(flag), 'flag=1') + self.assertTrue(flag.matches(True)) + self.assertTrue(flag.matches('1')) + self.assertFalse(flag.matches('no')) + + flag.add_definition('flag=true') + self.assertEqual(str(flag), 'flag=true') + self.assertTrue(flag.matches(True)) + self.assertTrue(flag.matches('true')) + self.assertFalse(flag.matches('0')) + + flag.add_definition('flag=no') + self.assertEqual(str(flag), 'flag=no') + 
self.assertTrue(flag.matches('false')) + self.assertFalse(flag.matches('1')) + + flag.add_definition('flag') + self.assertEqual(str(flag), 'flag') + self.assertFalse(flag.matches('false')) + self.assertTrue(flag.matches('true')) + self.assertFalse(flag.matches(False)) + + def test_string_flag(self): + flag = StringFlag('flag') + self.assertEqual(str(flag), '') + self.assertTrue(flag.matches('foo')) + self.assertRaises(ErrorMessage, flag.add_definition, 'flag>=2') + + flag.add_definition('flag=foo') + self.assertEqual(str(flag), 'flag=foo') + self.assertTrue(flag.matches('foo')) + self.assertFalse(flag.matches('bar')) + + flag.add_definition('flag=bar') + self.assertEqual(str(flag), 'flag=foo flag=bar') + self.assertTrue(flag.matches('foo')) + self.assertTrue(flag.matches('bar')) + self.assertFalse(flag.matches('baz')) + + flag = StringFlag('flag') + flag.add_definition('flag!=bar') + self.assertEqual(str(flag), 'flag!=bar') + self.assertTrue(flag.matches('foo')) + self.assertFalse(flag.matches('bar')) + + def test_version_flag(self): + flag = VersionFlag('flag') + self.assertEqual(str(flag), '') + self.assertTrue(flag.matches('1.0')) + self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=2') + + flag.add_definition('flag=1.0') + self.assertEqual(str(flag), 'flag=1.0') + self.assertTrue(flag.matches('1.0')) + self.assertFalse(flag.matches('2.0')) + + flag.add_definition('flag=2.0') + self.assertEqual(str(flag), 'flag=1.0 flag=2.0') + self.assertTrue(flag.matches('1.0')) + self.assertTrue(flag.matches('2.0')) + self.assertFalse(flag.matches('3.0')) + + flag = VersionFlag('flag') + flag.add_definition('flag>=2.0') + self.assertEqual(str(flag), 'flag>=2.0') + self.assertFalse(flag.matches('1.0')) + self.assertTrue(flag.matches('2.0')) + self.assertTrue(flag.matches('3.0')) + + flag.add_definition('flag<1.10') + self.assertEqual(str(flag), 'flag>=2.0 flag<1.10') + self.assertTrue(flag.matches('1.0')) + self.assertTrue(flag.matches('1.9')) + 
self.assertFalse(flag.matches('1.10')) + self.assertFalse(flag.matches('1.20')) + self.assertTrue(flag.matches('2.0')) + self.assertTrue(flag.matches('3.0')) + self.assertRaises(Exception, flag.add_definition, 'flag<') + self.assertRaises(Exception, flag.add_definition, 'flag>') + self.assertRaises(Exception, flag.add_definition, 'flag>=') + self.assertRaises(Exception, flag.add_definition, 'flag<=') + self.assertRaises(Exception, flag.add_definition, 'flag!=1.0') + + +class TestFlags(unittest.TestCase): + def setUp(self): + self.flags = Flags('contentaccessible=yes', + 'appversion>=3.5', + 'application=foo', + 'application=bar', + 'appversion<2.0', + 'platform', + 'abi!=Linux_x86-gcc3') + + def test_flags_str(self): + self.assertEqual(str(self.flags), 'contentaccessible=yes ' + + 'appversion>=3.5 appversion<2.0 application=foo ' + + 'application=bar platform abi!=Linux_x86-gcc3') + + def test_flags_match_unset(self): + self.assertTrue(self.flags.match(os='WINNT')) + + def test_flags_match(self): + self.assertTrue(self.flags.match(application='foo')) + self.assertFalse(self.flags.match(application='qux')) + + def test_flags_match_different(self): + self.assertTrue(self.flags.match(abi='WINNT_x86-MSVC')) + self.assertFalse(self.flags.match(abi='Linux_x86-gcc3')) + + def test_flags_match_version(self): + self.assertTrue(self.flags.match(appversion='1.0')) + self.assertTrue(self.flags.match(appversion='1.5')) + self.assertFalse(self.flags.match(appversion='2.0')) + self.assertFalse(self.flags.match(appversion='3.0')) + self.assertTrue(self.flags.match(appversion='3.5')) + self.assertTrue(self.flags.match(appversion='3.10')) + + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_chrome_manifest.py b/python/mozbuild/mozpack/test/test_chrome_manifest.py new file mode 100644 index 000000000..690c6acdc --- /dev/null +++ b/python/mozbuild/mozpack/test/test_chrome_manifest.py @@ -0,0 +1,149 @@ +# This Source Code Form is subject to 
the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest +import mozunit +import os +from mozpack.chrome.manifest import ( + ManifestContent, + ManifestLocale, + ManifestSkin, + Manifest, + ManifestResource, + ManifestOverride, + ManifestComponent, + ManifestContract, + ManifestInterfaces, + ManifestBinaryComponent, + ManifestCategory, + ManifestStyle, + ManifestOverlay, + MANIFESTS_TYPES, + parse_manifest, + parse_manifest_line, +) +from mozpack.errors import errors, AccumulatedErrors +from test_errors import TestErrors + + +class TestManifest(unittest.TestCase): + def test_parse_manifest(self): + manifest = [ + 'content global content/global/', + 'content global content/global/ application=foo application=bar' + + ' platform', + 'locale global en-US content/en-US/', + 'locale global en-US content/en-US/ application=foo', + 'skin global classic/1.0 content/skin/classic/', + 'skin global classic/1.0 content/skin/classic/ application=foo' + + ' os=WINNT', + '', + 'manifest pdfjs/chrome.manifest', + 'resource gre-resources toolkit/res/', + 'override chrome://global/locale/netError.dtd' + + ' chrome://browser/locale/netError.dtd', + '# Comment', + 'component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js', + 'contract @mozilla.org/foo;1' + + ' {b2bba4df-057d-41ea-b6b1-94a10a8ede68}', + 'interfaces foo.xpt', + 'binary-component bar.so', + 'category command-line-handler m-browser' + + ' @mozilla.org/browser/clh;1' + + ' application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}', + 'style chrome://global/content/customizeToolbar.xul' + + ' chrome://browser/skin/', + 'overlay chrome://global/content/viewSource.xul' + + ' chrome://browser/content/viewSourceOverlay.xul', + ] + other_manifest = [ + 'content global content/global/' + ] + expected_result = [ + ManifestContent('', 'global', 'content/global/'), + ManifestContent('', 'global', 'content/global/', 
'application=foo', + 'application=bar', 'platform'), + ManifestLocale('', 'global', 'en-US', 'content/en-US/'), + ManifestLocale('', 'global', 'en-US', 'content/en-US/', + 'application=foo'), + ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/'), + ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/', + 'application=foo', 'os=WINNT'), + Manifest('', 'pdfjs/chrome.manifest'), + ManifestResource('', 'gre-resources', 'toolkit/res/'), + ManifestOverride('', 'chrome://global/locale/netError.dtd', + 'chrome://browser/locale/netError.dtd'), + ManifestComponent('', '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}', + 'foo.js'), + ManifestContract('', '@mozilla.org/foo;1', + '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}'), + ManifestInterfaces('', 'foo.xpt'), + ManifestBinaryComponent('', 'bar.so'), + ManifestCategory('', 'command-line-handler', 'm-browser', + '@mozilla.org/browser/clh;1', 'application=' + + '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'), + ManifestStyle('', 'chrome://global/content/customizeToolbar.xul', + 'chrome://browser/skin/'), + ManifestOverlay('', 'chrome://global/content/viewSource.xul', + 'chrome://browser/content/viewSourceOverlay.xul'), + ] + with mozunit.MockedOpen({'manifest': '\n'.join(manifest), + 'other/manifest': '\n'.join(other_manifest)}): + # Ensure we have tests for all types of manifests. 
+ self.assertEqual(set(type(e) for e in expected_result), + set(MANIFESTS_TYPES.values())) + self.assertEqual(list(parse_manifest(os.curdir, 'manifest')), + expected_result) + self.assertEqual(list(parse_manifest(os.curdir, 'other/manifest')), + [ManifestContent('other', 'global', + 'content/global/')]) + + def test_manifest_rebase(self): + m = parse_manifest_line('chrome', 'content global content/global/') + m = m.rebase('') + self.assertEqual(str(m), 'content global chrome/content/global/') + m = m.rebase('chrome') + self.assertEqual(str(m), 'content global content/global/') + + m = parse_manifest_line('chrome/foo', 'content global content/global/') + m = m.rebase('chrome') + self.assertEqual(str(m), 'content global foo/content/global/') + m = m.rebase('chrome/foo') + self.assertEqual(str(m), 'content global content/global/') + + m = parse_manifest_line('modules/foo', 'resource foo ./') + m = m.rebase('modules') + self.assertEqual(str(m), 'resource foo foo/') + m = m.rebase('modules/foo') + self.assertEqual(str(m), 'resource foo ./') + + m = parse_manifest_line('chrome', 'content browser browser/content/') + m = m.rebase('chrome/browser').move('jar:browser.jar!').rebase('') + self.assertEqual(str(m), 'content browser jar:browser.jar!/content/') + + +class TestManifestErrors(TestErrors, unittest.TestCase): + def test_parse_manifest_errors(self): + manifest = [ + 'skin global classic/1.0 content/skin/classic/ platform', + '', + 'binary-component bar.so', + 'unsupported foo', + ] + with mozunit.MockedOpen({'manifest': '\n'.join(manifest)}): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + list(parse_manifest(os.curdir, 'manifest')) + out = self.get_output() + # Expecting 2 errors + self.assertEqual(len(out), 2) + path = os.path.abspath('manifest') + # First on line 1 + self.assertTrue(out[0].startswith('Error: %s:1: ' % path)) + # Second on line 4 + self.assertTrue(out[1].startswith('Error: %s:4: ' % path)) + + +if __name__ == '__main__': + 
mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_copier.py b/python/mozbuild/mozpack/test/test_copier.py new file mode 100644 index 000000000..6688b3d5e --- /dev/null +++ b/python/mozbuild/mozpack/test/test_copier.py @@ -0,0 +1,529 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from mozpack.copier import ( + FileCopier, + FileRegistry, + FileRegistrySubtree, + Jarrer, +) +from mozpack.files import ( + GeneratedFile, + ExistingFile, +) +from mozpack.mozjar import JarReader +import mozpack.path as mozpath +import unittest +import mozunit +import os +import stat +from mozpack.errors import ErrorMessage +from mozpack.test.test_files import ( + MockDest, + MatchTestTemplate, + TestWithTmpDir, +) + + +class BaseTestFileRegistry(MatchTestTemplate): + def add(self, path): + self.registry.add(path, GeneratedFile(path)) + + def do_check(self, pattern, result): + self.checked = True + if result: + self.assertTrue(self.registry.contains(pattern)) + else: + self.assertFalse(self.registry.contains(pattern)) + self.assertEqual(self.registry.match(pattern), result) + + def do_test_file_registry(self, registry): + self.registry = registry + self.registry.add('foo', GeneratedFile('foo')) + bar = GeneratedFile('bar') + self.registry.add('bar', bar) + self.assertEqual(self.registry.paths(), ['foo', 'bar']) + self.assertEqual(self.registry['bar'], bar) + + self.assertRaises(ErrorMessage, self.registry.add, 'foo', + GeneratedFile('foo2')) + + self.assertRaises(ErrorMessage, self.registry.remove, 'qux') + + self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar', + GeneratedFile('foobar')) + self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar/baz', + GeneratedFile('foobar')) + + self.assertEqual(self.registry.paths(), ['foo', 'bar']) + + self.registry.remove('foo') + 
self.assertEqual(self.registry.paths(), ['bar']) + self.registry.remove('bar') + self.assertEqual(self.registry.paths(), []) + + self.prepare_match_test() + self.do_match_test() + self.assertTrue(self.checked) + self.assertEqual(self.registry.paths(), [ + 'bar', + 'foo/bar', + 'foo/baz', + 'foo/qux/1', + 'foo/qux/bar', + 'foo/qux/2/test', + 'foo/qux/2/test2', + ]) + + self.registry.remove('foo/qux') + self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz']) + + self.registry.add('foo/qux', GeneratedFile('fooqux')) + self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz', + 'foo/qux']) + self.registry.remove('foo/b*') + self.assertEqual(self.registry.paths(), ['bar', 'foo/qux']) + + self.assertEqual([f for f, c in self.registry], ['bar', 'foo/qux']) + self.assertEqual(len(self.registry), 2) + + self.add('foo/.foo') + self.assertTrue(self.registry.contains('foo/.foo')) + + def do_test_registry_paths(self, registry): + self.registry = registry + + # Can't add a file if it requires a directory in place of a + # file we also require. + self.registry.add('foo', GeneratedFile('foo')) + self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar', + GeneratedFile('foobar')) + + # Can't add a file if we already have a directory there. + self.registry.add('bar/baz', GeneratedFile('barbaz')) + self.assertRaises(ErrorMessage, self.registry.add, 'bar', + GeneratedFile('bar')) + + # Bump the count of things that require bar/ to 2. + self.registry.add('bar/zot', GeneratedFile('barzot')) + self.assertRaises(ErrorMessage, self.registry.add, 'bar', + GeneratedFile('bar')) + + # Drop the count of things that require bar/ to 1. + self.registry.remove('bar/baz') + self.assertRaises(ErrorMessage, self.registry.add, 'bar', + GeneratedFile('bar')) + + # Drop the count of things that require bar/ to 0. 
+ self.registry.remove('bar/zot') + self.registry.add('bar/zot', GeneratedFile('barzot')) + +class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase): + def test_partial_paths(self): + cases = { + 'foo/bar/baz/zot': ['foo/bar/baz', 'foo/bar', 'foo'], + 'foo/bar': ['foo'], + 'bar': [], + } + reg = FileRegistry() + for path, parts in cases.iteritems(): + self.assertEqual(reg._partial_paths(path), parts) + + def test_file_registry(self): + self.do_test_file_registry(FileRegistry()) + + def test_registry_paths(self): + self.do_test_registry_paths(FileRegistry()) + + def test_required_directories(self): + self.registry = FileRegistry() + + self.registry.add('foo', GeneratedFile('foo')) + self.assertEqual(self.registry.required_directories(), set()) + + self.registry.add('bar/baz', GeneratedFile('barbaz')) + self.assertEqual(self.registry.required_directories(), {'bar'}) + + self.registry.add('bar/zot', GeneratedFile('barzot')) + self.assertEqual(self.registry.required_directories(), {'bar'}) + + self.registry.add('bar/zap/zot', GeneratedFile('barzapzot')) + self.assertEqual(self.registry.required_directories(), {'bar', 'bar/zap'}) + + self.registry.remove('bar/zap/zot') + self.assertEqual(self.registry.required_directories(), {'bar'}) + + self.registry.remove('bar/baz') + self.assertEqual(self.registry.required_directories(), {'bar'}) + + self.registry.remove('bar/zot') + self.assertEqual(self.registry.required_directories(), set()) + + self.registry.add('x/y/z', GeneratedFile('xyz')) + self.assertEqual(self.registry.required_directories(), {'x', 'x/y'}) + + +class TestFileRegistrySubtree(BaseTestFileRegistry, unittest.TestCase): + def test_file_registry_subtree_base(self): + registry = FileRegistry() + self.assertEqual(registry, FileRegistrySubtree('', registry)) + self.assertNotEqual(registry, FileRegistrySubtree('base', registry)) + + def create_registry(self): + registry = FileRegistry() + registry.add('foo/bar', GeneratedFile('foo/bar')) + 
registry.add('baz/qux', GeneratedFile('baz/qux')) + return FileRegistrySubtree('base/root', registry) + + def test_file_registry_subtree(self): + self.do_test_file_registry(self.create_registry()) + + def test_registry_paths_subtree(self): + registry = FileRegistry() + self.do_test_registry_paths(self.create_registry()) + + +class TestFileCopier(TestWithTmpDir): + def all_dirs(self, base): + all_dirs = set() + for root, dirs, files in os.walk(base): + if not dirs: + all_dirs.add(mozpath.relpath(root, base)) + return all_dirs + + def all_files(self, base): + all_files = set() + for root, dirs, files in os.walk(base): + for f in files: + all_files.add( + mozpath.join(mozpath.relpath(root, base), f)) + return all_files + + def test_file_copier(self): + copier = FileCopier() + copier.add('foo/bar', GeneratedFile('foobar')) + copier.add('foo/qux', GeneratedFile('fooqux')) + copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz')) + copier.add('bar', GeneratedFile('bar')) + copier.add('qux/foo', GeneratedFile('quxfoo')) + copier.add('qux/bar', GeneratedFile('')) + + result = copier.copy(self.tmpdir) + self.assertEqual(self.all_files(self.tmpdir), set(copier.paths())) + self.assertEqual(self.all_dirs(self.tmpdir), + set(['foo/deep/nested/directory', 'qux'])) + + self.assertEqual(result.updated_files, set(self.tmppath(p) for p in + self.all_files(self.tmpdir))) + self.assertEqual(result.existing_files, set()) + self.assertEqual(result.removed_files, set()) + self.assertEqual(result.removed_directories, set()) + + copier.remove('foo') + copier.add('test', GeneratedFile('test')) + result = copier.copy(self.tmpdir) + self.assertEqual(self.all_files(self.tmpdir), set(copier.paths())) + self.assertEqual(self.all_dirs(self.tmpdir), set(['qux'])) + self.assertEqual(result.removed_files, set(self.tmppath(p) for p in + ('foo/bar', 'foo/qux', 'foo/deep/nested/directory/file'))) + + def test_symlink_directory_replaced(self): + """Directory symlinks in destination are 
replaced if they need to be + real directories.""" + if not self.symlink_supported: + return + + dest = self.tmppath('dest') + + copier = FileCopier() + copier.add('foo/bar/baz', GeneratedFile('foobarbaz')) + + os.makedirs(self.tmppath('dest/foo')) + dummy = self.tmppath('dummy') + os.mkdir(dummy) + link = self.tmppath('dest/foo/bar') + os.symlink(dummy, link) + + result = copier.copy(dest) + + st = os.lstat(link) + self.assertFalse(stat.S_ISLNK(st.st_mode)) + self.assertTrue(stat.S_ISDIR(st.st_mode)) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + + self.assertEqual(result.removed_directories, set()) + self.assertEqual(len(result.updated_files), 1) + + def test_remove_unaccounted_directory_symlinks(self): + """Directory symlinks in destination that are not in the way are + deleted according to remove_unaccounted and + remove_all_directory_symlinks. + """ + if not self.symlink_supported: + return + + dest = self.tmppath('dest') + + copier = FileCopier() + copier.add('foo/bar/baz', GeneratedFile('foobarbaz')) + + os.makedirs(self.tmppath('dest/foo')) + dummy = self.tmppath('dummy') + os.mkdir(dummy) + + os.mkdir(self.tmppath('dest/zot')) + link = self.tmppath('dest/zot/zap') + os.symlink(dummy, link) + + # If not remove_unaccounted but remove_empty_directories, then + # the symlinked directory remains (as does its containing + # directory). + result = copier.copy(dest, remove_unaccounted=False, + remove_empty_directories=True, + remove_all_directory_symlinks=False) + + st = os.lstat(link) + self.assertTrue(stat.S_ISLNK(st.st_mode)) + self.assertFalse(stat.S_ISDIR(st.st_mode)) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + self.assertEqual(self.all_dirs(dest), set(['foo/bar'])) + + self.assertEqual(result.removed_directories, set()) + self.assertEqual(len(result.updated_files), 1) + + # If remove_unaccounted but not remove_empty_directories, then + # only the symlinked directory is removed. 
+ result = copier.copy(dest, remove_unaccounted=True, + remove_empty_directories=False, + remove_all_directory_symlinks=False) + + st = os.lstat(self.tmppath('dest/zot')) + self.assertFalse(stat.S_ISLNK(st.st_mode)) + self.assertTrue(stat.S_ISDIR(st.st_mode)) + + self.assertEqual(result.removed_files, set([link])) + self.assertEqual(result.removed_directories, set()) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + self.assertEqual(self.all_dirs(dest), set(['foo/bar', 'zot'])) + + # If remove_unaccounted and remove_empty_directories, then + # both the symlink and its containing directory are removed. + link = self.tmppath('dest/zot/zap') + os.symlink(dummy, link) + + result = copier.copy(dest, remove_unaccounted=True, + remove_empty_directories=True, + remove_all_directory_symlinks=False) + + self.assertEqual(result.removed_files, set([link])) + self.assertEqual(result.removed_directories, set([self.tmppath('dest/zot')])) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + self.assertEqual(self.all_dirs(dest), set(['foo/bar'])) + + def test_permissions(self): + """Ensure files without write permission can be deleted.""" + with open(self.tmppath('dummy'), 'a'): + pass + + p = self.tmppath('no_perms') + with open(p, 'a'): + pass + + # Make file and directory unwritable. Reminder: making a directory + # unwritable prevents modifications (including deletes) from the list + # of files in that directory. 
+ os.chmod(p, 0o400) + os.chmod(self.tmpdir, 0o400) + + copier = FileCopier() + copier.add('dummy', GeneratedFile('content')) + result = copier.copy(self.tmpdir) + self.assertEqual(result.removed_files_count, 1) + self.assertFalse(os.path.exists(p)) + + def test_no_remove(self): + copier = FileCopier() + copier.add('foo', GeneratedFile('foo')) + + with open(self.tmppath('bar'), 'a'): + pass + + os.mkdir(self.tmppath('emptydir')) + d = self.tmppath('populateddir') + os.mkdir(d) + + with open(self.tmppath('populateddir/foo'), 'a'): + pass + + result = copier.copy(self.tmpdir, remove_unaccounted=False) + + self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar', + 'populateddir/foo'])) + self.assertEqual(self.all_dirs(self.tmpdir), set(['populateddir'])) + self.assertEqual(result.removed_files, set()) + self.assertEqual(result.removed_directories, + set([self.tmppath('emptydir')])) + + def test_no_remove_empty_directories(self): + copier = FileCopier() + copier.add('foo', GeneratedFile('foo')) + + with open(self.tmppath('bar'), 'a'): + pass + + os.mkdir(self.tmppath('emptydir')) + d = self.tmppath('populateddir') + os.mkdir(d) + + with open(self.tmppath('populateddir/foo'), 'a'): + pass + + result = copier.copy(self.tmpdir, remove_unaccounted=False, + remove_empty_directories=False) + + self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar', + 'populateddir/foo'])) + self.assertEqual(self.all_dirs(self.tmpdir), set(['emptydir', + 'populateddir'])) + self.assertEqual(result.removed_files, set()) + self.assertEqual(result.removed_directories, set()) + + def test_optional_exists_creates_unneeded_directory(self): + """Demonstrate that a directory not strictly required, but specified + as the path to an optional file, will be unnecessarily created. + + This behaviour is wrong; fixing it is tracked by Bug 972432; + and this test exists to guard against unexpected changes in + behaviour. 
+ """ + + dest = self.tmppath('dest') + + copier = FileCopier() + copier.add('foo/bar', ExistingFile(required=False)) + + result = copier.copy(dest) + + st = os.lstat(self.tmppath('dest/foo')) + self.assertFalse(stat.S_ISLNK(st.st_mode)) + self.assertTrue(stat.S_ISDIR(st.st_mode)) + + # What's worse, we have no record that dest was created. + self.assertEquals(len(result.updated_files), 0) + + # But we do have an erroneous record of an optional file + # existing when it does not. + self.assertIn(self.tmppath('dest/foo/bar'), result.existing_files) + + def test_remove_unaccounted_file_registry(self): + """Test FileCopier.copy(remove_unaccounted=FileRegistry())""" + + dest = self.tmppath('dest') + + copier = FileCopier() + copier.add('foo/bar/baz', GeneratedFile('foobarbaz')) + copier.add('foo/bar/qux', GeneratedFile('foobarqux')) + copier.add('foo/hoge/fuga', GeneratedFile('foohogefuga')) + copier.add('foo/toto/tata', GeneratedFile('footototata')) + + os.makedirs(os.path.join(dest, 'bar')) + with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh: + fh.write('barbar'); + os.makedirs(os.path.join(dest, 'foo', 'toto')) + with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh: + fh.write('foototototo'); + + result = copier.copy(dest, remove_unaccounted=False) + + self.assertEqual(self.all_files(dest), + set(copier.paths()) | { 'foo/toto/toto', 'bar/bar'}) + self.assertEqual(self.all_dirs(dest), + {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'}) + + copier2 = FileCopier() + copier2.add('foo/hoge/fuga', GeneratedFile('foohogefuga')) + + # We expect only files copied from the first copier to be removed, + # not the extra file that was there beforehand. 
+ result = copier2.copy(dest, remove_unaccounted=copier) + + self.assertEqual(self.all_files(dest), + set(copier2.paths()) | { 'foo/toto/toto', 'bar/bar'}) + self.assertEqual(self.all_dirs(dest), + {'foo/hoge', 'foo/toto', 'bar'}) + self.assertEqual(result.updated_files, + {self.tmppath('dest/foo/hoge/fuga')}) + self.assertEqual(result.existing_files, set()) + self.assertEqual(result.removed_files, {self.tmppath(p) for p in + ('dest/foo/bar/baz', 'dest/foo/bar/qux', 'dest/foo/toto/tata')}) + self.assertEqual(result.removed_directories, + {self.tmppath('dest/foo/bar')}) + + +class TestJarrer(unittest.TestCase): + def check_jar(self, dest, copier): + jar = JarReader(fileobj=dest) + self.assertEqual([f.filename for f in jar], copier.paths()) + for f in jar: + self.assertEqual(f.uncompressed_data.read(), + copier[f.filename].content) + + def test_jarrer(self): + copier = Jarrer() + copier.add('foo/bar', GeneratedFile('foobar')) + copier.add('foo/qux', GeneratedFile('fooqux')) + copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz')) + copier.add('bar', GeneratedFile('bar')) + copier.add('qux/foo', GeneratedFile('quxfoo')) + copier.add('qux/bar', GeneratedFile('')) + + dest = MockDest() + copier.copy(dest) + self.check_jar(dest, copier) + + copier.remove('foo') + copier.add('test', GeneratedFile('test')) + copier.copy(dest) + self.check_jar(dest, copier) + + copier.remove('test') + copier.add('test', GeneratedFile('replaced-content')) + copier.copy(dest) + self.check_jar(dest, copier) + + copier.copy(dest) + self.check_jar(dest, copier) + + preloaded = ['qux/bar', 'bar'] + copier.preload(preloaded) + copier.copy(dest) + + dest.seek(0) + jar = JarReader(fileobj=dest) + self.assertEqual([f.filename for f in jar], preloaded + + [p for p in copier.paths() if not p in preloaded]) + self.assertEqual(jar.last_preloaded, preloaded[-1]) + + + def test_jarrer_compress(self): + copier = Jarrer() + copier.add('foo/bar', GeneratedFile('ffffff')) + copier.add('foo/qux', 
GeneratedFile('ffffff'), compress=False) + + dest = MockDest() + copier.copy(dest) + self.check_jar(dest, copier) + + dest.seek(0) + jar = JarReader(fileobj=dest) + self.assertTrue(jar['foo/bar'].compressed) + self.assertFalse(jar['foo/qux'].compressed) + + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_errors.py b/python/mozbuild/mozpack/test/test_errors.py new file mode 100644 index 000000000..16e2b0496 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_errors.py @@ -0,0 +1,93 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from mozpack.errors import ( + errors, + ErrorMessage, + AccumulatedErrors, +) +import unittest +import mozunit +import sys +from cStringIO import StringIO + + +class TestErrors(object): + def setUp(self): + errors.out = StringIO() + errors.ignore_errors(False) + + def tearDown(self): + errors.out = sys.stderr + + def get_output(self): + return [l.strip() for l in errors.out.getvalue().splitlines()] + + +class TestErrorsImpl(TestErrors, unittest.TestCase): + def test_plain_error(self): + errors.warn('foo') + self.assertRaises(ErrorMessage, errors.error, 'foo') + self.assertRaises(ErrorMessage, errors.fatal, 'foo') + self.assertEquals(self.get_output(), ['Warning: foo']) + + def test_ignore_errors(self): + errors.ignore_errors() + errors.warn('foo') + errors.error('bar') + self.assertRaises(ErrorMessage, errors.fatal, 'foo') + self.assertEquals(self.get_output(), ['Warning: foo', 'Warning: bar']) + + def test_no_error(self): + with errors.accumulate(): + errors.warn('1') + + def test_simple_error(self): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + errors.error('1') + self.assertEquals(self.get_output(), ['Error: 1']) + + def test_error_loop(self): + with self.assertRaises(AccumulatedErrors): + with 
errors.accumulate(): + for i in range(3): + errors.error('%d' % i) + self.assertEquals(self.get_output(), + ['Error: 0', 'Error: 1', 'Error: 2']) + + def test_multiple_errors(self): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + errors.error('foo') + for i in range(3): + if i == 2: + errors.warn('%d' % i) + else: + errors.error('%d' % i) + errors.error('bar') + self.assertEquals(self.get_output(), + ['Error: foo', 'Error: 0', 'Error: 1', + 'Warning: 2', 'Error: bar']) + + def test_errors_context(self): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + self.assertEqual(errors.get_context(), None) + with errors.context('foo', 1): + self.assertEqual(errors.get_context(), ('foo', 1)) + errors.error('a') + with errors.context('bar', 2): + self.assertEqual(errors.get_context(), ('bar', 2)) + errors.error('b') + self.assertEqual(errors.get_context(), ('foo', 1)) + errors.error('c') + self.assertEqual(self.get_output(), [ + 'Error: foo:1: a', + 'Error: bar:2: b', + 'Error: foo:1: c', + ]) + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_files.py b/python/mozbuild/mozpack/test/test_files.py new file mode 100644 index 000000000..6fd617828 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_files.py @@ -0,0 +1,1160 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
from mozbuild.util import ensureParentDir

from mozpack.errors import (
    ErrorMessage,
    errors,
)
from mozpack.files import (
    AbsoluteSymlinkFile,
    ComposedFinder,
    DeflatedFile,
    Dest,
    ExistingFile,
    ExtractedTarFile,
    FileFinder,
    File,
    GeneratedFile,
    JarFinder,
    TarFinder,
    ManifestFile,
    MercurialFile,
    MercurialRevisionFinder,
    MinifiedJavaScript,
    MinifiedProperties,
    PreprocessedFile,
    XPTFile,
)

# We don't have hglib installed everywhere.
try:
    import hglib
except ImportError:
    hglib = None

try:
    from mozpack.hg import MercurialNativeRevisionFinder
except ImportError:
    MercurialNativeRevisionFinder = None

from mozpack.mozjar import (
    JarReader,
    JarWriter,
)
from mozpack.chrome.manifest import (
    ManifestContent,
    ManifestResource,
    ManifestLocale,
    ManifestOverride,
)
import unittest
import mozfile
import mozunit
import os
import random
import string
import sys
import tarfile
import mozpack.path as mozpath
from tempfile import mkdtemp
from io import BytesIO
from StringIO import StringIO
from xpt import Typelib


class TestWithTmpDir(unittest.TestCase):
    '''
    Base class creating a fresh temporary directory per test and probing
    whether symlinks can actually be created on this platform/filesystem.
    The result of the probe is exposed as self.symlink_supported.
    '''
    def setUp(self):
        self.tmpdir = mkdtemp()

        self.symlink_supported = False

        if not hasattr(os, 'symlink'):
            return

        # The presence of os.symlink is not sufficient: creating a symlink
        # can still fail at runtime (e.g. unprivileged users on Windows, or
        # filesystems without symlink support), so actually try one.
        dummy_path = self.tmppath('dummy_file')
        with open(dummy_path, 'a'):
            pass

        try:
            os.symlink(dummy_path, self.tmppath('dummy_symlink'))
            os.remove(self.tmppath('dummy_symlink'))
        except EnvironmentError:
            # Leave symlink_supported at False. The previous `pass` here
            # fell through to the assignment below and reported symlink
            # support even when symlink creation had just failed.
            return
        finally:
            os.remove(dummy_path)

        self.symlink_supported = True

    def tearDown(self):
        mozfile.rmtree(self.tmpdir)

    def tmppath(self, relpath):
        '''Return the normalized path of relpath under the temp directory.'''
        return os.path.normpath(os.path.join(self.tmpdir, relpath))


class MockDest(BytesIO, Dest):
    '''
    In-memory Dest implementation. Tracks whether the last operation was a
    read or a write so the buffer can be rewound (and truncated on a
    read->write transition) the way a real Dest behaves.
    '''
    def __init__(self):
        BytesIO.__init__(self)
        self.mode = None

    def read(self, length=-1):
        if self.mode != 'r':
            self.seek(0)
            self.mode = 'r'
        return BytesIO.read(self, length)

    def write(self, data):
        if self.mode != 'w':
            self.seek(0)
            self.truncate(0)
            self.mode = 'w'
        return BytesIO.write(self, data)

    def exists(self):
        return True

    def close(self):
        if self.mode:
            self.mode = None


class DestNoWrite(Dest):
    '''Dest that raises RuntimeError on any write attempt.

    Used by the no-op copy tests to prove that a copy did NOT happen.
    '''
    def write(self, data):
        raise RuntimeError


class TestDest(TestWithTmpDir):
    def test_dest(self):
        dest = Dest(self.tmppath('dest'))
        self.assertFalse(dest.exists())
        dest.write('foo')
        self.assertTrue(dest.exists())
        dest.write('foo')
        self.assertEqual(dest.read(4), 'foof')
        self.assertEqual(dest.read(), 'oo')
        self.assertEqual(dest.read(), '')
        dest.write('bar')
        self.assertEqual(dest.read(4), 'bar')
        dest.close()
        self.assertEqual(dest.read(), 'bar')
        dest.write('foo')
        dest.close()
        dest.write('qux')
        self.assertEqual(dest.read(), 'qux')


# Sample contents exercising the different File.copy code paths: empty,
# different content, same-size different content, identical content, and
# long random data.
rand = ''.join(random.choice(string.letters) for i in xrange(131597))
samples = [
    '',
    'test',
    'fooo',
    'same',
    'same',
    'Different and longer',
    rand,
    rand,
    rand[:-1] + '_',
    'test'
]


class TestFile(TestWithTmpDir):
    def test_file(self):
        '''
        Check that File.copy yields the proper content in the destination file
        in all situations that trigger different code paths:
        - different content
        - different content of the same size
        - same content
        - long content
        '''
        src = self.tmppath('src')
        dest = self.tmppath('dest')

        for content in samples:
            with open(src, 'wb') as tmp:
                tmp.write(content)
            # Ensure the destination file, when it exists, is older than the
            # source
            if os.path.exists(dest):
                time = os.path.getmtime(src) - 1
                os.utime(dest, (time, time))
            f = File(src)
            f.copy(dest)
            self.assertEqual(content, open(dest, 'rb').read())
            self.assertEqual(content, f.open().read())
            self.assertEqual(content, f.open().read())

    def test_file_dest(self):
        '''
        Similar to test_file, but for a destination object instead of
        a destination file. This ensures the destination object is being
        used properly by File.copy, ensuring that other subclasses of Dest
        will work.
        '''
        src = self.tmppath('src')
        dest = MockDest()

        for content in samples:
            with open(src, 'wb') as tmp:
                tmp.write(content)
            f = File(src)
            f.copy(dest)
            self.assertEqual(content, dest.getvalue())

    def test_file_open(self):
        '''
        Test whether File.open returns an appropriately reset file object.
        '''
        src = self.tmppath('src')
        content = ''.join(samples)
        with open(src, 'wb') as tmp:
            tmp.write(content)

        f = File(src)
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())

    def test_file_no_write(self):
        '''
        Test various conditions where File.copy is expected not to write
        in the destination file.
        '''
        src = self.tmppath('src')
        dest = self.tmppath('dest')

        with open(src, 'wb') as tmp:
            tmp.write('test')

        # Initial copy
        f = File(src)
        f.copy(dest)

        # Ensure subsequent copies won't trigger writes
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())

        # When the source file is newer, but with the same content, no copy
        # should occur
        time = os.path.getmtime(src) - 1
        os.utime(dest, (time, time))
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())

        # When the source file is older than the destination file, even with
        # different content, no copy should occur.
        with open(src, 'wb') as tmp:
            tmp.write('fooo')
        time = os.path.getmtime(dest) - 1
        os.utime(src, (time, time))
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())

        # Double check that under conditions where a copy occurs, we would get
        # an exception.
        time = os.path.getmtime(src) - 1
        os.utime(dest, (time, time))
        self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))

        # skip_if_older=False is expected to force a copy in this situation.
        f.copy(dest, skip_if_older=False)
        self.assertEqual('fooo', open(dest, 'rb').read())


class TestAbsoluteSymlinkFile(TestWithTmpDir):
    def test_absolute_relative(self):
        AbsoluteSymlinkFile('/foo')

        with self.assertRaisesRegexp(ValueError, 'Symlink target not absolute'):
            AbsoluteSymlinkFile('./foo')

    def test_symlink_file(self):
        source = self.tmppath('test_path')
        with open(source, 'wt') as fh:
            fh.write('Hello world')

        s = AbsoluteSymlinkFile(source)
        dest = self.tmppath('symlink')
        self.assertTrue(s.copy(dest))

        if self.symlink_supported:
            self.assertTrue(os.path.islink(dest))
            link = os.readlink(dest)
            self.assertEqual(link, source)
        else:
            # Without symlink support, copy() falls back to a real copy.
            self.assertTrue(os.path.isfile(dest))
            content = open(dest).read()
            self.assertEqual(content, 'Hello world')

    def test_replace_file_with_symlink(self):
        # If symlinks are supported, an existing file should be replaced by a
        # symlink.
        source = self.tmppath('test_path')
        with open(source, 'wt') as fh:
            fh.write('source')

        dest = self.tmppath('dest')
        with open(dest, 'a'):
            pass

        s = AbsoluteSymlinkFile(source)
        s.copy(dest, skip_if_older=False)

        if self.symlink_supported:
            self.assertTrue(os.path.islink(dest))
            link = os.readlink(dest)
            self.assertEqual(link, source)
        else:
            self.assertTrue(os.path.isfile(dest))
            content = open(dest).read()
            self.assertEqual(content, 'source')

    def test_replace_symlink(self):
        if not self.symlink_supported:
            return

        source = self.tmppath('source')
        with open(source, 'a'):
            pass

        dest = self.tmppath('dest')

        # A dangling symlink must be replaced by one pointing at source.
        os.symlink(self.tmppath('bad'), dest)
        self.assertTrue(os.path.islink(dest))

        s = AbsoluteSymlinkFile(source)
        self.assertTrue(s.copy(dest))

        self.assertTrue(os.path.islink(dest))
        link = os.readlink(dest)
        self.assertEqual(link, source)

    def test_noop(self):
        if not hasattr(os, 'symlink'):
            return

        source = self.tmppath('source')
        dest = self.tmppath('dest')

        with open(source, 'a'):
            pass

        os.symlink(source, dest)
        link = os.readlink(dest)
        self.assertEqual(link, source)

        # Copying over an already-correct symlink should be a no-op.
        s = AbsoluteSymlinkFile(source)
        self.assertFalse(s.copy(dest))

        link = os.readlink(dest)
        self.assertEqual(link, source)


class TestPreprocessedFile(TestWithTmpDir):
    def test_preprocess(self):
        '''
        Test that copying the file invokes the preprocessor
        '''
        src = self.tmppath('src')
        dest = self.tmppath('dest')

        with open(src, 'wb') as tmp:
            tmp.write('#ifdef FOO\ntest\n#endif')

        f = PreprocessedFile(src, depfile_path=None, marker='#', defines={'FOO': True})
        self.assertTrue(f.copy(dest))

        self.assertEqual('test\n', open(dest, 'rb').read())

    def test_preprocess_file_no_write(self):
        '''
        Test various conditions where PreprocessedFile.copy is expected not to
        write in the destination file.
        '''
        src = self.tmppath('src')
        dest = self.tmppath('dest')
        depfile = self.tmppath('depfile')

        with open(src, 'wb') as tmp:
            tmp.write('#ifdef FOO\ntest\n#endif')

        # Initial copy
        f = PreprocessedFile(src, depfile_path=depfile, marker='#', defines={'FOO': True})
        self.assertTrue(f.copy(dest))

        # Ensure subsequent copies won't trigger writes
        self.assertFalse(f.copy(DestNoWrite(dest)))
        self.assertEqual('test\n', open(dest, 'rb').read())

        # When the source file is older than the destination file, even with
        # different content, no copy should occur.
        with open(src, 'wb') as tmp:
            tmp.write('#ifdef FOO\nfooo\n#endif')
        time = os.path.getmtime(dest) - 1
        os.utime(src, (time, time))
        self.assertFalse(f.copy(DestNoWrite(dest)))
        self.assertEqual('test\n', open(dest, 'rb').read())

        # skip_if_older=False is expected to force a copy in this situation.
        self.assertTrue(f.copy(dest, skip_if_older=False))
        self.assertEqual('fooo\n', open(dest, 'rb').read())

    def test_preprocess_file_dependencies(self):
        '''
        Test that the preprocess runs if the dependencies of the source change
        '''
        src = self.tmppath('src')
        dest = self.tmppath('dest')
        incl = self.tmppath('incl')
        deps = self.tmppath('src.pp')

        with open(src, 'wb') as tmp:
            tmp.write('#ifdef FOO\ntest\n#endif')

        with open(incl, 'wb') as tmp:
            tmp.write('foo bar')

        # Initial copy
        f = PreprocessedFile(src, depfile_path=deps, marker='#', defines={'FOO': True})
        self.assertTrue(f.copy(dest))

        # Update the source so it #includes the include file.
        with open(src, 'wb') as tmp:
            tmp.write('#include incl\n')
        time = os.path.getmtime(dest) + 1
        os.utime(src, (time, time))
        self.assertTrue(f.copy(dest))
        self.assertEqual('foo bar', open(dest, 'rb').read())

        # If one of the dependencies changes, the file should be updated. The
        # mtime of the dependency is set after the destination file, to avoid
        # both files having the same time.
        with open(incl, 'wb') as tmp:
            tmp.write('quux')
        time = os.path.getmtime(dest) + 1
        os.utime(incl, (time, time))
        self.assertTrue(f.copy(dest))
        self.assertEqual('quux', open(dest, 'rb').read())

        # Perform one final copy to confirm that we don't run the preprocessor
        # again. We update the mtime of the destination so it's newer than the
        # input files. This would "just work" if we weren't changing the
        # mtimes by hand above.
        time = os.path.getmtime(incl) + 1
        os.utime(dest, (time, time))
        self.assertFalse(f.copy(DestNoWrite(dest)))

    def test_replace_symlink(self):
        '''
        Test that if the destination exists, and is a symlink, the target of
        the symlink is not overwritten by the preprocessor output.
        '''
        if not self.symlink_supported:
            return

        source = self.tmppath('source')
        dest = self.tmppath('dest')
        pp_source = self.tmppath('pp_in')
        deps = self.tmppath('deps')

        with open(source, 'a'):
            pass

        os.symlink(source, dest)
        self.assertTrue(os.path.islink(dest))

        with open(pp_source, 'wb') as tmp:
            tmp.write('#define FOO\nPREPROCESSED')

        f = PreprocessedFile(pp_source, depfile_path=deps, marker='#',
                             defines={'FOO': True})
        self.assertTrue(f.copy(dest))

        # The symlink must have been replaced by a regular file, leaving the
        # original target untouched.
        self.assertEqual('PREPROCESSED', open(dest, 'rb').read())
        self.assertFalse(os.path.islink(dest))
        self.assertEqual('', open(source, 'rb').read())


class TestExistingFile(TestWithTmpDir):
    def test_required_missing_dest(self):
        with self.assertRaisesRegexp(ErrorMessage, 'Required existing file'):
            f = ExistingFile(required=True)
            f.copy(self.tmppath('dest'))

    def test_required_existing_dest(self):
        p = self.tmppath('dest')
        with open(p, 'a'):
            pass

        f = ExistingFile(required=True)
        f.copy(p)

    def test_optional_missing_dest(self):
        f = ExistingFile(required=False)
        f.copy(self.tmppath('dest'))

    def test_optional_existing_dest(self):
        p = self.tmppath('dest')
        with open(p, 'a'):
            pass

        f = ExistingFile(required=False)
        f.copy(p)


class TestGeneratedFile(TestWithTmpDir):
    def test_generated_file(self):
        '''
        Check that GeneratedFile.copy yields the proper content in the
        destination file in all situations that trigger different code paths
        (see TestFile.test_file)
        '''
        dest = self.tmppath('dest')

        for content in samples:
            f = GeneratedFile(content)
            f.copy(dest)
            self.assertEqual(content, open(dest, 'rb').read())

    def test_generated_file_open(self):
        '''
        Test whether GeneratedFile.open returns an appropriately reset file
        object.
        '''
        content = ''.join(samples)
        f = GeneratedFile(content)
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())

    def test_generated_file_no_write(self):
        '''
        Test various conditions where GeneratedFile.copy is expected not to
        write in the destination file.
        '''
        dest = self.tmppath('dest')

        # Initial copy
        f = GeneratedFile('test')
        f.copy(dest)

        # Ensure subsequent copies won't trigger writes
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())

        # When using a new instance with the same content, no copy should occur
        f = GeneratedFile('test')
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())

        # Double check that under conditions where a copy occurs, we would get
        # an exception.
        f = GeneratedFile('fooo')
        self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))


class TestDeflatedFile(TestWithTmpDir):
    def test_deflated_file(self):
        '''
        Check that DeflatedFile.copy yields the proper content in the
        destination file in all situations that trigger different code paths
        (see TestFile.test_file)
        '''
        src = self.tmppath('src.jar')
        dest = self.tmppath('dest')

        contents = {}
        with JarWriter(src) as jar:
            for content in samples:
                name = ''.join(random.choice(string.letters)
                               for i in xrange(8))
                jar.add(name, content, compress=True)
                contents[name] = content

        for j in JarReader(src):
            f = DeflatedFile(j)
            f.copy(dest)
            self.assertEqual(contents[j.filename], open(dest, 'rb').read())

    def test_deflated_file_open(self):
        '''
        Test whether DeflatedFile.open returns an appropriately reset file
        object.
        '''
        src = self.tmppath('src.jar')
        content = ''.join(samples)
        with JarWriter(src) as jar:
            jar.add('content', content)

        f = DeflatedFile(JarReader(src)['content'])
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())

    def test_deflated_file_no_write(self):
        '''
        Test various conditions where DeflatedFile.copy is expected not to
        write in the destination file.
        '''
        src = self.tmppath('src.jar')
        dest = self.tmppath('dest')

        with JarWriter(src) as jar:
            jar.add('test', 'test')
            jar.add('test2', 'test')
            jar.add('fooo', 'fooo')

        jar = JarReader(src)
        # Initial copy
        f = DeflatedFile(jar['test'])
        f.copy(dest)

        # Ensure subsequent copies won't trigger writes
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())

        # When using a different file with the same content, no copy should
        # occur
        f = DeflatedFile(jar['test2'])
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())

        # Double check that under conditions where a copy occurs, we would get
        # an exception.
        f = DeflatedFile(jar['fooo'])
        self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))


class TestManifestFile(TestWithTmpDir):
    def test_manifest_file(self):
        f = ManifestFile('chrome')
        f.add(ManifestContent('chrome', 'global', 'toolkit/content/global/'))
        f.add(ManifestResource('chrome', 'gre-resources', 'toolkit/res/'))
        f.add(ManifestResource('chrome/pdfjs', 'pdfjs', './'))
        f.add(ManifestContent('chrome/pdfjs', 'pdfjs', 'pdfjs'))
        f.add(ManifestLocale('chrome', 'browser', 'en-US',
                             'en-US/locale/browser/'))

        f.copy(self.tmppath('chrome.manifest'))
        self.assertEqual(open(self.tmppath('chrome.manifest')).readlines(), [
            'content global toolkit/content/global/\n',
            'resource gre-resources toolkit/res/\n',
            'resource pdfjs pdfjs/\n',
            'content pdfjs pdfjs/pdfjs\n',
            'locale browser en-US en-US/locale/browser/\n',
        ])

        # Removing an entry that was registered under a different base or
        # that was never added must fail.
        self.assertRaises(
            ValueError,
            f.remove,
            ManifestContent('', 'global', 'toolkit/content/global/')
        )
        self.assertRaises(
            ValueError,
            f.remove,
            ManifestOverride('chrome', 'chrome://global/locale/netError.dtd',
                             'chrome://browser/locale/netError.dtd')
        )

        f.remove(ManifestContent('chrome', 'global',
                                 'toolkit/content/global/'))
        self.assertRaises(
            ValueError,
            f.remove,
            ManifestContent('chrome', 'global', 'toolkit/content/global/')
        )

        f.copy(self.tmppath('chrome.manifest'))
        content = open(self.tmppath('chrome.manifest')).read()
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())

# Compiled typelib for the following IDL:
# interface foo;
# [scriptable, uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)]
# interface bar {
#   void bar(in foo f);
# };
# We need to make this [scriptable] so it doesn't get deleted from the
# typelib. We don't need to make the foo interfaces below [scriptable],
# because they will be automatically included by virtue of being an
# argument to a method of |bar|.
+bar_xpt = GeneratedFile( + b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' + + b'\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C' + + b'\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + + b'\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F' + + b'\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00' + + b'\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00' + + b'\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00' + + b'\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x80' +) + +# Compiled typelib for the following IDL: +# [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)] +# interface foo { +# void foo(); +# }; +foo_xpt = GeneratedFile( + b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' + + b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' + + b'\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA' + + b'\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' + + b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' + + b'\x05\x00\x80\x06\x00\x00\x00' +) + +# Compiled typelib for the following IDL: +# [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)] +# interface foo { +# void foo(); +# }; +foo2_xpt = GeneratedFile( + b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' + + b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' + + b'\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39' + + b'\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' + + b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' + + b'\x05\x00\x80\x06\x00\x00\x00' +) + + +def read_interfaces(file): + return dict((i.name, i) for i in Typelib.read(file).interfaces) + + +class TestXPTFile(TestWithTmpDir): + def test_xpt_file(self): + x = XPTFile() + x.add(foo_xpt) + x.add(bar_xpt) + x.copy(self.tmppath('interfaces.xpt')) + + foo = read_interfaces(foo_xpt.open()) + 
foo2 = read_interfaces(foo2_xpt.open()) + bar = read_interfaces(bar_xpt.open()) + linked = read_interfaces(self.tmppath('interfaces.xpt')) + self.assertEqual(foo['foo'], linked['foo']) + self.assertEqual(bar['bar'], linked['bar']) + + x.remove(foo_xpt) + x.copy(self.tmppath('interfaces2.xpt')) + linked = read_interfaces(self.tmppath('interfaces2.xpt')) + self.assertEqual(bar['foo'], linked['foo']) + self.assertEqual(bar['bar'], linked['bar']) + + x.add(foo_xpt) + x.copy(DestNoWrite(self.tmppath('interfaces.xpt'))) + linked = read_interfaces(self.tmppath('interfaces.xpt')) + self.assertEqual(foo['foo'], linked['foo']) + self.assertEqual(bar['bar'], linked['bar']) + + x = XPTFile() + x.add(foo2_xpt) + x.add(bar_xpt) + x.copy(self.tmppath('interfaces.xpt')) + linked = read_interfaces(self.tmppath('interfaces.xpt')) + self.assertEqual(foo2['foo'], linked['foo']) + self.assertEqual(bar['bar'], linked['bar']) + + x = XPTFile() + x.add(foo_xpt) + x.add(foo2_xpt) + x.add(bar_xpt) + from xpt import DataError + self.assertRaises(DataError, x.copy, self.tmppath('interfaces.xpt')) + + +class TestMinifiedProperties(TestWithTmpDir): + def test_minified_properties(self): + propLines = [ + '# Comments are removed', + 'foo = bar', + '', + '# Another comment', + ] + prop = GeneratedFile('\n'.join(propLines)) + self.assertEqual(MinifiedProperties(prop).open().readlines(), + ['foo = bar\n', '\n']) + open(self.tmppath('prop'), 'wb').write('\n'.join(propLines)) + MinifiedProperties(File(self.tmppath('prop'))) \ + .copy(self.tmppath('prop2')) + self.assertEqual(open(self.tmppath('prop2')).readlines(), + ['foo = bar\n', '\n']) + + +class TestMinifiedJavaScript(TestWithTmpDir): + orig_lines = [ + '// Comment line', + 'let foo = "bar";', + 'var bar = true;', + '', + '// Another comment', + ] + + def test_minified_javascript(self): + orig_f = GeneratedFile('\n'.join(self.orig_lines)) + min_f = MinifiedJavaScript(orig_f) + + mini_lines = min_f.open().readlines() + self.assertTrue(mini_lines) 
+ self.assertTrue(len(mini_lines) < len(self.orig_lines)) + + def _verify_command(self, code): + our_dir = os.path.abspath(os.path.dirname(__file__)) + return [ + sys.executable, + os.path.join(our_dir, 'support', 'minify_js_verify.py'), + code, + ] + + def test_minified_verify_success(self): + orig_f = GeneratedFile('\n'.join(self.orig_lines)) + min_f = MinifiedJavaScript(orig_f, + verify_command=self._verify_command('0')) + + mini_lines = min_f.open().readlines() + self.assertTrue(mini_lines) + self.assertTrue(len(mini_lines) < len(self.orig_lines)) + + def test_minified_verify_failure(self): + orig_f = GeneratedFile('\n'.join(self.orig_lines)) + errors.out = StringIO() + min_f = MinifiedJavaScript(orig_f, + verify_command=self._verify_command('1')) + + mini_lines = min_f.open().readlines() + output = errors.out.getvalue() + errors.out = sys.stderr + self.assertEqual(output, + 'Warning: JS minification verification failed for <unknown>:\n' + 'Warning: Error message\n') + self.assertEqual(mini_lines, orig_f.open().readlines()) + + +class MatchTestTemplate(object): + def prepare_match_test(self, with_dotfiles=False): + self.add('bar') + self.add('foo/bar') + self.add('foo/baz') + self.add('foo/qux/1') + self.add('foo/qux/bar') + self.add('foo/qux/2/test') + self.add('foo/qux/2/test2') + if with_dotfiles: + self.add('foo/.foo') + self.add('foo/.bar/foo') + + def do_match_test(self): + self.do_check('', [ + 'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar', + 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('*', [ + 'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar', + 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('foo/qux', [ + 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('foo/b*', ['foo/bar', 'foo/baz']) + self.do_check('baz', []) + self.do_check('foo/foo', []) + self.do_check('foo/*ar', ['foo/bar']) + self.do_check('*ar', ['bar']) + self.do_check('*/bar', ['foo/bar']) + self.do_check('foo/*ux', 
[ + 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('foo/q*ux', [ + 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('foo/*/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2']) + self.do_check('**/bar', ['bar', 'foo/bar', 'foo/qux/bar']) + self.do_check('foo/**/test', ['foo/qux/2/test']) + self.do_check('foo', [ + 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar', + 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('foo/**', [ + 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar', + 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('**/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2']) + self.do_check('**/foo', [ + 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar', + 'foo/qux/2/test', 'foo/qux/2/test2' + ]) + self.do_check('**/barbaz', []) + self.do_check('f**/bar', ['foo/bar']) + + def do_finder_test(self, finder): + self.assertTrue(finder.contains('foo/.foo')) + self.assertTrue(finder.contains('foo/.bar')) + self.assertTrue('foo/.foo' in [f for f, c in + finder.find('foo/.foo')]) + self.assertTrue('foo/.bar/foo' in [f for f, c in + finder.find('foo/.bar')]) + self.assertEqual(sorted([f for f, c in finder.find('foo/.*')]), + ['foo/.bar/foo', 'foo/.foo']) + for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']: + self.assertFalse('foo/.foo' in [f for f, c in + finder.find(pattern)]) + self.assertFalse('foo/.bar/foo' in [f for f, c in + finder.find(pattern)]) + self.assertEqual(sorted([f for f, c in finder.find(pattern)]), + sorted([f for f, c in finder + if mozpath.match(f, pattern)])) + + +def do_check(test, finder, pattern, result): + if result: + test.assertTrue(finder.contains(pattern)) + else: + test.assertFalse(finder.contains(pattern)) + test.assertEqual(sorted(list(f for f, c in finder.find(pattern))), + sorted(result)) + + +class TestFileFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path): + ensureParentDir(self.tmppath(path)) + open(self.tmppath(path), 
'wb').write(path) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def test_file_finder(self): + self.prepare_match_test(with_dotfiles=True) + self.finder = FileFinder(self.tmpdir) + self.do_match_test() + self.do_finder_test(self.finder) + + def test_get(self): + self.prepare_match_test() + finder = FileFinder(self.tmpdir) + + self.assertIsNone(finder.get('does-not-exist')) + res = finder.get('bar') + self.assertIsInstance(res, File) + self.assertEqual(mozpath.normpath(res.path), + mozpath.join(self.tmpdir, 'bar')) + + def test_ignored_dirs(self): + """Ignored directories should not have results returned.""" + self.prepare_match_test() + self.add('fooz') + + # Present to ensure prefix matching doesn't exclude. + self.add('foo/quxz') + + self.finder = FileFinder(self.tmpdir, ignore=['foo/qux']) + + self.do_check('**', ['bar', 'foo/bar', 'foo/baz', 'foo/quxz', 'fooz']) + self.do_check('foo/*', ['foo/bar', 'foo/baz', 'foo/quxz']) + self.do_check('foo/**', ['foo/bar', 'foo/baz', 'foo/quxz']) + self.do_check('foo/qux/**', []) + self.do_check('foo/qux/*', []) + self.do_check('foo/qux/bar', []) + self.do_check('foo/quxz', ['foo/quxz']) + self.do_check('fooz', ['fooz']) + + def test_ignored_files(self): + """Ignored files should not have results returned.""" + self.prepare_match_test() + + # Be sure prefix match doesn't get ignored. 
+ self.add('barz') + + self.finder = FileFinder(self.tmpdir, ignore=['foo/bar', 'bar']) + self.do_check('**', ['barz', 'foo/baz', 'foo/qux/1', 'foo/qux/2/test', + 'foo/qux/2/test2', 'foo/qux/bar']) + self.do_check('foo/**', ['foo/baz', 'foo/qux/1', 'foo/qux/2/test', + 'foo/qux/2/test2', 'foo/qux/bar']) + + def test_ignored_patterns(self): + """Ignore entries with patterns should be honored.""" + self.prepare_match_test() + + self.add('foo/quxz') + + self.finder = FileFinder(self.tmpdir, ignore=['foo/qux/*']) + self.do_check('**', ['foo/bar', 'foo/baz', 'foo/quxz', 'bar']) + self.do_check('foo/**', ['foo/bar', 'foo/baz', 'foo/quxz']) + + def test_dotfiles(self): + """Finder can find files beginning with . is configured.""" + self.prepare_match_test(with_dotfiles=True) + self.finder = FileFinder(self.tmpdir, find_dotfiles=True) + self.do_check('**', ['bar', 'foo/.foo', 'foo/.bar/foo', + 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar', + 'foo/qux/2/test', 'foo/qux/2/test2']) + + def test_dotfiles_plus_ignore(self): + self.prepare_match_test(with_dotfiles=True) + self.finder = FileFinder(self.tmpdir, find_dotfiles=True, + ignore=['foo/.bar/**']) + self.do_check('foo/**', ['foo/.foo', 'foo/bar', 'foo/baz', + 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2']) + + +class TestJarFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path): + self.jar.add(path, path, compress=True) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def test_jar_finder(self): + self.jar = JarWriter(file=self.tmppath('test.jar')) + self.prepare_match_test() + self.jar.finish() + reader = JarReader(file=self.tmppath('test.jar')) + self.finder = JarFinder(self.tmppath('test.jar'), reader) + self.do_match_test() + + self.assertIsNone(self.finder.get('does-not-exist')) + self.assertIsInstance(self.finder.get('bar'), DeflatedFile) + +class TestTarFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path): + 
self.tar.addfile(tarfile.TarInfo(name=path)) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def test_tar_finder(self): + self.tar = tarfile.open(name=self.tmppath('test.tar.bz2'), + mode='w:bz2') + self.prepare_match_test() + self.tar.close() + with tarfile.open(name=self.tmppath('test.tar.bz2'), + mode='r:bz2') as tarreader: + self.finder = TarFinder(self.tmppath('test.tar.bz2'), tarreader) + self.do_match_test() + + self.assertIsNone(self.finder.get('does-not-exist')) + self.assertIsInstance(self.finder.get('bar'), ExtractedTarFile) + + +class TestComposedFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path, content=None): + # Put foo/qux files under $tmp/b. + if path.startswith('foo/qux/'): + real_path = mozpath.join('b', path[8:]) + else: + real_path = mozpath.join('a', path) + ensureParentDir(self.tmppath(real_path)) + if not content: + content = path + open(self.tmppath(real_path), 'wb').write(content) + + def do_check(self, pattern, result): + if '*' in pattern: + return + do_check(self, self.finder, pattern, result) + + def test_composed_finder(self): + self.prepare_match_test() + # Also add files in $tmp/a/foo/qux because ComposedFinder is + # expected to mask foo/qux entirely with content from $tmp/b. 
+ ensureParentDir(self.tmppath('a/foo/qux/hoge')) + open(self.tmppath('a/foo/qux/hoge'), 'wb').write('hoge') + open(self.tmppath('a/foo/qux/bar'), 'wb').write('not the right content') + self.finder = ComposedFinder({ + '': FileFinder(self.tmppath('a')), + 'foo/qux': FileFinder(self.tmppath('b')), + }) + self.do_match_test() + + self.assertIsNone(self.finder.get('does-not-exist')) + self.assertIsInstance(self.finder.get('bar'), File) + + +@unittest.skipUnless(hglib, 'hglib not available') +class TestMercurialRevisionFinder(MatchTestTemplate, TestWithTmpDir): + def setUp(self): + super(TestMercurialRevisionFinder, self).setUp() + hglib.init(self.tmpdir) + + def add(self, path): + c = hglib.open(self.tmpdir) + ensureParentDir(self.tmppath(path)) + with open(self.tmppath(path), 'wb') as fh: + fh.write(path) + c.add(self.tmppath(path)) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def _get_finder(self, *args, **kwargs): + return MercurialRevisionFinder(*args, **kwargs) + + def test_default_revision(self): + self.prepare_match_test() + c = hglib.open(self.tmpdir) + c.commit('initial commit') + self.finder = self._get_finder(self.tmpdir) + self.do_match_test() + + self.assertIsNone(self.finder.get('does-not-exist')) + self.assertIsInstance(self.finder.get('bar'), MercurialFile) + + def test_old_revision(self): + c = hglib.open(self.tmpdir) + with open(self.tmppath('foo'), 'wb') as fh: + fh.write('foo initial') + c.add(self.tmppath('foo')) + c.commit('initial') + + with open(self.tmppath('foo'), 'wb') as fh: + fh.write('foo second') + with open(self.tmppath('bar'), 'wb') as fh: + fh.write('bar second') + c.add(self.tmppath('bar')) + c.commit('second') + # This wipes out the working directory, ensuring the finder isn't + # finding anything from the filesystem. 
+ c.rawcommand(['update', 'null']) + + finder = self._get_finder(self.tmpdir, 0) + f = finder.get('foo') + self.assertEqual(f.read(), 'foo initial') + self.assertEqual(f.read(), 'foo initial', 'read again for good measure') + self.assertIsNone(finder.get('bar')) + + finder = MercurialRevisionFinder(self.tmpdir, rev=1) + f = finder.get('foo') + self.assertEqual(f.read(), 'foo second') + f = finder.get('bar') + self.assertEqual(f.read(), 'bar second') + + def test_recognize_repo_paths(self): + c = hglib.open(self.tmpdir) + with open(self.tmppath('foo'), 'wb') as fh: + fh.write('initial') + c.add(self.tmppath('foo')) + c.commit('initial') + c.rawcommand(['update', 'null']) + + finder = self._get_finder(self.tmpdir, 0, + recognize_repo_paths=True) + with self.assertRaises(NotImplementedError): + list(finder.find('')) + + with self.assertRaises(ValueError): + finder.get('foo') + with self.assertRaises(ValueError): + finder.get('') + + f = finder.get(self.tmppath('foo')) + self.assertIsInstance(f, MercurialFile) + self.assertEqual(f.read(), 'initial') + + +@unittest.skipUnless(MercurialNativeRevisionFinder, 'hgnative not available') +class TestMercurialNativeRevisionFinder(TestMercurialRevisionFinder): + def _get_finder(self, *args, **kwargs): + return MercurialNativeRevisionFinder(*args, **kwargs) + + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_manifests.py b/python/mozbuild/mozpack/test/test_manifests.py new file mode 100644 index 000000000..b785d014a --- /dev/null +++ b/python/mozbuild/mozpack/test/test_manifests.py @@ -0,0 +1,375 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import unicode_literals + +import os + +import mozunit + +from mozpack.copier import ( + FileCopier, + FileRegistry, +) +from mozpack.manifests import ( + InstallManifest, + UnreadableInstallManifest, +) +from mozpack.test.test_files import TestWithTmpDir + + +class TestInstallManifest(TestWithTmpDir): + def test_construct(self): + m = InstallManifest() + self.assertEqual(len(m), 0) + + def test_malformed(self): + f = self.tmppath('manifest') + open(f, 'wb').write('junk\n') + with self.assertRaises(UnreadableInstallManifest): + m = InstallManifest(f) + + def test_adds(self): + m = InstallManifest() + m.add_symlink('s_source', 's_dest') + m.add_copy('c_source', 'c_dest') + m.add_required_exists('e_dest') + m.add_optional_exists('o_dest') + m.add_pattern_symlink('ps_base', 'ps/*', 'ps_dest') + m.add_pattern_copy('pc_base', 'pc/**', 'pc_dest') + m.add_preprocess('p_source', 'p_dest', 'p_source.pp') + m.add_content('content', 'content') + + self.assertEqual(len(m), 8) + self.assertIn('s_dest', m) + self.assertIn('c_dest', m) + self.assertIn('p_dest', m) + self.assertIn('e_dest', m) + self.assertIn('o_dest', m) + self.assertIn('content', m) + + with self.assertRaises(ValueError): + m.add_symlink('s_other', 's_dest') + + with self.assertRaises(ValueError): + m.add_copy('c_other', 'c_dest') + + with self.assertRaises(ValueError): + m.add_preprocess('p_other', 'p_dest', 'p_other.pp') + + with self.assertRaises(ValueError): + m.add_required_exists('e_dest') + + with self.assertRaises(ValueError): + m.add_optional_exists('o_dest') + + with self.assertRaises(ValueError): + m.add_pattern_symlink('ps_base', 'ps/*', 'ps_dest') + + with self.assertRaises(ValueError): + m.add_pattern_copy('pc_base', 'pc/**', 'pc_dest') + + with self.assertRaises(ValueError): + m.add_content('content', 'content') + + def _get_test_manifest(self): + m = InstallManifest() + m.add_symlink(self.tmppath('s_source'), 's_dest') + m.add_copy(self.tmppath('c_source'), 'c_dest') + 
m.add_preprocess(self.tmppath('p_source'), 'p_dest', self.tmppath('p_source.pp'), '#', {'FOO':'BAR', 'BAZ':'QUX'}) + m.add_required_exists('e_dest') + m.add_optional_exists('o_dest') + m.add_pattern_symlink('ps_base', '*', 'ps_dest') + m.add_pattern_copy('pc_base', '**', 'pc_dest') + m.add_content('the content\non\nmultiple lines', 'content') + + return m + + def test_serialization(self): + m = self._get_test_manifest() + + p = self.tmppath('m') + m.write(path=p) + self.assertTrue(os.path.isfile(p)) + + with open(p, 'rb') as fh: + c = fh.read() + + self.assertEqual(c.count('\n'), 9) + + lines = c.splitlines() + self.assertEqual(len(lines), 9) + + self.assertEqual(lines[0], '5') + + m2 = InstallManifest(path=p) + self.assertEqual(m, m2) + p2 = self.tmppath('m2') + m2.write(path=p2) + + with open(p2, 'rb') as fh: + c2 = fh.read() + + self.assertEqual(c, c2) + + def test_populate_registry(self): + m = self._get_test_manifest() + r = FileRegistry() + m.populate_registry(r) + + self.assertEqual(len(r), 6) + self.assertEqual(r.paths(), ['c_dest', 'content', 'e_dest', 'o_dest', + 'p_dest', 's_dest']) + + def test_pattern_expansion(self): + source = self.tmppath('source') + os.mkdir(source) + os.mkdir('%s/base' % source) + os.mkdir('%s/base/foo' % source) + + with open('%s/base/foo/file1' % source, 'a'): + pass + + with open('%s/base/foo/file2' % source, 'a'): + pass + + m = InstallManifest() + m.add_pattern_symlink('%s/base' % source, '**', 'dest') + + c = FileCopier() + m.populate_registry(c) + self.assertEqual(c.paths(), ['dest/foo/file1', 'dest/foo/file2']) + + def test_or(self): + m1 = self._get_test_manifest() + orig_length = len(m1) + m2 = InstallManifest() + m2.add_symlink('s_source2', 's_dest2') + m2.add_copy('c_source2', 'c_dest2') + + m1 |= m2 + + self.assertEqual(len(m2), 2) + self.assertEqual(len(m1), orig_length + 2) + + self.assertIn('s_dest2', m1) + self.assertIn('c_dest2', m1) + + def test_copier_application(self): + dest = self.tmppath('dest') + 
os.mkdir(dest) + + to_delete = self.tmppath('dest/to_delete') + with open(to_delete, 'a'): + pass + + with open(self.tmppath('s_source'), 'wt') as fh: + fh.write('symlink!') + + with open(self.tmppath('c_source'), 'wt') as fh: + fh.write('copy!') + + with open(self.tmppath('p_source'), 'wt') as fh: + fh.write('#define FOO 1\npreprocess!') + + with open(self.tmppath('dest/e_dest'), 'a'): + pass + + with open(self.tmppath('dest/o_dest'), 'a'): + pass + + m = self._get_test_manifest() + c = FileCopier() + m.populate_registry(c) + result = c.copy(dest) + + self.assertTrue(os.path.exists(self.tmppath('dest/s_dest'))) + self.assertTrue(os.path.exists(self.tmppath('dest/c_dest'))) + self.assertTrue(os.path.exists(self.tmppath('dest/p_dest'))) + self.assertTrue(os.path.exists(self.tmppath('dest/e_dest'))) + self.assertTrue(os.path.exists(self.tmppath('dest/o_dest'))) + self.assertTrue(os.path.exists(self.tmppath('dest/content'))) + self.assertFalse(os.path.exists(to_delete)) + + with open(self.tmppath('dest/s_dest'), 'rt') as fh: + self.assertEqual(fh.read(), 'symlink!') + + with open(self.tmppath('dest/c_dest'), 'rt') as fh: + self.assertEqual(fh.read(), 'copy!') + + with open(self.tmppath('dest/p_dest'), 'rt') as fh: + self.assertEqual(fh.read(), 'preprocess!') + + self.assertEqual(result.updated_files, set(self.tmppath(p) for p in ( + 'dest/s_dest', 'dest/c_dest', 'dest/p_dest', 'dest/content'))) + self.assertEqual(result.existing_files, + set([self.tmppath('dest/e_dest'), self.tmppath('dest/o_dest')])) + self.assertEqual(result.removed_files, {to_delete}) + self.assertEqual(result.removed_directories, set()) + + def test_preprocessor(self): + manifest = self.tmppath('m') + deps = self.tmppath('m.pp') + dest = self.tmppath('dest') + include = self.tmppath('p_incl') + + with open(include, 'wt') as fh: + fh.write('#define INCL\n') + time = os.path.getmtime(include) - 3 + os.utime(include, (time, time)) + + with open(self.tmppath('p_source'), 'wt') as fh: + 
fh.write('#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n') + fh.write('#ifdef DEPTEST\nPASS2\n#endif\n') + fh.write('#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n') + time = os.path.getmtime(self.tmppath('p_source')) - 3 + os.utime(self.tmppath('p_source'), (time, time)) + + # Create and write a manifest with the preprocessed file, then apply it. + # This should write out our preprocessed file. + m = InstallManifest() + m.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#', {'FOO':'BAR', 'BAZ':'QUX'}) + m.write(path=manifest) + + m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + c.copy(dest) + + self.assertTrue(os.path.exists(self.tmppath('dest/p_dest'))) + + with open(self.tmppath('dest/p_dest'), 'rt') as fh: + self.assertEqual(fh.read(), 'PASS1\n') + + # Create a second manifest with the preprocessed file, then apply it. + # Since this manifest does not exist on the disk, there should not be a + # dependency on it, and the preprocessed file should not be modified. + m2 = InstallManifest() + m2.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#', {'DEPTEST':True}) + c = FileCopier() + m2.populate_registry(c) + result = c.copy(dest) + + self.assertFalse(self.tmppath('dest/p_dest') in result.updated_files) + self.assertTrue(self.tmppath('dest/p_dest') in result.existing_files) + + # Write out the second manifest, then load it back in from the disk. + # This should add the dependency on the manifest file, so our + # preprocessed file should be regenerated with the new defines. + # We also set the mtime on the destination file back, so it will be + # older than the manifest file. 
+ m2.write(path=manifest) + time = os.path.getmtime(manifest) - 1 + os.utime(self.tmppath('dest/p_dest'), (time, time)) + m2 = InstallManifest(path=manifest) + c = FileCopier() + m2.populate_registry(c) + self.assertTrue(c.copy(dest)) + + with open(self.tmppath('dest/p_dest'), 'rt') as fh: + self.assertEqual(fh.read(), 'PASS2\n') + + # Set the time on the manifest back, so it won't be picked up as + # modified in the next test + time = os.path.getmtime(manifest) - 1 + os.utime(manifest, (time, time)) + + # Update the contents of a file included by the source file. This should + # cause the destination to be regenerated. + with open(include, 'wt') as fh: + fh.write('#define INCLTEST\n') + + time = os.path.getmtime(include) - 1 + os.utime(self.tmppath('dest/p_dest'), (time, time)) + c = FileCopier() + m2.populate_registry(c) + self.assertTrue(c.copy(dest)) + + with open(self.tmppath('dest/p_dest'), 'rt') as fh: + self.assertEqual(fh.read(), 'PASS2\nPASS3\n') + + def test_preprocessor_dependencies(self): + manifest = self.tmppath('m') + deps = self.tmppath('m.pp') + dest = self.tmppath('dest') + source = self.tmppath('p_source') + destfile = self.tmppath('dest/p_dest') + include = self.tmppath('p_incl') + os.mkdir(dest) + + with open(source, 'wt') as fh: + fh.write('#define SRC\nSOURCE\n') + time = os.path.getmtime(source) - 3 + os.utime(source, (time, time)) + + with open(include, 'wt') as fh: + fh.write('INCLUDE\n') + time = os.path.getmtime(source) - 3 + os.utime(include, (time, time)) + + # Create and write a manifest with the preprocessed file. + m = InstallManifest() + m.add_preprocess(source, 'p_dest', deps, '#', {'FOO':'BAR', 'BAZ':'QUX'}) + m.write(path=manifest) + + time = os.path.getmtime(source) - 5 + os.utime(manifest, (time, time)) + + # Now read the manifest back in, and apply it. This should write out + # our preprocessed file. 
+ m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + self.assertTrue(c.copy(dest)) + + with open(destfile, 'rt') as fh: + self.assertEqual(fh.read(), 'SOURCE\n') + + # Next, modify the source to #INCLUDE another file. + with open(source, 'wt') as fh: + fh.write('SOURCE\n#include p_incl\n') + time = os.path.getmtime(source) - 1 + os.utime(destfile, (time, time)) + + # Apply the manifest, and confirm that it also reads the newly included + # file. + m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + c.copy(dest) + + with open(destfile, 'rt') as fh: + self.assertEqual(fh.read(), 'SOURCE\nINCLUDE\n') + + # Set the time on the source file back, so it won't be picked up as + # modified in the next test. + time = os.path.getmtime(source) - 1 + os.utime(source, (time, time)) + + # Now, modify the include file (but not the original source). + with open(include, 'wt') as fh: + fh.write('INCLUDE MODIFIED\n') + time = os.path.getmtime(include) - 1 + os.utime(destfile, (time, time)) + + # Apply the manifest, and confirm that the change to the include file + # is detected. That should cause the preprocessor to run again. + m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + c.copy(dest) + + with open(destfile, 'rt') as fh: + self.assertEqual(fh.read(), 'SOURCE\nINCLUDE MODIFIED\n') + + # ORing an InstallManifest should copy file dependencies + m = InstallManifest() + m |= InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + e = c._files['p_dest'] + self.assertEqual(e.extra_depends, [manifest]) + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_mozjar.py b/python/mozbuild/mozpack/test/test_mozjar.py new file mode 100644 index 000000000..948403006 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_mozjar.py @@ -0,0 +1,342 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
# If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from mozpack.files import FileFinder
from mozpack.mozjar import (
    JarReaderError,
    JarWriterError,
    JarStruct,
    JarReader,
    JarWriter,
    Deflater,
    JarLog,
)
from collections import OrderedDict
from mozpack.test.test_files import MockDest
import unittest
import mozunit
from cStringIO import StringIO
from urllib import pathname2url
import mozpack.path as mozpath
import os


test_data_path = mozpath.abspath(mozpath.dirname(__file__))
test_data_path = mozpath.join(test_data_path, 'data')


class TestJarStruct(unittest.TestCase):
    class Foo(JarStruct):
        MAGIC = 0x01020304
        STRUCT = OrderedDict([
            ('foo', 'uint32'),
            ('bar', 'uint16'),
            ('qux', 'uint16'),
            ('length', 'uint16'),
            ('length2', 'uint16'),
            ('string', 'length'),
            ('string2', 'length2'),
        ])

    def test_jar_struct(self):
        foo = TestJarStruct.Foo()
        self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC)
        self.assertEqual(foo['foo'], 0)
        self.assertEqual(foo['bar'], 0)
        self.assertEqual(foo['qux'], 0)
        self.assertFalse('length' in foo)
        self.assertFalse('length2' in foo)
        self.assertEqual(foo['string'], '')
        self.assertEqual(foo['string2'], '')

        self.assertEqual(foo.size, 16)

        foo['foo'] = 0x42434445
        foo['bar'] = 0xabcd
        foo['qux'] = 0xef01
        foo['string'] = 'abcde'
        foo['string2'] = 'Arbitrarily long string'

        serialized = b'\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef' + \
            b'\x05\x00\x17\x00abcdeArbitrarily long string'
        self.assertEqual(foo.size, len(serialized))
        foo_serialized = foo.serialize()
        self.assertEqual(foo_serialized, serialized)

    def do_test_read_jar_struct(self, data):
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data)
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:])

        foo = TestJarStruct.Foo(data[1:])
        self.assertEqual(foo['foo'], 0x45444342)
        self.assertEqual(foo['bar'], 0xcdab)
        self.assertEqual(foo['qux'], 0x01ef)
        self.assertFalse('length' in foo)
        self.assertFalse('length2' in foo)
        self.assertEqual(foo['string'], '012345')
        self.assertEqual(foo['string2'], '67')

    def test_read_jar_struct(self):
        data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
               b'\x01\x06\x00\x02\x0001234567890'
        self.do_test_read_jar_struct(data)

    def test_read_jar_struct_memoryview(self):
        data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
               b'\x01\x06\x00\x02\x0001234567890'
        self.do_test_read_jar_struct(memoryview(data))


class TestDeflater(unittest.TestCase):
    def wrap(self, data):
        return data

    def test_deflater_no_compress(self):
        deflater = Deflater(False)
        deflater.write(self.wrap('abc'))
        self.assertFalse(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 3)
        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.compressed_data, 'abc')
        self.assertEqual(deflater.crc32, 0x352441c2)

    def test_deflater_compress_no_gain(self):
        deflater = Deflater(True)
        deflater.write(self.wrap('abc'))
        self.assertFalse(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 3)
        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.compressed_data, 'abc')
        self.assertEqual(deflater.crc32, 0x352441c2)

    def test_deflater_compress(self):
        deflater = Deflater(True)
        deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
        self.assertTrue(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 26)
        self.assertNotEqual(deflater.compressed_size,
                            deflater.uncompressed_size)
        self.assertEqual(deflater.crc32, 0xd46b97ed)
        # The CRC is the same as when not compressed
        deflater = Deflater(False)
        self.assertFalse(deflater.compressed)
        deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
        self.assertEqual(deflater.crc32, 0xd46b97ed)


class TestDeflaterMemoryView(TestDeflater):
    def wrap(self, data):
        return memoryview(data)


class TestJar(unittest.TestCase):
    optimize = False

    def test_jar(self):
        s = MockDest()
        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
            jar.add('foo', 'foo')
            self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
            jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
            jar.add('baz\\backslash', 'aaaaaaaaaaaaaaa')

        files = [j for j in JarReader(fileobj=s)]

        self.assertEqual(files[0].filename, 'foo')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'foo')

        self.assertEqual(files[1].filename, 'bar')
        self.assertTrue(files[1].compressed)
        self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

        self.assertEqual(files[2].filename, 'baz/qux')
        self.assertFalse(files[2].compressed)
        self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

        if os.sep == '\\':
            self.assertEqual(files[3].filename, 'baz/backslash',
                             'backslashes in filenames on Windows should get '
                             'normalized')
        else:
            self.assertEqual(files[3].filename, 'baz\\backslash',
                             'backslashes in filenames on POSIX platform are '
                             'untouched')

        s = MockDest()
        with JarWriter(fileobj=s, compress=False,
                       optimize=self.optimize) as jar:
            jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.add('foo', 'foo')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)

        jar = JarReader(fileobj=s)
        files = [j for j in jar]

        self.assertEqual(files[0].filename, 'bar')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

        self.assertEqual(files[1].filename, 'foo')
        self.assertFalse(files[1].compressed)
        self.assertEqual(files[1].read(), 'foo')

        self.assertEqual(files[2].filename, 'baz/qux')
        self.assertTrue(files[2].compressed)
        self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

        self.assertTrue('bar' in jar)
        self.assertTrue('foo' in jar)
        self.assertFalse('baz' in jar)
        self.assertTrue('baz/qux' in jar)
        # The original used assertTrue(jar[...], files[...]), which treats
        # the second argument as a failure message and so could never fail
        # (it also had the 'bar'/'foo' indices swapped). Compare the
        # entries explicitly instead.
        self.assertEqual(jar['bar'].filename, files[0].filename)
        self.assertEqual(jar['foo'].filename, files[1].filename)
        self.assertEqual(jar['baz/qux'].filename, files[2].filename)

        s.seek(0)
        jar = JarReader(fileobj=s)
        self.assertTrue('bar' in jar)
        self.assertTrue('foo' in jar)
        self.assertFalse('baz' in jar)
        self.assertTrue('baz/qux' in jar)

        files[0].seek(0)
        self.assertEqual(jar['bar'].filename, files[0].filename)
        self.assertEqual(jar['bar'].compressed, files[0].compressed)
        self.assertEqual(jar['bar'].read(), files[0].read())

        files[1].seek(0)
        self.assertEqual(jar['foo'].filename, files[1].filename)
        self.assertEqual(jar['foo'].compressed, files[1].compressed)
        self.assertEqual(jar['foo'].read(), files[1].read())

        files[2].seek(0)
        self.assertEqual(jar['baz/qux'].filename, files[2].filename)
        self.assertEqual(jar['baz/qux'].compressed, files[2].compressed)
        self.assertEqual(jar['baz/qux'].read(), files[2].read())

    def test_rejar(self):
        s = MockDest()
        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
            jar.add('foo', 'foo')
            jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)

        new = MockDest()
        with JarWriter(fileobj=new, optimize=self.optimize) as jar:
            for j in JarReader(fileobj=s):
                jar.add(j.filename, j)

        jar = JarReader(fileobj=new)
        files = [j for j in jar]

        self.assertEqual(files[0].filename, 'foo')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'foo')

        self.assertEqual(files[1].filename, 'bar')
        self.assertTrue(files[1].compressed)
        self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

        self.assertEqual(files[2].filename, 'baz/qux')
        self.assertTrue(files[2].compressed)
        self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    def test_add_from_finder(self):
        s = MockDest()
        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
            finder = FileFinder(test_data_path)
            for p, f in finder.find('test_data'):
                jar.add('test_data', f)

        jar = JarReader(fileobj=s)
        files = [j for j in jar]

        self.assertEqual(files[0].filename, 'test_data')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'test_data')


class TestOptimizeJar(TestJar):
    optimize = True


class TestPreload(unittest.TestCase):
    def test_preload(self):
        s = MockDest()
        with JarWriter(fileobj=s) as jar:
            jar.add('foo', 'foo')
            jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')

        jar = JarReader(fileobj=s)
        self.assertEqual(jar.last_preloaded, None)

        with JarWriter(fileobj=s) as jar:
            jar.add('foo', 'foo')
            jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.preload(['baz/qux', 'bar'])

        jar = JarReader(fileobj=s)
        self.assertEqual(jar.last_preloaded, 'bar')
        files = [j for j in jar]

        self.assertEqual(files[0].filename, 'baz/qux')
        self.assertEqual(files[1].filename, 'bar')
        self.assertEqual(files[2].filename, 'foo')


class TestJarLog(unittest.TestCase):
    def test_jarlog(self):
        base = 'file:' + pathname2url(os.path.abspath(os.curdir))
        s = StringIO('\n'.join([
            base + '/bar/baz.jar first',
            base + '/bar/baz.jar second',
            base + '/bar/baz.jar third',
            base + '/bar/baz.jar second',
            base + '/bar/baz.jar second',
            'jar:' + base + '/qux.zip!/omni.ja stuff',
            base + '/bar/baz.jar first',
            'jar:' + base + '/qux.zip!/omni.ja other/stuff',
            'jar:' + base + '/qux.zip!/omni.ja stuff',
            base + '/bar/baz.jar third',
            'jar:jar:' + base + '/qux.zip!/baz/baz.jar!/omni.ja nested/stuff',
            'jar:jar:jar:' + base + '/qux.zip!/baz/baz.jar!/foo.zip!/omni.ja' +
            ' deeply/nested/stuff',
        ]))
        log = JarLog(fileobj=s)

        # Named function instead of an assigned lambda (PEP 8 E731).
        def canonicalize(p):
            return mozpath.normsep(os.path.normcase(os.path.realpath(p)))

        baz_jar = canonicalize('bar/baz.jar')
        qux_zip = canonicalize('qux.zip')
        self.assertEqual(set(log.keys()), set([
            baz_jar,
            (qux_zip, 'omni.ja'),
            (qux_zip, 'baz/baz.jar', 'omni.ja'),
            (qux_zip, 'baz/baz.jar', 'foo.zip', 'omni.ja'),
        ]))
        self.assertEqual(log[baz_jar], [
            'first',
            'second',
            'third',
        ])
        self.assertEqual(log[(qux_zip, 'omni.ja')], [
            'stuff',
            'other/stuff',
        ])
        self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'omni.ja')],
                         ['nested/stuff'])
        self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'foo.zip',
                              'omni.ja')], ['deeply/nested/stuff'])

        # The above tests also indirectly check the value returned by
        # JarLog.canonicalize for various jar: and file: urls, but
        # JarLog.canonicalize also supports plain paths.
        self.assertEqual(JarLog.canonicalize(os.path.abspath('bar/baz.jar')),
                         baz_jar)
        self.assertEqual(JarLog.canonicalize('bar/baz.jar'), baz_jar)


if __name__ == '__main__':
    mozunit.main()
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ +import unittest +import mozunit +import os +from mozpack.packager import ( + preprocess_manifest, + CallDeque, + Component, + SimplePackager, + SimpleManifestSink, +) +from mozpack.files import GeneratedFile +from mozpack.chrome.manifest import ( + ManifestBinaryComponent, + ManifestContent, + ManifestResource, +) +from mozunit import MockedOpen +from mozbuild.preprocessor import Preprocessor +from mozpack.errors import ( + errors, + ErrorMessage, +) +import mozpack.path as mozpath + +MANIFEST = ''' +bar/* +[foo] +foo/* +-foo/bar +chrome.manifest +[zot destdir="destdir"] +foo/zot +; comment +#ifdef baz +[baz] +baz@SUFFIX@ +#endif +''' + + +class TestPreprocessManifest(unittest.TestCase): + MANIFEST_PATH = os.path.join(os.path.abspath(os.curdir), 'manifest') + + EXPECTED_LOG = [ + ((MANIFEST_PATH, 2), 'add', '', 'bar/*'), + ((MANIFEST_PATH, 4), 'add', 'foo', 'foo/*'), + ((MANIFEST_PATH, 5), 'remove', 'foo', 'foo/bar'), + ((MANIFEST_PATH, 6), 'add', 'foo', 'chrome.manifest'), + ((MANIFEST_PATH, 8), 'add', 'zot destdir="destdir"', 'foo/zot'), + ] + + def setUp(self): + class MockSink(object): + def __init__(self): + self.log = [] + + def add(self, component, path): + self._log(errors.get_context(), 'add', repr(component), path) + + def remove(self, component, path): + self._log(errors.get_context(), 'remove', repr(component), path) + + def _log(self, *args): + self.log.append(args) + + self.sink = MockSink() + + def test_preprocess_manifest(self): + with MockedOpen({'manifest': MANIFEST}): + preprocess_manifest(self.sink, 'manifest') + self.assertEqual(self.sink.log, self.EXPECTED_LOG) + + def test_preprocess_manifest_missing_define(self): + with MockedOpen({'manifest': MANIFEST}): + self.assertRaises( + Preprocessor.Error, + preprocess_manifest, + self.sink, + 'manifest', + {'baz': 1} + ) + + def test_preprocess_manifest_defines(self): + with MockedOpen({'manifest': MANIFEST}): + preprocess_manifest(self.sink, 'manifest', + {'baz': 1, 'SUFFIX': '.exe'}) + 
self.assertEqual(self.sink.log, self.EXPECTED_LOG + + [((self.MANIFEST_PATH, 12), 'add', 'baz', 'baz.exe')]) + + +class MockFinder(object): + def __init__(self, files): + self.files = files + self.log = [] + + def find(self, path): + self.log.append(path) + for f in sorted(self.files): + if mozpath.match(f, path): + yield f, self.files[f] + + def __iter__(self): + return self.find('') + + +class MockFormatter(object): + def __init__(self): + self.log = [] + + def add_base(self, *args): + self._log(errors.get_context(), 'add_base', *args) + + def add_manifest(self, *args): + self._log(errors.get_context(), 'add_manifest', *args) + + def add_interfaces(self, *args): + self._log(errors.get_context(), 'add_interfaces', *args) + + def add(self, *args): + self._log(errors.get_context(), 'add', *args) + + def _log(self, *args): + self.log.append(args) + + +class TestSimplePackager(unittest.TestCase): + def test_simple_packager(self): + class GeneratedFileWithPath(GeneratedFile): + def __init__(self, path, content): + GeneratedFile.__init__(self, content) + self.path = path + + formatter = MockFormatter() + packager = SimplePackager(formatter) + curdir = os.path.abspath(os.curdir) + file = GeneratedFileWithPath(os.path.join(curdir, 'foo', + 'bar.manifest'), + 'resource bar bar/\ncontent bar bar/') + with errors.context('manifest', 1): + packager.add('foo/bar.manifest', file) + + file = GeneratedFileWithPath(os.path.join(curdir, 'foo', + 'baz.manifest'), + 'resource baz baz/') + with errors.context('manifest', 2): + packager.add('bar/baz.manifest', file) + + with errors.context('manifest', 3): + packager.add('qux/qux.manifest', + GeneratedFile(''.join([ + 'resource qux qux/\n', + 'binary-component qux.so\n', + ]))) + bar_xpt = GeneratedFile('bar.xpt') + qux_xpt = GeneratedFile('qux.xpt') + foo_html = GeneratedFile('foo_html') + bar_html = GeneratedFile('bar_html') + with errors.context('manifest', 4): + packager.add('foo/bar.xpt', bar_xpt) + with errors.context('manifest', 
5): + packager.add('foo/bar/foo.html', foo_html) + packager.add('foo/bar/bar.html', bar_html) + + file = GeneratedFileWithPath(os.path.join(curdir, 'foo.manifest'), + ''.join([ + 'manifest foo/bar.manifest\n', + 'manifest bar/baz.manifest\n', + ])) + with errors.context('manifest', 6): + packager.add('foo.manifest', file) + with errors.context('manifest', 7): + packager.add('foo/qux.xpt', qux_xpt) + + file = GeneratedFileWithPath(os.path.join(curdir, 'addon', + 'chrome.manifest'), + 'resource hoge hoge/') + with errors.context('manifest', 8): + packager.add('addon/chrome.manifest', file) + + install_rdf = GeneratedFile('<RDF></RDF>') + with errors.context('manifest', 9): + packager.add('addon/install.rdf', install_rdf) + + with errors.context('manifest', 10): + packager.add('addon2/install.rdf', install_rdf) + packager.add('addon2/chrome.manifest', + GeneratedFile('binary-component addon2.so')) + + with errors.context('manifest', 11): + packager.add('addon3/install.rdf', install_rdf) + packager.add('addon3/chrome.manifest', GeneratedFile( + 'manifest components/components.manifest')) + packager.add('addon3/components/components.manifest', + GeneratedFile('binary-component addon3.so')) + + with errors.context('manifest', 12): + install_rdf_addon4 = GeneratedFile( + '<RDF>\n<...>\n<em:unpack>true</em:unpack>\n<...>\n</RDF>') + packager.add('addon4/install.rdf', install_rdf_addon4) + + with errors.context('manifest', 13): + install_rdf_addon5 = GeneratedFile( + '<RDF>\n<...>\n<em:unpack>false</em:unpack>\n<...>\n</RDF>') + packager.add('addon5/install.rdf', install_rdf_addon5) + + with errors.context('manifest', 14): + install_rdf_addon6 = GeneratedFile( + '<RDF>\n<... em:unpack=true>\n<...>\n</RDF>') + packager.add('addon6/install.rdf', install_rdf_addon6) + + with errors.context('manifest', 15): + install_rdf_addon7 = GeneratedFile( + '<RDF>\n<... 
em:unpack=false>\n<...>\n</RDF>') + packager.add('addon7/install.rdf', install_rdf_addon7) + + with errors.context('manifest', 16): + install_rdf_addon8 = GeneratedFile( + '<RDF>\n<... em:unpack="true">\n<...>\n</RDF>') + packager.add('addon8/install.rdf', install_rdf_addon8) + + with errors.context('manifest', 17): + install_rdf_addon9 = GeneratedFile( + '<RDF>\n<... em:unpack="false">\n<...>\n</RDF>') + packager.add('addon9/install.rdf', install_rdf_addon9) + + with errors.context('manifest', 18): + install_rdf_addon10 = GeneratedFile( + '<RDF>\n<... em:unpack=\'true\'>\n<...>\n</RDF>') + packager.add('addon10/install.rdf', install_rdf_addon10) + + with errors.context('manifest', 19): + install_rdf_addon11 = GeneratedFile( + '<RDF>\n<... em:unpack=\'false\'>\n<...>\n</RDF>') + packager.add('addon11/install.rdf', install_rdf_addon11) + + self.assertEqual(formatter.log, []) + + with errors.context('dummy', 1): + packager.close() + self.maxDiff = None + # The formatter is expected to reorder the manifest entries so that + # chrome entries appear before the others. 
+ self.assertEqual(formatter.log, [ + (('dummy', 1), 'add_base', '', False), + (('dummy', 1), 'add_base', 'addon', True), + (('dummy', 1), 'add_base', 'addon10', 'unpacked'), + (('dummy', 1), 'add_base', 'addon11', True), + (('dummy', 1), 'add_base', 'addon2', 'unpacked'), + (('dummy', 1), 'add_base', 'addon3', 'unpacked'), + (('dummy', 1), 'add_base', 'addon4', 'unpacked'), + (('dummy', 1), 'add_base', 'addon5', True), + (('dummy', 1), 'add_base', 'addon6', 'unpacked'), + (('dummy', 1), 'add_base', 'addon7', True), + (('dummy', 1), 'add_base', 'addon8', 'unpacked'), + (('dummy', 1), 'add_base', 'addon9', True), + (('dummy', 1), 'add_base', 'qux', False), + ((os.path.join(curdir, 'foo', 'bar.manifest'), 2), + 'add_manifest', ManifestContent('foo', 'bar', 'bar/')), + ((os.path.join(curdir, 'foo', 'bar.manifest'), 1), + 'add_manifest', ManifestResource('foo', 'bar', 'bar/')), + (('bar/baz.manifest', 1), + 'add_manifest', ManifestResource('bar', 'baz', 'baz/')), + (('qux/qux.manifest', 1), + 'add_manifest', ManifestResource('qux', 'qux', 'qux/')), + (('qux/qux.manifest', 2), + 'add_manifest', ManifestBinaryComponent('qux', 'qux.so')), + (('manifest', 4), 'add_interfaces', 'foo/bar.xpt', bar_xpt), + (('manifest', 7), 'add_interfaces', 'foo/qux.xpt', qux_xpt), + ((os.path.join(curdir, 'addon', 'chrome.manifest'), 1), + 'add_manifest', ManifestResource('addon', 'hoge', 'hoge/')), + (('addon2/chrome.manifest', 1), 'add_manifest', + ManifestBinaryComponent('addon2', 'addon2.so')), + (('addon3/components/components.manifest', 1), 'add_manifest', + ManifestBinaryComponent('addon3/components', 'addon3.so')), + (('manifest', 5), 'add', 'foo/bar/foo.html', foo_html), + (('manifest', 5), 'add', 'foo/bar/bar.html', bar_html), + (('manifest', 9), 'add', 'addon/install.rdf', install_rdf), + (('manifest', 10), 'add', 'addon2/install.rdf', install_rdf), + (('manifest', 11), 'add', 'addon3/install.rdf', install_rdf), + (('manifest', 12), 'add', 'addon4/install.rdf', + 
install_rdf_addon4), + (('manifest', 13), 'add', 'addon5/install.rdf', + install_rdf_addon5), + (('manifest', 14), 'add', 'addon6/install.rdf', + install_rdf_addon6), + (('manifest', 15), 'add', 'addon7/install.rdf', + install_rdf_addon7), + (('manifest', 16), 'add', 'addon8/install.rdf', + install_rdf_addon8), + (('manifest', 17), 'add', 'addon9/install.rdf', + install_rdf_addon9), + (('manifest', 18), 'add', 'addon10/install.rdf', + install_rdf_addon10), + (('manifest', 19), 'add', 'addon11/install.rdf', + install_rdf_addon11), + ]) + + self.assertEqual(packager.get_bases(), + set(['', 'addon', 'addon2', 'addon3', 'addon4', + 'addon5', 'addon6', 'addon7', 'addon8', + 'addon9', 'addon10', 'addon11', 'qux'])) + self.assertEqual(packager.get_bases(addons=False), set(['', 'qux'])) + + def test_simple_packager_manifest_consistency(self): + formatter = MockFormatter() + # bar/ is detected as an addon because of install.rdf, but top-level + # includes a manifest inside bar/. + packager = SimplePackager(formatter) + packager.add('base.manifest', GeneratedFile( + 'manifest foo/bar.manifest\n' + 'manifest bar/baz.manifest\n' + )) + packager.add('foo/bar.manifest', GeneratedFile('resource bar bar')) + packager.add('bar/baz.manifest', GeneratedFile('resource baz baz')) + packager.add('bar/install.rdf', GeneratedFile('')) + + with self.assertRaises(ErrorMessage) as e: + packager.close() + + self.assertEqual(e.exception.message, + 'Error: "bar/baz.manifest" is included from "base.manifest", ' + 'which is outside "bar"') + + # bar/ is detected as a separate base because of chrome.manifest that + # is included nowhere, but top-level includes another manifest inside + # bar/. 
+ packager = SimplePackager(formatter) + packager.add('base.manifest', GeneratedFile( + 'manifest foo/bar.manifest\n' + 'manifest bar/baz.manifest\n' + )) + packager.add('foo/bar.manifest', GeneratedFile('resource bar bar')) + packager.add('bar/baz.manifest', GeneratedFile('resource baz baz')) + packager.add('bar/chrome.manifest', GeneratedFile('resource baz baz')) + + with self.assertRaises(ErrorMessage) as e: + packager.close() + + self.assertEqual(e.exception.message, + 'Error: "bar/baz.manifest" is included from "base.manifest", ' + 'which is outside "bar"') + + # bar/ is detected as a separate base because of chrome.manifest that + # is included nowhere, but chrome.manifest includes baz.manifest from + # the same directory. This shouldn't error out. + packager = SimplePackager(formatter) + packager.add('base.manifest', GeneratedFile( + 'manifest foo/bar.manifest\n' + )) + packager.add('foo/bar.manifest', GeneratedFile('resource bar bar')) + packager.add('bar/baz.manifest', GeneratedFile('resource baz baz')) + packager.add('bar/chrome.manifest', + GeneratedFile('manifest baz.manifest')) + packager.close() + + +class TestSimpleManifestSink(unittest.TestCase): + def test_simple_manifest_parser(self): + formatter = MockFormatter() + foobar = GeneratedFile('foobar') + foobaz = GeneratedFile('foobaz') + fooqux = GeneratedFile('fooqux') + foozot = GeneratedFile('foozot') + finder = MockFinder({ + 'bin/foo/bar': foobar, + 'bin/foo/baz': foobaz, + 'bin/foo/qux': fooqux, + 'bin/foo/zot': foozot, + 'bin/foo/chrome.manifest': GeneratedFile('resource foo foo/'), + 'bin/chrome.manifest': + GeneratedFile('manifest foo/chrome.manifest'), + }) + parser = SimpleManifestSink(finder, formatter) + component0 = Component('component0') + component1 = Component('component1') + component2 = Component('component2', destdir='destdir') + parser.add(component0, 'bin/foo/b*') + parser.add(component1, 'bin/foo/qux') + parser.add(component1, 'bin/foo/chrome.manifest') + 
class TestCallDeque(unittest.TestCase):
    """Check that CallDeque defers calls and replays them in order."""

    def test_call_deque(self):
        class Recorder(object):
            def __init__(self):
                self._log = []

            def log(self, message):
                self._log.append(message)

            @staticmethod
            def staticlog(recorder, message):
                recorder.log(message)

        def free_log(recorder, message):
            recorder.log(message)

        recorder = Recorder()
        deque = CallDeque()
        # Queue a bound method, a static method and a plain function.
        deque.append(recorder.log, 'foo')
        deque.append(recorder.log, 'bar')
        deque.append(recorder.staticlog, recorder, 'baz')
        deque.append(free_log, recorder, 'qux')
        # Nothing runs until execute() is called...
        self.assertEqual(recorder._log, [])
        deque.execute()
        # ...then everything runs, in append order.
        self.assertEqual(recorder._log, ['foo', 'bar', 'baz', 'qux'])
with spaces " key2="spaces again"', + 'trim', {'key':' value with spaces ', 'key2': 'spaces again'}) + + def do_split_error(self, string): + self.assertRaises(ValueError, Component._split_component_and_options, string) + + def test_component_split_component_and_options_errors(self): + self.do_split_error('"component') + self.do_split_error('comp"onent') + self.do_split_error('component"') + self.do_split_error('"component"') + self.do_split_error('=component') + self.do_split_error('comp=onent') + self.do_split_error('component=') + self.do_split_error('key="val"') + self.do_split_error('component key=') + self.do_split_error('component key="val') + self.do_split_error('component key=val"') + self.do_split_error('component key="val" x') + self.do_split_error('component x key="val"') + self.do_split_error('component key1="val" x key2="val"') + + def do_from_string(self, string, name, destdir=''): + component = Component.from_string(string) + self.assertEqual(name, component.name) + self.assertEqual(destdir, component.destdir) + + def test_component_from_string(self): + self.do_from_string('component', 'component') + self.do_from_string('component-with-hyphen', 'component-with-hyphen') + self.do_from_string('component destdir="foo/bar"', 'component', 'foo/bar') + self.do_from_string('component destdir="bar spc"', 'component', 'bar spc') + self.assertRaises(ErrorMessage, Component.from_string, '') + self.assertRaises(ErrorMessage, Component.from_string, 'component novalue=') + self.assertRaises(ErrorMessage, Component.from_string, 'component badoption=badvalue') + + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_packager_formats.py b/python/mozbuild/mozpack/test/test_packager_formats.py new file mode 100644 index 000000000..1af4336b2 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_packager_formats.py @@ -0,0 +1,428 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import mozunit +import unittest +from mozpack.packager.formats import ( + FlatFormatter, + JarFormatter, + OmniJarFormatter, +) +from mozpack.copier import FileRegistry +from mozpack.files import ( + GeneratedFile, + ManifestFile, +) +from mozpack.chrome.manifest import ( + ManifestContent, + ManifestComponent, + ManifestResource, + ManifestBinaryComponent, +) +from mozpack.test.test_files import ( + MockDest, + foo_xpt, + foo2_xpt, + bar_xpt, + read_interfaces, +) +import mozpack.path as mozpath + + +CONTENTS = { + 'bases': { + # base_path: is_addon? + '': False, + 'app': False, + 'addon0': 'unpacked', + 'addon1': True, + }, + 'manifests': [ + ManifestContent('chrome/f', 'oo', 'oo/'), + ManifestContent('chrome/f', 'bar', 'oo/bar/'), + ManifestResource('chrome/f', 'foo', 'resource://bar/'), + ManifestBinaryComponent('components', 'foo.so'), + ManifestContent('app/chrome', 'content', 'foo/'), + ManifestComponent('app/components', '{foo-id}', 'foo.js'), + ManifestContent('addon0/chrome', 'content', 'foo/bar/'), + ManifestContent('addon1/chrome', 'content', 'foo/bar/'), + ], + 'files': { + 'chrome/f/oo/bar/baz': GeneratedFile('foobarbaz'), + 'chrome/f/oo/baz': GeneratedFile('foobaz'), + 'chrome/f/oo/qux': GeneratedFile('fooqux'), + 'components/foo.so': GeneratedFile('foo.so'), + 'components/foo.xpt': foo_xpt, + 'components/bar.xpt': bar_xpt, + 'foo': GeneratedFile('foo'), + 'app/chrome/foo/foo': GeneratedFile('appfoo'), + 'app/components/foo.js': GeneratedFile('foo.js'), + 'addon0/chrome/foo/bar/baz': GeneratedFile('foobarbaz'), + 'addon0/components/foo.xpt': foo2_xpt, + 'addon0/components/bar.xpt': bar_xpt, + 'addon1/chrome/foo/bar/baz': GeneratedFile('foobarbaz'), + 'addon1/components/foo.xpt': foo2_xpt, + 'addon1/components/bar.xpt': bar_xpt, + }, +} + +FILES = CONTENTS['files'] + +RESULT_FLAT = { + 'chrome.manifest': [ + 'manifest 
chrome/chrome.manifest', + 'manifest components/components.manifest', + ], + 'chrome/chrome.manifest': [ + 'manifest f/f.manifest', + ], + 'chrome/f/f.manifest': [ + 'content oo oo/', + 'content bar oo/bar/', + 'resource foo resource://bar/', + ], + 'chrome/f/oo/bar/baz': FILES['chrome/f/oo/bar/baz'], + 'chrome/f/oo/baz': FILES['chrome/f/oo/baz'], + 'chrome/f/oo/qux': FILES['chrome/f/oo/qux'], + 'components/components.manifest': [ + 'binary-component foo.so', + 'interfaces interfaces.xpt', + ], + 'components/foo.so': FILES['components/foo.so'], + 'components/interfaces.xpt': { + 'foo': read_interfaces(foo_xpt.open())['foo'], + 'bar': read_interfaces(bar_xpt.open())['bar'], + }, + 'foo': FILES['foo'], + 'app/chrome.manifest': [ + 'manifest chrome/chrome.manifest', + 'manifest components/components.manifest', + ], + 'app/chrome/chrome.manifest': [ + 'content content foo/', + ], + 'app/chrome/foo/foo': FILES['app/chrome/foo/foo'], + 'app/components/components.manifest': [ + 'component {foo-id} foo.js', + ], + 'app/components/foo.js': FILES['app/components/foo.js'], +} + +for addon in ('addon0', 'addon1'): + RESULT_FLAT.update({ + mozpath.join(addon, p): f + for p, f in { + 'chrome.manifest': [ + 'manifest chrome/chrome.manifest', + 'manifest components/components.manifest', + ], + 'chrome/chrome.manifest': [ + 'content content foo/bar/', + ], + 'chrome/foo/bar/baz': FILES[mozpath.join(addon, 'chrome/foo/bar/baz')], + 'components/components.manifest': [ + 'interfaces interfaces.xpt', + ], + 'components/interfaces.xpt': { + 'foo': read_interfaces(foo2_xpt.open())['foo'], + 'bar': read_interfaces(bar_xpt.open())['bar'], + }, + }.iteritems() + }) + +RESULT_JAR = { + p: RESULT_FLAT[p] + for p in ( + 'chrome.manifest', + 'chrome/chrome.manifest', + 'components/components.manifest', + 'components/foo.so', + 'components/interfaces.xpt', + 'foo', + 'app/chrome.manifest', + 'app/components/components.manifest', + 'app/components/foo.js', + 'addon0/chrome.manifest', + 
'addon0/components/components.manifest', + 'addon0/components/interfaces.xpt', + ) +} + +RESULT_JAR.update({ + 'chrome/f/f.manifest': [ + 'content oo jar:oo.jar!/', + 'content bar jar:oo.jar!/bar/', + 'resource foo resource://bar/', + ], + 'chrome/f/oo.jar': { + 'bar/baz': FILES['chrome/f/oo/bar/baz'], + 'baz': FILES['chrome/f/oo/baz'], + 'qux': FILES['chrome/f/oo/qux'], + }, + 'app/chrome/chrome.manifest': [ + 'content content jar:foo.jar!/', + ], + 'app/chrome/foo.jar': { + 'foo': FILES['app/chrome/foo/foo'], + }, + 'addon0/chrome/chrome.manifest': [ + 'content content jar:foo.jar!/bar/', + ], + 'addon0/chrome/foo.jar': { + 'bar/baz': FILES['addon0/chrome/foo/bar/baz'], + }, + 'addon1.xpi': { + mozpath.relpath(p, 'addon1'): f + for p, f in RESULT_FLAT.iteritems() + if p.startswith('addon1/') + }, +}) + +RESULT_OMNIJAR = { + p: RESULT_FLAT[p] + for p in ( + 'components/foo.so', + 'foo', + ) +} + +RESULT_OMNIJAR.update({ + p: RESULT_JAR[p] + for p in RESULT_JAR + if p.startswith('addon') +}) + +RESULT_OMNIJAR.update({ + 'omni.foo': { + 'components/components.manifest': [ + 'interfaces interfaces.xpt', + ], + }, + 'chrome.manifest': [ + 'manifest components/components.manifest', + ], + 'components/components.manifest': [ + 'binary-component foo.so', + ], + 'app/omni.foo': { + p: RESULT_FLAT['app/' + p] + for p in ( + 'chrome.manifest', + 'chrome/chrome.manifest', + 'chrome/foo/foo', + 'components/components.manifest', + 'components/foo.js', + ) + }, + 'app/chrome.manifest': [], +}) + +RESULT_OMNIJAR['omni.foo'].update({ + p: RESULT_FLAT[p] + for p in ( + 'chrome.manifest', + 'chrome/chrome.manifest', + 'chrome/f/f.manifest', + 'chrome/f/oo/bar/baz', + 'chrome/f/oo/baz', + 'chrome/f/oo/qux', + 'components/interfaces.xpt', + ) +}) + +CONTENTS_WITH_BASE = { + 'bases': { + mozpath.join('base/root', b) if b else 'base/root': a + for b, a in CONTENTS['bases'].iteritems() + }, + 'manifests': [ + m.move(mozpath.join('base/root', m.base)) + for m in CONTENTS['manifests'] + 
def fill_formatter(formatter, contents):
    """Feed a CONTENTS-style dict (bases/manifests/files) into a formatter.

    ``.xpt`` entries go through ``add_interfaces()``; every other file
    goes through ``add()``.
    """
    # Use .items() for both dict loops: the original mixed .items() and
    # the Python-2-only .iteritems(); .items() behaves identically on
    # Python 2 (returns a list) and keeps the helper portable.
    for base, is_addon in contents['bases'].items():
        formatter.add_base(base, is_addon)

    for manifest in contents['manifests']:
        formatter.add_manifest(manifest)

    for path, f in contents['files'].items():
        if path.endswith('.xpt'):
            formatter.add_interfaces(path, f)
        else:
            formatter.add(path, f)


def get_contents(registry, read_all=False):
    """Return a dict describing a FileRegistry's contents.

    ``.xpt`` entries are replaced with their parsed interfaces (they are
    merged by the formatters, so raw bytes cannot be compared), nested
    registries recurse, and manifest files — or every file, when
    ``read_all`` is True — are replaced with their list of lines.
    """
    result = {}
    for k, v in registry:
        if k.endswith('.xpt'):
            tmpfile = MockDest()
            registry[k].copy(tmpfile)
            result[k] = read_interfaces(tmpfile)
        elif isinstance(v, FileRegistry):
            # Propagate read_all: the original dropped it on recursion,
            # so nested registries were never fully read.
            result[k] = get_contents(v, read_all=read_all)
        elif isinstance(v, ManifestFile) or read_all:
            result[k] = v.open().read().splitlines()
        else:
            result[k] = v
    return result
them. + if not f.endswith('.xpt'): + self.assertTrue(formatter.contains(f)) + + def test_flat_formatter(self): + registry = FileRegistry() + formatter = FlatFormatter(registry) + + fill_formatter(formatter, CONTENTS) + self.assertEqual(get_contents(registry), RESULT_FLAT) + self.do_test_contents(formatter, CONTENTS) + + def test_jar_formatter(self): + registry = FileRegistry() + formatter = JarFormatter(registry) + + fill_formatter(formatter, CONTENTS) + self.assertEqual(get_contents(registry), RESULT_JAR) + self.do_test_contents(formatter, CONTENTS) + + def test_omnijar_formatter(self): + registry = FileRegistry() + formatter = OmniJarFormatter(registry, 'omni.foo') + + fill_formatter(formatter, CONTENTS) + self.assertEqual(get_contents(registry), RESULT_OMNIJAR) + self.do_test_contents(formatter, CONTENTS) + + def test_flat_formatter_with_base(self): + registry = FileRegistry() + formatter = FlatFormatter(registry) + + fill_formatter(formatter, CONTENTS_WITH_BASE) + self.assertEqual(get_contents(registry), RESULT_FLAT_WITH_BASE) + self.do_test_contents(formatter, CONTENTS_WITH_BASE) + + def test_jar_formatter_with_base(self): + registry = FileRegistry() + formatter = JarFormatter(registry) + + fill_formatter(formatter, CONTENTS_WITH_BASE) + self.assertEqual(get_contents(registry), RESULT_JAR_WITH_BASE) + self.do_test_contents(formatter, CONTENTS_WITH_BASE) + + def test_omnijar_formatter_with_base(self): + registry = FileRegistry() + formatter = OmniJarFormatter(registry, 'omni.foo') + + fill_formatter(formatter, CONTENTS_WITH_BASE) + self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_BASE) + self.do_test_contents(formatter, CONTENTS_WITH_BASE) + + def test_omnijar_is_resource(self): + def is_resource(base, path): + registry = FileRegistry() + f = OmniJarFormatter(registry, 'omni.foo', non_resources=[ + 'defaults/messenger/mailViews.dat', + 'defaults/foo/*', + '*/dummy', + ]) + f.add_base('') + f.add_base('app') + f.add(mozpath.join(base, path), 
GeneratedFile('')) + if f.copier.contains(mozpath.join(base, path)): + return False + self.assertTrue(f.copier.contains(mozpath.join(base, 'omni.foo'))) + self.assertTrue(f.copier[mozpath.join(base, 'omni.foo')] + .contains(path)) + return True + + for base in ['', 'app/']: + self.assertTrue(is_resource(base, 'chrome')) + self.assertTrue( + is_resource(base, 'chrome/foo/bar/baz.properties')) + self.assertFalse(is_resource(base, 'chrome/icons/foo.png')) + self.assertTrue(is_resource(base, 'components/foo.js')) + self.assertFalse(is_resource(base, 'components/foo.so')) + self.assertTrue(is_resource(base, 'res/foo.css')) + self.assertFalse(is_resource(base, 'res/cursors/foo.png')) + self.assertFalse(is_resource(base, 'res/MainMenu.nib/foo')) + self.assertTrue(is_resource(base, 'defaults/pref/foo.js')) + self.assertFalse( + is_resource(base, 'defaults/pref/channel-prefs.js')) + self.assertTrue( + is_resource(base, 'defaults/preferences/foo.js')) + self.assertFalse( + is_resource(base, 'defaults/preferences/channel-prefs.js')) + self.assertTrue(is_resource(base, 'modules/foo.jsm')) + self.assertTrue(is_resource(base, 'greprefs.js')) + self.assertTrue(is_resource(base, 'hyphenation/foo')) + self.assertTrue(is_resource(base, 'update.locale')) + self.assertTrue( + is_resource(base, 'jsloader/resource/gre/modules/foo.jsm')) + self.assertFalse(is_resource(base, 'foo')) + self.assertFalse(is_resource(base, 'foo/bar/greprefs.js')) + self.assertTrue(is_resource(base, 'defaults/messenger/foo.dat')) + self.assertFalse( + is_resource(base, 'defaults/messenger/mailViews.dat')) + self.assertTrue(is_resource(base, 'defaults/pref/foo.js')) + self.assertFalse(is_resource(base, 'defaults/foo/bar.dat')) + self.assertFalse(is_resource(base, 'defaults/foo/bar/baz.dat')) + self.assertTrue(is_resource(base, 'chrome/foo/bar/baz/dummy_')) + self.assertFalse(is_resource(base, 'chrome/foo/bar/baz/dummy')) + self.assertTrue(is_resource(base, 'chrome/foo/bar/dummy_')) + 
self.assertFalse(is_resource(base, 'chrome/foo/bar/dummy')) + + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_packager_l10n.py b/python/mozbuild/mozpack/test/test_packager_l10n.py new file mode 100644 index 000000000..c797eadd1 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_packager_l10n.py @@ -0,0 +1,126 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest +import mozunit +from test_packager import MockFinder +from mozpack.packager import l10n +from mozpack.files import ( + GeneratedFile, + ManifestFile, +) +from mozpack.chrome.manifest import ( + Manifest, + ManifestLocale, + ManifestContent, +) +from mozpack.copier import FileRegistry +from mozpack.packager.formats import FlatFormatter + + +class TestL10NRepack(unittest.TestCase): + def test_l10n_repack(self): + foo = GeneratedFile('foo') + foobar = GeneratedFile('foobar') + qux = GeneratedFile('qux') + bar = GeneratedFile('bar') + baz = GeneratedFile('baz') + dict_aa = GeneratedFile('dict_aa') + dict_bb = GeneratedFile('dict_bb') + dict_cc = GeneratedFile('dict_cc') + barbaz = GeneratedFile('barbaz') + lst = GeneratedFile('foo\nbar') + app_finder = MockFinder({ + 'bar/foo': foo, + 'chrome/foo/foobar': foobar, + 'chrome/qux/qux.properties': qux, + 'chrome/qux/baz/baz.properties': baz, + 'chrome/chrome.manifest': ManifestFile('chrome', [ + ManifestContent('chrome', 'foo', 'foo/'), + ManifestLocale('chrome', 'qux', 'en-US', 'qux/'), + ]), + 'chrome.manifest': + ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]), + 'dict/aa': dict_aa, + 'app/chrome/bar/barbaz.dtd': barbaz, + 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [ + ManifestLocale('app/chrome', 'bar', 'en-US', 'bar/') + ]), + 'app/chrome.manifest': + ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]), + 
'app/dict/bb': dict_bb, + 'app/dict/cc': dict_cc, + 'app/chrome/bar/search/foo.xml': foo, + 'app/chrome/bar/search/bar.xml': bar, + 'app/chrome/bar/search/lst.txt': lst, + }) + app_finder.jarlogs = {} + app_finder.base = 'app' + foo_l10n = GeneratedFile('foo_l10n') + qux_l10n = GeneratedFile('qux_l10n') + baz_l10n = GeneratedFile('baz_l10n') + barbaz_l10n = GeneratedFile('barbaz_l10n') + lst_l10n = GeneratedFile('foo\nqux') + l10n_finder = MockFinder({ + 'chrome/qux-l10n/qux.properties': qux_l10n, + 'chrome/qux-l10n/baz/baz.properties': baz_l10n, + 'chrome/chrome.manifest': ManifestFile('chrome', [ + ManifestLocale('chrome', 'qux', 'x-test', 'qux-l10n/'), + ]), + 'chrome.manifest': + ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]), + 'dict/bb': dict_bb, + 'dict/cc': dict_cc, + 'app/chrome/bar-l10n/barbaz.dtd': barbaz_l10n, + 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [ + ManifestLocale('app/chrome', 'bar', 'x-test', 'bar-l10n/') + ]), + 'app/chrome.manifest': + ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]), + 'app/dict/aa': dict_aa, + 'app/chrome/bar-l10n/search/foo.xml': foo_l10n, + 'app/chrome/bar-l10n/search/qux.xml': qux_l10n, + 'app/chrome/bar-l10n/search/lst.txt': lst_l10n, + }) + l10n_finder.base = 'l10n' + copier = FileRegistry() + formatter = FlatFormatter(copier) + + l10n._repack(app_finder, l10n_finder, copier, formatter, + ['dict', 'chrome/**/search/*.xml']) + self.maxDiff = None + + repacked = { + 'bar/foo': foo, + 'chrome/foo/foobar': foobar, + 'chrome/qux-l10n/qux.properties': qux_l10n, + 'chrome/qux-l10n/baz/baz.properties': baz_l10n, + 'chrome/chrome.manifest': ManifestFile('chrome', [ + ManifestContent('chrome', 'foo', 'foo/'), + ManifestLocale('chrome', 'qux', 'x-test', 'qux-l10n/'), + ]), + 'chrome.manifest': + ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]), + 'dict/bb': dict_bb, + 'dict/cc': dict_cc, + 'app/chrome/bar-l10n/barbaz.dtd': barbaz_l10n, + 'app/chrome/chrome.manifest': 
ManifestFile('app/chrome', [ + ManifestLocale('app/chrome', 'bar', 'x-test', 'bar-l10n/') + ]), + 'app/chrome.manifest': + ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]), + 'app/dict/aa': dict_aa, + 'app/chrome/bar-l10n/search/foo.xml': foo_l10n, + 'app/chrome/bar-l10n/search/qux.xml': qux_l10n, + 'app/chrome/bar-l10n/search/lst.txt': lst_l10n, + } + + self.assertEqual( + dict((p, f.open().read()) for p, f in copier), + dict((p, f.open().read()) for p, f in repacked.iteritems()) + ) + + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_packager_unpack.py b/python/mozbuild/mozpack/test/test_packager_unpack.py new file mode 100644 index 000000000..d201cabf7 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_packager_unpack.py @@ -0,0 +1,65 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import mozunit +from mozpack.packager.formats import ( + FlatFormatter, + JarFormatter, + OmniJarFormatter, +) +from mozpack.packager.unpack import unpack_to_registry +from mozpack.copier import ( + FileCopier, + FileRegistry, +) +from mozpack.test.test_packager_formats import ( + CONTENTS, + fill_formatter, + get_contents, +) +from mozpack.test.test_files import TestWithTmpDir + + +class TestUnpack(TestWithTmpDir): + maxDiff = None + + @staticmethod + def _get_copier(cls): + copier = FileCopier() + formatter = cls(copier) + fill_formatter(formatter, CONTENTS) + return copier + + @classmethod + def setUpClass(cls): + cls.contents = get_contents(cls._get_copier(FlatFormatter), + read_all=True) + + def _unpack_test(self, cls): + # Format a package with the given formatter class + copier = self._get_copier(cls) + copier.copy(self.tmpdir) + + # Unpack that package. Its content is expected to match that of a Flat + # formatted package. 
+ registry = FileRegistry() + unpack_to_registry(self.tmpdir, registry) + self.assertEqual(get_contents(registry, read_all=True), self.contents) + + def test_flat_unpack(self): + self._unpack_test(FlatFormatter) + + def test_jar_unpack(self): + self._unpack_test(JarFormatter) + + def test_omnijar_unpack(self): + class OmniFooFormatter(OmniJarFormatter): + def __init__(self, registry): + super(OmniFooFormatter, self).__init__(registry, 'omni.foo') + + self._unpack_test(OmniFooFormatter) + + +if __name__ == '__main__': + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_path.py b/python/mozbuild/mozpack/test/test_path.py new file mode 100644 index 000000000..ee41e4a69 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_path.py @@ -0,0 +1,143 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from mozpack.path import ( + relpath, + join, + normpath, + dirname, + commonprefix, + basename, + split, + splitext, + basedir, + match, + rebase, +) +import unittest +import mozunit +import os + + +class TestPath(unittest.TestCase): + SEP = os.sep + + def test_relpath(self): + self.assertEqual(relpath('foo', 'foo'), '') + self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar'), '') + self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo'), 'bar') + self.assertEqual(relpath(self.SEP.join(('foo', 'bar', 'baz')), 'foo'), + 'bar/baz') + self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar/baz'), + '..') + self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/baz'), + '../bar') + self.assertEqual(relpath('foo/', 'foo'), '') + self.assertEqual(relpath('foo/bar/', 'foo'), 'bar') + + def test_join(self): + self.assertEqual(join('foo', 'bar', 'baz'), 'foo/bar/baz') + self.assertEqual(join('foo', '', 'bar'), 'foo/bar') + self.assertEqual(join('', 'foo', 'bar'), 'foo/bar') + 
self.assertEqual(join('', 'foo', '/bar'), '/bar') + + def test_normpath(self): + self.assertEqual(normpath(self.SEP.join(('foo', 'bar', 'baz', + '..', 'qux'))), 'foo/bar/qux') + + def test_dirname(self): + self.assertEqual(dirname('foo/bar/baz'), 'foo/bar') + self.assertEqual(dirname('foo/bar'), 'foo') + self.assertEqual(dirname('foo'), '') + self.assertEqual(dirname('foo/bar/'), 'foo/bar') + + def test_commonprefix(self): + self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')), + 'foo/qux', 'foo/baz/qux']), 'foo/') + self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')), + 'foo/qux', 'baz/qux']), '') + + def test_basename(self): + self.assertEqual(basename('foo/bar/baz'), 'baz') + self.assertEqual(basename('foo/bar'), 'bar') + self.assertEqual(basename('foo'), 'foo') + self.assertEqual(basename('foo/bar/'), '') + + def test_split(self): + self.assertEqual(split(self.SEP.join(('foo', 'bar', 'baz'))), + ['foo', 'bar', 'baz']) + + def test_splitext(self): + self.assertEqual(splitext(self.SEP.join(('foo', 'bar', 'baz.qux'))), + ('foo/bar/baz', '.qux')) + + def test_basedir(self): + foobarbaz = self.SEP.join(('foo', 'bar', 'baz')) + self.assertEqual(basedir(foobarbaz, ['foo', 'bar', 'baz']), 'foo') + self.assertEqual(basedir(foobarbaz, ['foo', 'foo/bar', 'baz']), + 'foo/bar') + self.assertEqual(basedir(foobarbaz, ['foo/bar', 'foo', 'baz']), + 'foo/bar') + self.assertEqual(basedir(foobarbaz, ['foo', 'bar', '']), 'foo') + self.assertEqual(basedir(foobarbaz, ['bar', 'baz', '']), '') + + def test_match(self): + self.assertTrue(match('foo', '')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/bar')) + self.assertTrue(match('foo/bar/baz.qux', 'foo')) + self.assertTrue(match('foo', '*')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*')) + 
self.assertTrue(match('foo/bar/baz.qux', 'foo/*/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', '*/bar/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', '*/*/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', '*/*/*')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*.qux')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/b*/*z.qux')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/b*r/ba*z.qux')) + self.assertFalse(match('foo/bar/baz.qux', 'foo/b*z/ba*r.qux')) + self.assertTrue(match('foo/bar/baz.qux', '**')) + self.assertTrue(match('foo/bar/baz.qux', '**/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', '**/bar/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/**/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux')) + self.assertTrue(match('foo/bar/baz.qux', '**/foo/bar/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/baz.qux')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/*.qux')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux')) + self.assertTrue(match('foo/bar/baz.qux', '**/*.qux')) + self.assertFalse(match('foo/bar/baz.qux', '**.qux')) + self.assertFalse(match('foo/bar', 'foo/*/bar')) + self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/**')) + self.assertFalse(match('foo/nobar/baz.qux', 'foo/**/bar/**')) + self.assertTrue(match('foo/bar', 'foo/**/bar/**')) + + def test_rebase(self): + self.assertEqual(rebase('foo', 'foo/bar', 'bar/baz'), 'baz') + self.assertEqual(rebase('foo', 'foo', 'bar/baz'), 'bar/baz') + self.assertEqual(rebase('foo/bar', 'foo', 'baz'), 'bar/baz') + + +if os.altsep: + class TestAltPath(TestPath): + SEP = os.altsep + + class TestReverseAltPath(TestPath): + def setUp(self): + sep = os.sep + os.sep = os.altsep + os.altsep = sep + + def tearDown(self): + self.setUp() + + class TestAltReverseAltPath(TestReverseAltPath): + SEP = os.altsep + + +if __name__ == '__main__': + mozunit.main() diff --git 
# ---- python/mozbuild/mozpack/test/test_unify.py ----
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from mozbuild.util import ensureParentDir

from mozpack.unify import (
    UnifiedFinder,
    UnifiedBuildFinder,
)
import mozunit
from mozpack.test.test_files import TestWithTmpDir
from mozpack.files import FileFinder
from mozpack.mozjar import JarWriter
from mozpack.test.test_files import MockDest
from cStringIO import StringIO
import os
import sys
from mozpack.errors import (
    ErrorMessage,
    AccumulatedErrors,
    errors,
)


class TestUnified(TestWithTmpDir):
    '''Shared helpers: write files under the 'a' and/or 'b' subtrees of
    the temporary directory, which play the role of the two platform
    trees to unify.'''

    def create_one(self, which, path, content):
        file = self.tmppath(os.path.join(which, path))
        ensureParentDir(file)
        open(file, 'wb').write(content)

    def create_both(self, path, content):
        for p in ['a', 'b']:
            self.create_one(p, path, content)


class TestUnifiedFinder(TestUnified):
    def test_unified_finder(self):
        # Identical in both trees: unifies cleanly.
        self.create_both('foo/bar', 'foobar')
        self.create_both('foo/baz', 'foobaz')
        # Present in only one tree, or differing: errors out.
        self.create_one('a', 'bar', 'bar')
        self.create_one('b', 'baz', 'baz')
        self.create_one('a', 'qux', 'foobar')
        self.create_one('b', 'qux', 'baz')
        # Same lines in different order: allowed only under 'sorted'.
        self.create_one('a', 'test/foo', 'a\nb\nc\n')
        self.create_one('b', 'test/foo', 'b\nc\na\n')
        self.create_both('test/bar', 'a\nb\nc\n')

        finder = UnifiedFinder(FileFinder(self.tmppath('a')),
                               FileFinder(self.tmppath('b')),
                               sorted=['test'])
        self.assertEqual(sorted([(f, c.open().read())
                                 for f, c in finder.find('foo')]),
                         [('foo/bar', 'foobar'), ('foo/baz', 'foobaz')])
        self.assertRaises(ErrorMessage, any, finder.find('bar'))
        self.assertRaises(ErrorMessage, any, finder.find('baz'))
        self.assertRaises(ErrorMessage, any, finder.find('qux'))
        self.assertEqual(sorted([(f, c.open().read())
                                 for f, c in finder.find('test')]),
                         [('test/bar', 'a\nb\nc\n'),
                          ('test/foo', 'a\nb\nc\n')])


class TestUnifiedBuildFinder(TestUnified):
    def test_unified_build_finder(self):
        finder = UnifiedBuildFinder(FileFinder(self.tmppath('a')),
                                    FileFinder(self.tmppath('b')))

        # Test chrome.manifest unification: order differences are tolerated.
        self.create_both('chrome.manifest', 'a\nb\nc\n')
        self.create_one('a', 'chrome/chrome.manifest', 'a\nb\nc\n')
        self.create_one('b', 'chrome/chrome.manifest', 'b\nc\na\n')
        self.assertEqual(sorted([(f, c.open().read()) for f, c in
                                 finder.find('**/chrome.manifest')]),
                         [('chrome.manifest', 'a\nb\nc\n'),
                          ('chrome/chrome.manifest', 'a\nb\nc\n')])

        # Test buildconfig.html unification: both bodies are merged with a
        # <hr> separator.
        self.create_one('a', 'chrome/browser/foo/buildconfig.html',
                        '\n'.join([
                            '<html>',
                            '<body>',
                            '<h1>about:buildconfig</h1>',
                            '<div>foo</div>',
                            '</body>',
                            '</html>',
                        ]))
        self.create_one('b', 'chrome/browser/foo/buildconfig.html',
                        '\n'.join([
                            '<html>',
                            '<body>',
                            '<h1>about:buildconfig</h1>',
                            '<div>bar</div>',
                            '</body>',
                            '</html>',
                        ]))
        self.assertEqual(sorted([(f, c.open().read()) for f, c in
                                 finder.find('**/buildconfig.html')]),
                         [('chrome/browser/foo/buildconfig.html', '\n'.join([
                             '<html>',
                             '<body>',
                             '<h1>about:buildconfig</h1>',
                             '<div>foo</div>',
                             '<hr> </hr>',
                             '<div>bar</div>',
                             '</body>',
                             '</html>',
                         ]))])

        # Test xpi file unification: identical archives unify, differing
        # ones accumulate errors.
        xpi = MockDest()
        with JarWriter(fileobj=xpi, compress=True) as jar:
            jar.add('foo', 'foo')
            jar.add('bar', 'bar')
        foo_xpi = xpi.read()
        self.create_both('foo.xpi', foo_xpi)

        with JarWriter(fileobj=xpi, compress=True) as jar:
            jar.add('foo', 'bar')
        self.create_one('a', 'bar.xpi', foo_xpi)
        self.create_one('b', 'bar.xpi', xpi.read())

        errors.out = StringIO()
        with self.assertRaises(AccumulatedErrors), errors.accumulate():
            self.assertEqual([(f, c.open().read()) for f, c in
                              finder.find('*.xpi')],
                             [('foo.xpi', foo_xpi)])
        errors.out = sys.stderr

        # Test install.rdf unification
        x86_64 = 'Darwin_x86_64-gcc3'
        x86 = 'Darwin_x86-gcc3'
        target_tag = '<{em}targetPlatform>{platform}</{em}targetPlatform>'
        target_attr = '{em}targetPlatform="{platform}" '

        rdf_tag = ''.join([
            '<{RDF}Description {em}bar="bar" {em}qux="qux">',
            '<{em}foo>foo</{em}foo>',
            '{targets}',
            '<{em}baz>baz</{em}baz>',
            '</{RDF}Description>'
        ])
        rdf_attr = ''.join([
            '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
            '{targets}',
            '<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>',
            '</{RDF}Description>'
        ])

        for descr_ns, target_ns in (('RDF:', ''), ('', 'em:'),
                                    ('RDF:', 'em:')):
            # First we need to infuse the above strings with our namespaces
            # and platform values.
            ns = {'RDF': descr_ns, 'em': target_ns}
            target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
            target_tag_x86 = target_tag.format(platform=x86, **ns)
            target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
            target_attr_x86 = target_attr.format(platform=x86, **ns)

            tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
            tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
            tag_merged = rdf_tag.format(
                targets=target_tag_x86_64 + target_tag_x86, **ns)
            tag_empty = rdf_tag.format(targets="", **ns)

            attr_x86_64 = rdf_attr.format(
                attr=target_attr_x86_64, targets="", **ns)
            attr_x86 = rdf_attr.format(
                attr=target_attr_x86, targets="", **ns)
            attr_merged = rdf_attr.format(
                attr="", targets=target_tag_x86_64 + target_tag_x86, **ns)

            # This table defines the test cases, columns "a" and "b" being
            # the contents of the install.rdf of the respective platform and
            # "result" the expected merged content after unification.
            testcases = (
                #_____a_____  _____b_____  ___result___#
                (tag_x86_64,  tag_x86,     tag_merged),
                (tag_x86_64,  tag_empty,   tag_empty),
                (tag_empty,   tag_x86,     tag_empty),
                (tag_empty,   tag_empty,   tag_empty),

                (attr_x86_64, attr_x86,    attr_merged),
                (tag_x86_64,  attr_x86,    tag_merged),
                (attr_x86_64, tag_x86,     attr_merged),

                (attr_x86_64, tag_empty,   tag_empty),
                (tag_empty,   attr_x86,    tag_empty)
            )

            # Now create the files from the above table and compare
            results = []
            for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
                filename = 'ext/id{0}/install.rdf'.format(emid)
                self.create_one('a', filename, rdf_a)
                self.create_one('b', filename, rdf_b)
                results.append((filename, result))

            self.assertEqual(sorted([(f, c.open().read()) for f, c in
                                     finder.find('**/install.rdf')]), results)


if __name__ == '__main__':
    mozunit.main()


# ---- python/mozbuild/mozpack/unify.py ----
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ +from __future__ import absolute_import + +from mozpack.files import ( + BaseFinder, + JarFinder, + ExecutableFile, + BaseFile, + GeneratedFile, +) +from mozpack.executables import ( + MACHO_SIGNATURES, +) +from mozpack.mozjar import JarReader +from mozpack.errors import errors +from tempfile import mkstemp +import mozpack.path as mozpath +import struct +import os +import re +import subprocess +import buildconfig +from collections import OrderedDict + +# Regular expressions for unifying install.rdf +FIND_TARGET_PLATFORM = re.compile(r""" + <(?P<ns>[-._0-9A-Za-z]+:)?targetPlatform> # The targetPlatform tag, with any namespace + (?P<platform>[^<]*) # The actual platform value + </(?P=ns)?targetPlatform> # The closing tag + """, re.X) +FIND_TARGET_PLATFORM_ATTR = re.compile(r""" + (?P<tag><(?:[-._0-9A-Za-z]+:)?Description) # The opening part of the <Description> tag + (?P<attrs>[^>]*?)\s+ # The initial attributes + (?P<ns>[-._0-9A-Za-z]+:)?targetPlatform= # The targetPlatform attribute, with any namespace + [\'"](?P<platform>[^\'"]+)[\'"] # The actual platform value + (?P<otherattrs>[^>]*?>) # The remaining attributes and closing angle bracket + """, re.X) + +def may_unify_binary(file): + ''' + Return whether the given BaseFile instance is an ExecutableFile that + may be unified. Only non-fat Mach-O binaries are to be unified. + ''' + if isinstance(file, ExecutableFile): + signature = file.open().read(4) + if len(signature) < 4: + return False + signature = struct.unpack('>L', signature)[0] + if signature in MACHO_SIGNATURES: + return True + return False + + +class UnifiedExecutableFile(BaseFile): + ''' + File class for executable and library files that to be unified with 'lipo'. + ''' + def __init__(self, executable1, executable2): + ''' + Initialize a UnifiedExecutableFile with a pair of ExecutableFiles to + be unified. They are expected to be non-fat Mach-O executables. 
+ ''' + assert isinstance(executable1, ExecutableFile) + assert isinstance(executable2, ExecutableFile) + self._executables = (executable1, executable2) + + def copy(self, dest, skip_if_older=True): + ''' + Create a fat executable from the two Mach-O executable given when + creating the instance. + skip_if_older is ignored. + ''' + assert isinstance(dest, basestring) + tmpfiles = [] + try: + for e in self._executables: + fd, f = mkstemp() + os.close(fd) + tmpfiles.append(f) + e.copy(f, skip_if_older=False) + lipo = buildconfig.substs.get('LIPO') or 'lipo' + subprocess.call([lipo, '-create'] + tmpfiles + ['-output', dest]) + finally: + for f in tmpfiles: + os.unlink(f) + + +class UnifiedFinder(BaseFinder): + ''' + Helper to get unified BaseFile instances from two distinct trees on the + file system. + ''' + def __init__(self, finder1, finder2, sorted=[], **kargs): + ''' + Initialize a UnifiedFinder. finder1 and finder2 are BaseFinder + instances from which files are picked. UnifiedFinder.find() will act as + FileFinder.find() but will error out when matches can only be found in + one of the two trees and not the other. It will also error out if + matches can be found on both ends but their contents are not identical. + + The sorted argument gives a list of mozpath.match patterns. File + paths matching one of these patterns will have their contents compared + with their lines sorted. + ''' + assert isinstance(finder1, BaseFinder) + assert isinstance(finder2, BaseFinder) + self._finder1 = finder1 + self._finder2 = finder2 + self._sorted = sorted + BaseFinder.__init__(self, finder1.base, **kargs) + + def _find(self, path): + ''' + UnifiedFinder.find() implementation. 
+ ''' + files1 = OrderedDict() + for p, f in self._finder1.find(path): + files1[p] = f + files2 = set() + for p, f in self._finder2.find(path): + files2.add(p) + if p in files1: + if may_unify_binary(files1[p]) and \ + may_unify_binary(f): + yield p, UnifiedExecutableFile(files1[p], f) + else: + err = errors.count + unified = self.unify_file(p, files1[p], f) + if unified: + yield p, unified + elif err == errors.count: + self._report_difference(p, files1[p], f) + else: + errors.error('File missing in %s: %s' % + (self._finder1.base, p)) + for p in [p for p in files1 if not p in files2]: + errors.error('File missing in %s: %s' % (self._finder2.base, p)) + + def _report_difference(self, path, file1, file2): + ''' + Report differences between files in both trees. + ''' + errors.error("Can't unify %s: file differs between %s and %s" % + (path, self._finder1.base, self._finder2.base)) + if not isinstance(file1, ExecutableFile) and \ + not isinstance(file2, ExecutableFile): + from difflib import unified_diff + for line in unified_diff(file1.open().readlines(), + file2.open().readlines(), + os.path.join(self._finder1.base, path), + os.path.join(self._finder2.base, path)): + errors.out.write(line) + + def unify_file(self, path, file1, file2): + ''' + Given two BaseFiles and the path they were found at, check whether + their content match and return the first BaseFile if they do. + ''' + content1 = file1.open().readlines() + content2 = file2.open().readlines() + if content1 == content2: + return file1 + for pattern in self._sorted: + if mozpath.match(path, pattern): + if sorted(content1) == sorted(content2): + return file1 + break + return None + + +class UnifiedBuildFinder(UnifiedFinder): + ''' + Specialized UnifiedFinder for Mozilla applications packaging. It allows + "*.manifest" files to differ in their order, and unifies "buildconfig.html" + files by merging their content. 
+ ''' + def __init__(self, finder1, finder2, **kargs): + UnifiedFinder.__init__(self, finder1, finder2, + sorted=['**/*.manifest'], **kargs) + + def unify_file(self, path, file1, file2): + ''' + Unify files taking Mozilla application special cases into account. + Otherwise defer to UnifiedFinder.unify_file. + ''' + basename = mozpath.basename(path) + if basename == 'buildconfig.html': + content1 = file1.open().readlines() + content2 = file2.open().readlines() + # Copy everything from the first file up to the end of its <body>, + # insert a <hr> between the two files and copy the second file's + # content beginning after its leading <h1>. + return GeneratedFile(''.join( + content1[:content1.index('</body>\n')] + + ['<hr> </hr>\n'] + + content2[content2.index('<h1>about:buildconfig</h1>\n') + 1:] + )) + elif basename == 'install.rdf': + # install.rdf files often have em:targetPlatform (either as + # attribute or as tag) that will differ between platforms. The + # unified install.rdf should contain both em:targetPlatforms if + # they exist, or strip them if only one file has a target platform. 
+ content1, content2 = ( + FIND_TARGET_PLATFORM_ATTR.sub(lambda m: \ + m.group('tag') + m.group('attrs') + m.group('otherattrs') + + '<%stargetPlatform>%s</%stargetPlatform>' % \ + (m.group('ns') or "", m.group('platform'), m.group('ns') or ""), + f.open().read() + ) for f in (file1, file2) + ) + + platform2 = FIND_TARGET_PLATFORM.search(content2) + return GeneratedFile(FIND_TARGET_PLATFORM.sub( + lambda m: m.group(0) + platform2.group(0) if platform2 else '', + content1 + )) + elif path.endswith('.xpi'): + finder1 = JarFinder(os.path.join(self._finder1.base, path), + JarReader(fileobj=file1.open())) + finder2 = JarFinder(os.path.join(self._finder2.base, path), + JarReader(fileobj=file2.open())) + unifier = UnifiedFinder(finder1, finder2, sorted=self._sorted) + err = errors.count + all(unifier.find('')) + if err == errors.count: + return file1 + return None + return UnifiedFinder.unify_file(self, path, file1, file2) |