Diffstat (limited to 'python/mozbuild/mozpack/test')
-rw-r--r--  python/mozbuild/mozpack/test/__init__.py                     0
-rw-r--r--  python/mozbuild/mozpack/test/data/test_data                  1
-rw-r--r--  python/mozbuild/mozpack/test/support/minify_js_verify.py    17
-rw-r--r--  python/mozbuild/mozpack/test/test_archive.py               190
-rw-r--r--  python/mozbuild/mozpack/test/test_chrome_flags.py          148
-rw-r--r--  python/mozbuild/mozpack/test/test_chrome_manifest.py       149
-rw-r--r--  python/mozbuild/mozpack/test/test_copier.py                529
-rw-r--r--  python/mozbuild/mozpack/test/test_errors.py                 93
-rw-r--r--  python/mozbuild/mozpack/test/test_files.py                1160
-rw-r--r--  python/mozbuild/mozpack/test/test_manifests.py             375
-rw-r--r--  python/mozbuild/mozpack/test/test_mozjar.py                342
-rw-r--r--  python/mozbuild/mozpack/test/test_packager.py              490
-rw-r--r--  python/mozbuild/mozpack/test/test_packager_formats.py      428
-rw-r--r--  python/mozbuild/mozpack/test/test_packager_l10n.py         126
-rw-r--r--  python/mozbuild/mozpack/test/test_packager_unpack.py        65
-rw-r--r--  python/mozbuild/mozpack/test/test_path.py                  143
-rw-r--r--  python/mozbuild/mozpack/test/test_unify.py                 199
17 files changed, 4455 insertions, 0 deletions
diff --git a/python/mozbuild/mozpack/test/__init__.py b/python/mozbuild/mozpack/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/python/mozbuild/mozpack/test/__init__.py
diff --git a/python/mozbuild/mozpack/test/data/test_data b/python/mozbuild/mozpack/test/data/test_data
new file mode 100644
index 000000000..fb7f0c4fc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/data/test_data
@@ -0,0 +1 @@
+test_data
\ No newline at end of file
diff --git a/python/mozbuild/mozpack/test/support/minify_js_verify.py b/python/mozbuild/mozpack/test/support/minify_js_verify.py
new file mode 100644
index 000000000..8e4e8b759
--- /dev/null
+++ b/python/mozbuild/mozpack/test/support/minify_js_verify.py
@@ -0,0 +1,17 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function
+import sys
+
+
+if len(sys.argv) != 4:
+ raise Exception('Usage: minify_js_verify <exitcode> <orig> <minified>')
+
+retcode = int(sys.argv[1])
+
+if retcode:
+ print('Error message', file=sys.stderr)
+
+sys.exit(retcode)
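A minimal usage sketch (not part of the patch): minify_js_verify.py takes an exit code and two path arguments, optionally writes an error message to stderr, and exits with the requested code, which suggests it is driven as an external verify command by the MinifiedJavaScript tests further down. The file names below are hypothetical placeholders.

    import subprocess
    import sys

    # Ask the helper to simulate a failing verification (exit code 1).
    # 'orig.js' and 'min.js' are placeholder arguments; only their count matters.
    status = subprocess.call([sys.executable, 'support/minify_js_verify.py',
                              '1', 'orig.js', 'min.js'])
    print(status)  # 1; 'Error message' was written to stderr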
diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py
new file mode 100644
index 000000000..6f61f7eb7
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_archive.py
@@ -0,0 +1,190 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import hashlib
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import unittest
+
+from mozpack.archive import (
+ DEFAULT_MTIME,
+ create_tar_from_files,
+ create_tar_gz_from_files,
+ create_tar_bz2_from_files,
+)
+
+from mozunit import main
+
+
+MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+
+
+def file_hash(path):
+ h = hashlib.sha1()
+ with open(path, 'rb') as fh:
+ while True:
+ data = fh.read(8192)
+ if not data:
+ break
+ h.update(data)
+
+ return h.hexdigest()
+
+
+class TestArchive(unittest.TestCase):
+ def _create_files(self, root):
+ files = {}
+ for i in range(10):
+ p = os.path.join(root, b'file%d' % i)
+ with open(p, 'wb') as fh:
+ fh.write(b'file%d' % i)
+ # Need to set permissions or umask may influence testing.
+ os.chmod(p, MODE_STANDARD)
+ files[b'file%d' % i] = p
+
+ return files
+
+ def _verify_basic_tarfile(self, tf):
+ self.assertEqual(len(tf.getmembers()), 10)
+
+ names = ['file%d' % i for i in range(10)]
+ self.assertEqual(tf.getnames(), names)
+
+ for ti in tf.getmembers():
+ self.assertEqual(ti.uid, 0)
+ self.assertEqual(ti.gid, 0)
+ self.assertEqual(ti.uname, '')
+ self.assertEqual(ti.gname, '')
+ self.assertEqual(ti.mode, MODE_STANDARD)
+ self.assertEqual(ti.mtime, DEFAULT_MTIME)
+
+ def test_dirs_refused(self):
+ d = tempfile.mkdtemp()
+ try:
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ with self.assertRaisesRegexp(ValueError, 'not a regular'):
+ create_tar_from_files(fh, {'test': d})
+ finally:
+ shutil.rmtree(d)
+
+ def test_setuid_setgid_refused(self):
+ d = tempfile.mkdtemp()
+ try:
+ uid = os.path.join(d, 'setuid')
+ gid = os.path.join(d, 'setgid')
+ with open(uid, 'a'):
+ pass
+ with open(gid, 'a'):
+ pass
+
+ os.chmod(uid, MODE_STANDARD | stat.S_ISUID)
+ os.chmod(gid, MODE_STANDARD | stat.S_ISGID)
+
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ with self.assertRaisesRegexp(ValueError, 'cannot add file with setuid'):
+ create_tar_from_files(fh, {'test': uid})
+ with self.assertRaisesRegexp(ValueError, 'cannot add file with setuid'):
+ create_tar_from_files(fh, {'test': gid})
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ create_tar_from_files(fh, files)
+
+ # Output should be deterministic.
+ self.assertEqual(file_hash(tp), 'cd16cee6f13391abd94dfa435d2633b61ed727f1')
+
+ with tarfile.open(tp, 'r') as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_executable_preserved(self):
+ d = tempfile.mkdtemp()
+ try:
+ p = os.path.join(d, 'exec')
+ with open(p, 'wb') as fh:
+ fh.write('#!/bin/bash\n')
+ os.chmod(p, MODE_STANDARD | stat.S_IXUSR)
+
+ tp = os.path.join(d, 'test.tar')
+ with open(tp, 'wb') as fh:
+ create_tar_from_files(fh, {'exec': p})
+
+ self.assertEqual(file_hash(tp), '357e1b81c0b6cfdfa5d2d118d420025c3c76ee93')
+
+ with tarfile.open(tp, 'r') as tf:
+ m = tf.getmember('exec')
+ self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_gz_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ gp = os.path.join(d, 'test.tar.gz')
+ with open(gp, 'wb') as fh:
+ create_tar_gz_from_files(fh, files)
+
+ self.assertEqual(file_hash(gp), 'acb602239c1aeb625da5e69336775609516d60f5')
+
+ with tarfile.open(gp, 'r:gz') as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_tar_gz_name(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ gp = os.path.join(d, 'test.tar.gz')
+ with open(gp, 'wb') as fh:
+ create_tar_gz_from_files(fh, files, filename='foobar', compresslevel=1)
+
+ self.assertEqual(file_hash(gp), 'fd099f96480cc1100f37baa8e89a6b820dbbcbd3')
+
+ with tarfile.open(gp, 'r:gz') as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_bz2_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ bp = os.path.join(d, 'test.tar.bz2')
+ with open(bp, 'wb') as fh:
+ create_tar_bz2_from_files(fh, files)
+
+ self.assertEqual(file_hash(bp), '1827ad00dfe7acf857b7a1c95ce100361e3f6eea')
+
+ with tarfile.open(bp, 'r:bz2') as tf:
+ self._verify_basic_tarfile(tf)
+ finally:
+ shutil.rmtree(d)
+
+
+if __name__ == '__main__':
+ main()
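For reference, a minimal sketch of the mozpack.archive API these tests exercise: each create_tar*_from_files helper takes an open destination file object and a dict mapping archive paths to filesystem paths, and produces deterministic output (uid/gid 0, empty owner names, a fixed DEFAULT_MTIME, normalized modes). The paths below are placeholders.

    from mozpack.archive import create_tar_from_files

    # 'app/readme.txt' is the name inside the archive; the value must be an
    # existing regular file on disk (placeholder path here).
    with open('example.tar', 'wb') as fh:
        create_tar_from_files(fh, {'app/readme.txt': '/tmp/readme.txt'})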
diff --git a/python/mozbuild/mozpack/test/test_chrome_flags.py b/python/mozbuild/mozpack/test/test_chrome_flags.py
new file mode 100644
index 000000000..e6a5257e9
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_flags.py
@@ -0,0 +1,148 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+from mozpack.chrome.flags import (
+ Flag,
+ StringFlag,
+ VersionFlag,
+ Flags,
+)
+from mozpack.errors import ErrorMessage
+
+
+class TestFlag(unittest.TestCase):
+ def test_flag(self):
+ flag = Flag('flag')
+ self.assertEqual(str(flag), '')
+ self.assertTrue(flag.matches(False))
+ self.assertTrue(flag.matches('false'))
+ self.assertFalse(flag.matches('true'))
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag=')
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag=42')
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=false')
+
+ flag.add_definition('flag=1')
+ self.assertEqual(str(flag), 'flag=1')
+ self.assertTrue(flag.matches(True))
+ self.assertTrue(flag.matches('1'))
+ self.assertFalse(flag.matches('no'))
+
+ flag.add_definition('flag=true')
+ self.assertEqual(str(flag), 'flag=true')
+ self.assertTrue(flag.matches(True))
+ self.assertTrue(flag.matches('true'))
+ self.assertFalse(flag.matches('0'))
+
+ flag.add_definition('flag=no')
+ self.assertEqual(str(flag), 'flag=no')
+ self.assertTrue(flag.matches('false'))
+ self.assertFalse(flag.matches('1'))
+
+ flag.add_definition('flag')
+ self.assertEqual(str(flag), 'flag')
+ self.assertFalse(flag.matches('false'))
+ self.assertTrue(flag.matches('true'))
+ self.assertFalse(flag.matches(False))
+
+ def test_string_flag(self):
+ flag = StringFlag('flag')
+ self.assertEqual(str(flag), '')
+ self.assertTrue(flag.matches('foo'))
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag>=2')
+
+ flag.add_definition('flag=foo')
+ self.assertEqual(str(flag), 'flag=foo')
+ self.assertTrue(flag.matches('foo'))
+ self.assertFalse(flag.matches('bar'))
+
+ flag.add_definition('flag=bar')
+ self.assertEqual(str(flag), 'flag=foo flag=bar')
+ self.assertTrue(flag.matches('foo'))
+ self.assertTrue(flag.matches('bar'))
+ self.assertFalse(flag.matches('baz'))
+
+ flag = StringFlag('flag')
+ flag.add_definition('flag!=bar')
+ self.assertEqual(str(flag), 'flag!=bar')
+ self.assertTrue(flag.matches('foo'))
+ self.assertFalse(flag.matches('bar'))
+
+ def test_version_flag(self):
+ flag = VersionFlag('flag')
+ self.assertEqual(str(flag), '')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=2')
+
+ flag.add_definition('flag=1.0')
+ self.assertEqual(str(flag), 'flag=1.0')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertFalse(flag.matches('2.0'))
+
+ flag.add_definition('flag=2.0')
+ self.assertEqual(str(flag), 'flag=1.0 flag=2.0')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertTrue(flag.matches('2.0'))
+ self.assertFalse(flag.matches('3.0'))
+
+ flag = VersionFlag('flag')
+ flag.add_definition('flag>=2.0')
+ self.assertEqual(str(flag), 'flag>=2.0')
+ self.assertFalse(flag.matches('1.0'))
+ self.assertTrue(flag.matches('2.0'))
+ self.assertTrue(flag.matches('3.0'))
+
+ flag.add_definition('flag<1.10')
+ self.assertEqual(str(flag), 'flag>=2.0 flag<1.10')
+ self.assertTrue(flag.matches('1.0'))
+ self.assertTrue(flag.matches('1.9'))
+ self.assertFalse(flag.matches('1.10'))
+ self.assertFalse(flag.matches('1.20'))
+ self.assertTrue(flag.matches('2.0'))
+ self.assertTrue(flag.matches('3.0'))
+ self.assertRaises(Exception, flag.add_definition, 'flag<')
+ self.assertRaises(Exception, flag.add_definition, 'flag>')
+ self.assertRaises(Exception, flag.add_definition, 'flag>=')
+ self.assertRaises(Exception, flag.add_definition, 'flag<=')
+ self.assertRaises(Exception, flag.add_definition, 'flag!=1.0')
+
+
+class TestFlags(unittest.TestCase):
+ def setUp(self):
+ self.flags = Flags('contentaccessible=yes',
+ 'appversion>=3.5',
+ 'application=foo',
+ 'application=bar',
+ 'appversion<2.0',
+ 'platform',
+ 'abi!=Linux_x86-gcc3')
+
+ def test_flags_str(self):
+ self.assertEqual(str(self.flags), 'contentaccessible=yes ' +
+ 'appversion>=3.5 appversion<2.0 application=foo ' +
+ 'application=bar platform abi!=Linux_x86-gcc3')
+
+ def test_flags_match_unset(self):
+ self.assertTrue(self.flags.match(os='WINNT'))
+
+ def test_flags_match(self):
+ self.assertTrue(self.flags.match(application='foo'))
+ self.assertFalse(self.flags.match(application='qux'))
+
+ def test_flags_match_different(self):
+ self.assertTrue(self.flags.match(abi='WINNT_x86-MSVC'))
+ self.assertFalse(self.flags.match(abi='Linux_x86-gcc3'))
+
+ def test_flags_match_version(self):
+ self.assertTrue(self.flags.match(appversion='1.0'))
+ self.assertTrue(self.flags.match(appversion='1.5'))
+ self.assertFalse(self.flags.match(appversion='2.0'))
+ self.assertFalse(self.flags.match(appversion='3.0'))
+ self.assertTrue(self.flags.match(appversion='3.5'))
+ self.assertTrue(self.flags.match(appversion='3.10'))
+
+
+if __name__ == '__main__':
+ mozunit.main()
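A short sketch of the Flags interface covered above, using the same flag strings as the tests; the expected results follow from the assertions in TestFlags.

    from mozpack.chrome.flags import Flags

    flags = Flags('application=foo', 'application=bar', 'appversion>=3.5')
    flags.match(application='foo')   # True
    flags.match(application='qux')   # False
    flags.match(appversion='3.5')    # True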
diff --git a/python/mozbuild/mozpack/test/test_chrome_manifest.py b/python/mozbuild/mozpack/test/test_chrome_manifest.py
new file mode 100644
index 000000000..690c6acdc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_manifest.py
@@ -0,0 +1,149 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+import os
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestLocale,
+ ManifestSkin,
+ Manifest,
+ ManifestResource,
+ ManifestOverride,
+ ManifestComponent,
+ ManifestContract,
+ ManifestInterfaces,
+ ManifestBinaryComponent,
+ ManifestCategory,
+ ManifestStyle,
+ ManifestOverlay,
+ MANIFESTS_TYPES,
+ parse_manifest,
+ parse_manifest_line,
+)
+from mozpack.errors import errors, AccumulatedErrors
+from test_errors import TestErrors
+
+
+class TestManifest(unittest.TestCase):
+ def test_parse_manifest(self):
+ manifest = [
+ 'content global content/global/',
+ 'content global content/global/ application=foo application=bar' +
+ ' platform',
+ 'locale global en-US content/en-US/',
+ 'locale global en-US content/en-US/ application=foo',
+ 'skin global classic/1.0 content/skin/classic/',
+ 'skin global classic/1.0 content/skin/classic/ application=foo' +
+ ' os=WINNT',
+ '',
+ 'manifest pdfjs/chrome.manifest',
+ 'resource gre-resources toolkit/res/',
+ 'override chrome://global/locale/netError.dtd' +
+ ' chrome://browser/locale/netError.dtd',
+ '# Comment',
+ 'component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js',
+ 'contract @mozilla.org/foo;1' +
+ ' {b2bba4df-057d-41ea-b6b1-94a10a8ede68}',
+ 'interfaces foo.xpt',
+ 'binary-component bar.so',
+ 'category command-line-handler m-browser' +
+ ' @mozilla.org/browser/clh;1' +
+ ' application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
+ 'style chrome://global/content/customizeToolbar.xul' +
+ ' chrome://browser/skin/',
+ 'overlay chrome://global/content/viewSource.xul' +
+ ' chrome://browser/content/viewSourceOverlay.xul',
+ ]
+ other_manifest = [
+ 'content global content/global/'
+ ]
+ expected_result = [
+ ManifestContent('', 'global', 'content/global/'),
+ ManifestContent('', 'global', 'content/global/', 'application=foo',
+ 'application=bar', 'platform'),
+ ManifestLocale('', 'global', 'en-US', 'content/en-US/'),
+ ManifestLocale('', 'global', 'en-US', 'content/en-US/',
+ 'application=foo'),
+ ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/'),
+ ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/',
+ 'application=foo', 'os=WINNT'),
+ Manifest('', 'pdfjs/chrome.manifest'),
+ ManifestResource('', 'gre-resources', 'toolkit/res/'),
+ ManifestOverride('', 'chrome://global/locale/netError.dtd',
+ 'chrome://browser/locale/netError.dtd'),
+ ManifestComponent('', '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}',
+ 'foo.js'),
+ ManifestContract('', '@mozilla.org/foo;1',
+ '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}'),
+ ManifestInterfaces('', 'foo.xpt'),
+ ManifestBinaryComponent('', 'bar.so'),
+ ManifestCategory('', 'command-line-handler', 'm-browser',
+ '@mozilla.org/browser/clh;1', 'application=' +
+ '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'),
+ ManifestStyle('', 'chrome://global/content/customizeToolbar.xul',
+ 'chrome://browser/skin/'),
+ ManifestOverlay('', 'chrome://global/content/viewSource.xul',
+ 'chrome://browser/content/viewSourceOverlay.xul'),
+ ]
+ with mozunit.MockedOpen({'manifest': '\n'.join(manifest),
+ 'other/manifest': '\n'.join(other_manifest)}):
+ # Ensure we have tests for all types of manifests.
+ self.assertEqual(set(type(e) for e in expected_result),
+ set(MANIFESTS_TYPES.values()))
+ self.assertEqual(list(parse_manifest(os.curdir, 'manifest')),
+ expected_result)
+ self.assertEqual(list(parse_manifest(os.curdir, 'other/manifest')),
+ [ManifestContent('other', 'global',
+ 'content/global/')])
+
+ def test_manifest_rebase(self):
+ m = parse_manifest_line('chrome', 'content global content/global/')
+ m = m.rebase('')
+ self.assertEqual(str(m), 'content global chrome/content/global/')
+ m = m.rebase('chrome')
+ self.assertEqual(str(m), 'content global content/global/')
+
+ m = parse_manifest_line('chrome/foo', 'content global content/global/')
+ m = m.rebase('chrome')
+ self.assertEqual(str(m), 'content global foo/content/global/')
+ m = m.rebase('chrome/foo')
+ self.assertEqual(str(m), 'content global content/global/')
+
+ m = parse_manifest_line('modules/foo', 'resource foo ./')
+ m = m.rebase('modules')
+ self.assertEqual(str(m), 'resource foo foo/')
+ m = m.rebase('modules/foo')
+ self.assertEqual(str(m), 'resource foo ./')
+
+ m = parse_manifest_line('chrome', 'content browser browser/content/')
+ m = m.rebase('chrome/browser').move('jar:browser.jar!').rebase('')
+ self.assertEqual(str(m), 'content browser jar:browser.jar!/content/')
+
+
+class TestManifestErrors(TestErrors, unittest.TestCase):
+ def test_parse_manifest_errors(self):
+ manifest = [
+ 'skin global classic/1.0 content/skin/classic/ platform',
+ '',
+ 'binary-component bar.so',
+ 'unsupported foo',
+ ]
+ with mozunit.MockedOpen({'manifest': '\n'.join(manifest)}):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ list(parse_manifest(os.curdir, 'manifest'))
+ out = self.get_output()
+ # Expecting 2 errors
+ self.assertEqual(len(out), 2)
+ path = os.path.abspath('manifest')
+ # First on line 1
+ self.assertTrue(out[0].startswith('Error: %s:1: ' % path))
+ # Second on line 4
+ self.assertTrue(out[1].startswith('Error: %s:4: ' % path))
+
+
+if __name__ == '__main__':
+ mozunit.main()
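For context, a minimal sketch of parse_manifest_line and entry rebasing as exercised by test_manifest_rebase above:

    from mozpack.chrome.manifest import parse_manifest_line

    # Parse one line relative to the 'chrome' base, then rebase it to the root.
    entry = parse_manifest_line('chrome', 'content global content/global/')
    print(str(entry.rebase('')))  # 'content global chrome/content/global/'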
diff --git a/python/mozbuild/mozpack/test/test_copier.py b/python/mozbuild/mozpack/test/test_copier.py
new file mode 100644
index 000000000..6688b3d5e
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_copier.py
@@ -0,0 +1,529 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.copier import (
+ FileCopier,
+ FileRegistry,
+ FileRegistrySubtree,
+ Jarrer,
+)
+from mozpack.files import (
+ GeneratedFile,
+ ExistingFile,
+)
+from mozpack.mozjar import JarReader
+import mozpack.path as mozpath
+import unittest
+import mozunit
+import os
+import stat
+from mozpack.errors import ErrorMessage
+from mozpack.test.test_files import (
+ MockDest,
+ MatchTestTemplate,
+ TestWithTmpDir,
+)
+
+
+class BaseTestFileRegistry(MatchTestTemplate):
+ def add(self, path):
+ self.registry.add(path, GeneratedFile(path))
+
+ def do_check(self, pattern, result):
+ self.checked = True
+ if result:
+ self.assertTrue(self.registry.contains(pattern))
+ else:
+ self.assertFalse(self.registry.contains(pattern))
+ self.assertEqual(self.registry.match(pattern), result)
+
+ def do_test_file_registry(self, registry):
+ self.registry = registry
+ self.registry.add('foo', GeneratedFile('foo'))
+ bar = GeneratedFile('bar')
+ self.registry.add('bar', bar)
+ self.assertEqual(self.registry.paths(), ['foo', 'bar'])
+ self.assertEqual(self.registry['bar'], bar)
+
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo',
+ GeneratedFile('foo2'))
+
+ self.assertRaises(ErrorMessage, self.registry.remove, 'qux')
+
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar',
+ GeneratedFile('foobar'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar/baz',
+ GeneratedFile('foobar'))
+
+ self.assertEqual(self.registry.paths(), ['foo', 'bar'])
+
+ self.registry.remove('foo')
+ self.assertEqual(self.registry.paths(), ['bar'])
+ self.registry.remove('bar')
+ self.assertEqual(self.registry.paths(), [])
+
+ self.prepare_match_test()
+ self.do_match_test()
+ self.assertTrue(self.checked)
+ self.assertEqual(self.registry.paths(), [
+ 'bar',
+ 'foo/bar',
+ 'foo/baz',
+ 'foo/qux/1',
+ 'foo/qux/bar',
+ 'foo/qux/2/test',
+ 'foo/qux/2/test2',
+ ])
+
+ self.registry.remove('foo/qux')
+ self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz'])
+
+ self.registry.add('foo/qux', GeneratedFile('fooqux'))
+ self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz',
+ 'foo/qux'])
+ self.registry.remove('foo/b*')
+ self.assertEqual(self.registry.paths(), ['bar', 'foo/qux'])
+
+ self.assertEqual([f for f, c in self.registry], ['bar', 'foo/qux'])
+ self.assertEqual(len(self.registry), 2)
+
+ self.add('foo/.foo')
+ self.assertTrue(self.registry.contains('foo/.foo'))
+
+ def do_test_registry_paths(self, registry):
+ self.registry = registry
+
+ # Can't add a file if it requires a directory in place of a
+ # file we also require.
+ self.registry.add('foo', GeneratedFile('foo'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar',
+ GeneratedFile('foobar'))
+
+ # Can't add a file if we already have a directory there.
+ self.registry.add('bar/baz', GeneratedFile('barbaz'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'bar',
+ GeneratedFile('bar'))
+
+ # Bump the count of things that require bar/ to 2.
+ self.registry.add('bar/zot', GeneratedFile('barzot'))
+ self.assertRaises(ErrorMessage, self.registry.add, 'bar',
+ GeneratedFile('bar'))
+
+ # Drop the count of things that require bar/ to 1.
+ self.registry.remove('bar/baz')
+ self.assertRaises(ErrorMessage, self.registry.add, 'bar',
+ GeneratedFile('bar'))
+
+ # Drop the count of things that require bar/ to 0.
+ self.registry.remove('bar/zot')
+ self.registry.add('bar/zot', GeneratedFile('barzot'))
+
+class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase):
+ def test_partial_paths(self):
+ cases = {
+ 'foo/bar/baz/zot': ['foo/bar/baz', 'foo/bar', 'foo'],
+ 'foo/bar': ['foo'],
+ 'bar': [],
+ }
+ reg = FileRegistry()
+ for path, parts in cases.iteritems():
+ self.assertEqual(reg._partial_paths(path), parts)
+
+ def test_file_registry(self):
+ self.do_test_file_registry(FileRegistry())
+
+ def test_registry_paths(self):
+ self.do_test_registry_paths(FileRegistry())
+
+ def test_required_directories(self):
+ self.registry = FileRegistry()
+
+ self.registry.add('foo', GeneratedFile('foo'))
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add('bar/baz', GeneratedFile('barbaz'))
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.add('bar/zot', GeneratedFile('barzot'))
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.add('bar/zap/zot', GeneratedFile('barzapzot'))
+ self.assertEqual(self.registry.required_directories(), {'bar', 'bar/zap'})
+
+ self.registry.remove('bar/zap/zot')
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.remove('bar/baz')
+ self.assertEqual(self.registry.required_directories(), {'bar'})
+
+ self.registry.remove('bar/zot')
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add('x/y/z', GeneratedFile('xyz'))
+ self.assertEqual(self.registry.required_directories(), {'x', 'x/y'})
+
+
+class TestFileRegistrySubtree(BaseTestFileRegistry, unittest.TestCase):
+ def test_file_registry_subtree_base(self):
+ registry = FileRegistry()
+ self.assertEqual(registry, FileRegistrySubtree('', registry))
+ self.assertNotEqual(registry, FileRegistrySubtree('base', registry))
+
+ def create_registry(self):
+ registry = FileRegistry()
+ registry.add('foo/bar', GeneratedFile('foo/bar'))
+ registry.add('baz/qux', GeneratedFile('baz/qux'))
+ return FileRegistrySubtree('base/root', registry)
+
+ def test_file_registry_subtree(self):
+ self.do_test_file_registry(self.create_registry())
+
+ def test_registry_paths_subtree(self):
+ registry = FileRegistry()
+ self.do_test_registry_paths(self.create_registry())
+
+
+class TestFileCopier(TestWithTmpDir):
+ def all_dirs(self, base):
+ all_dirs = set()
+ for root, dirs, files in os.walk(base):
+ if not dirs:
+ all_dirs.add(mozpath.relpath(root, base))
+ return all_dirs
+
+ def all_files(self, base):
+ all_files = set()
+ for root, dirs, files in os.walk(base):
+ for f in files:
+ all_files.add(
+ mozpath.join(mozpath.relpath(root, base), f))
+ return all_files
+
+ def test_file_copier(self):
+ copier = FileCopier()
+ copier.add('foo/bar', GeneratedFile('foobar'))
+ copier.add('foo/qux', GeneratedFile('fooqux'))
+ copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
+ copier.add('bar', GeneratedFile('bar'))
+ copier.add('qux/foo', GeneratedFile('quxfoo'))
+ copier.add('qux/bar', GeneratedFile(''))
+
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(self.all_dirs(self.tmpdir),
+ set(['foo/deep/nested/directory', 'qux']))
+
+ self.assertEqual(result.updated_files, set(self.tmppath(p) for p in
+ self.all_files(self.tmpdir)))
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ copier.remove('foo')
+ copier.add('test', GeneratedFile('test'))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
+ self.assertEqual(result.removed_files, set(self.tmppath(p) for p in
+ ('foo/bar', 'foo/qux', 'foo/deep/nested/directory/file')))
+
+ def test_symlink_directory_replaced(self):
+ """Directory symlinks in destination are replaced if they need to be
+ real directories."""
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
+
+ os.makedirs(self.tmppath('dest/foo'))
+ dummy = self.tmppath('dummy')
+ os.mkdir(dummy)
+ link = self.tmppath('dest/foo/bar')
+ os.symlink(dummy, link)
+
+ result = copier.copy(dest)
+
+ st = os.lstat(link)
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ def test_remove_unaccounted_directory_symlinks(self):
+ """Directory symlinks in destination that are not in the way are
+ deleted according to remove_unaccounted and
+ remove_all_directory_symlinks.
+ """
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
+
+ os.makedirs(self.tmppath('dest/foo'))
+ dummy = self.tmppath('dummy')
+ os.mkdir(dummy)
+
+ os.mkdir(self.tmppath('dest/zot'))
+ link = self.tmppath('dest/zot/zap')
+ os.symlink(dummy, link)
+
+ # If not remove_unaccounted but remove_empty_directories, then
+ # the symlinked directory remains (as does its containing
+ # directory).
+ result = copier.copy(dest, remove_unaccounted=False,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False)
+
+ st = os.lstat(link)
+ self.assertTrue(stat.S_ISLNK(st.st_mode))
+ self.assertFalse(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(['foo/bar']))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ # If remove_unaccounted but not remove_empty_directories, then
+ # only the symlinked directory is removed.
+ result = copier.copy(dest, remove_unaccounted=True,
+ remove_empty_directories=False,
+ remove_all_directory_symlinks=False)
+
+ st = os.lstat(self.tmppath('dest/zot'))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set())
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(['foo/bar', 'zot']))
+
+ # If remove_unaccounted and remove_empty_directories, then
+ # both the symlink and its containing directory are removed.
+ link = self.tmppath('dest/zot/zap')
+ os.symlink(dummy, link)
+
+ result = copier.copy(dest, remove_unaccounted=True,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False)
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set([self.tmppath('dest/zot')]))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(['foo/bar']))
+
+ def test_permissions(self):
+ """Ensure files without write permission can be deleted."""
+ with open(self.tmppath('dummy'), 'a'):
+ pass
+
+ p = self.tmppath('no_perms')
+ with open(p, 'a'):
+ pass
+
+ # Make file and directory unwritable. Reminder: making a directory
+ # unwritable prevents modifications (including deletes) from the list
+ # of files in that directory.
+ os.chmod(p, 0o400)
+ os.chmod(self.tmpdir, 0o400)
+
+ copier = FileCopier()
+ copier.add('dummy', GeneratedFile('content'))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(result.removed_files_count, 1)
+ self.assertFalse(os.path.exists(p))
+
+ def test_no_remove(self):
+ copier = FileCopier()
+ copier.add('foo', GeneratedFile('foo'))
+
+ with open(self.tmppath('bar'), 'a'):
+ pass
+
+ os.mkdir(self.tmppath('emptydir'))
+ d = self.tmppath('populateddir')
+ os.mkdir(d)
+
+ with open(self.tmppath('populateddir/foo'), 'a'):
+ pass
+
+ result = copier.copy(self.tmpdir, remove_unaccounted=False)
+
+ self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar',
+ 'populateddir/foo']))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(['populateddir']))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories,
+ set([self.tmppath('emptydir')]))
+
+ def test_no_remove_empty_directories(self):
+ copier = FileCopier()
+ copier.add('foo', GeneratedFile('foo'))
+
+ with open(self.tmppath('bar'), 'a'):
+ pass
+
+ os.mkdir(self.tmppath('emptydir'))
+ d = self.tmppath('populateddir')
+ os.mkdir(d)
+
+ with open(self.tmppath('populateddir/foo'), 'a'):
+ pass
+
+ result = copier.copy(self.tmpdir, remove_unaccounted=False,
+ remove_empty_directories=False)
+
+ self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar',
+ 'populateddir/foo']))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(['emptydir',
+ 'populateddir']))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ def test_optional_exists_creates_unneeded_directory(self):
+ """Demonstrate that a directory not strictly required, but specified
+ as the path to an optional file, will be unnecessarily created.
+
+ This behaviour is wrong; fixing it is tracked by Bug 972432;
+ and this test exists to guard against unexpected changes in
+ behaviour.
+ """
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar', ExistingFile(required=False))
+
+ result = copier.copy(dest)
+
+ st = os.lstat(self.tmppath('dest/foo'))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ # What's worse, we have no record that dest was created.
+ self.assertEquals(len(result.updated_files), 0)
+
+ # But we do have an erroneous record of an optional file
+ # existing when it does not.
+ self.assertIn(self.tmppath('dest/foo/bar'), result.existing_files)
+
+ def test_remove_unaccounted_file_registry(self):
+ """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""
+
+ dest = self.tmppath('dest')
+
+ copier = FileCopier()
+ copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
+ copier.add('foo/bar/qux', GeneratedFile('foobarqux'))
+ copier.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
+ copier.add('foo/toto/tata', GeneratedFile('footototata'))
+
+ os.makedirs(os.path.join(dest, 'bar'))
+ with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh:
+ fh.write('barbar');
+ os.makedirs(os.path.join(dest, 'foo', 'toto'))
+ with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh:
+ fh.write('foototototo');
+
+ result = copier.copy(dest, remove_unaccounted=False)
+
+ self.assertEqual(self.all_files(dest),
+ set(copier.paths()) | { 'foo/toto/toto', 'bar/bar'})
+ self.assertEqual(self.all_dirs(dest),
+ {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'})
+
+ copier2 = FileCopier()
+ copier2.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
+
+ # We expect only files copied from the first copier to be removed,
+ # not the extra file that was there beforehand.
+ result = copier2.copy(dest, remove_unaccounted=copier)
+
+ self.assertEqual(self.all_files(dest),
+ set(copier2.paths()) | { 'foo/toto/toto', 'bar/bar'})
+ self.assertEqual(self.all_dirs(dest),
+ {'foo/hoge', 'foo/toto', 'bar'})
+ self.assertEqual(result.updated_files,
+ {self.tmppath('dest/foo/hoge/fuga')})
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(result.removed_files, {self.tmppath(p) for p in
+ ('dest/foo/bar/baz', 'dest/foo/bar/qux', 'dest/foo/toto/tata')})
+ self.assertEqual(result.removed_directories,
+ {self.tmppath('dest/foo/bar')})
+
+
+class TestJarrer(unittest.TestCase):
+ def check_jar(self, dest, copier):
+ jar = JarReader(fileobj=dest)
+ self.assertEqual([f.filename for f in jar], copier.paths())
+ for f in jar:
+ self.assertEqual(f.uncompressed_data.read(),
+ copier[f.filename].content)
+
+ def test_jarrer(self):
+ copier = Jarrer()
+ copier.add('foo/bar', GeneratedFile('foobar'))
+ copier.add('foo/qux', GeneratedFile('fooqux'))
+ copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
+ copier.add('bar', GeneratedFile('bar'))
+ copier.add('qux/foo', GeneratedFile('quxfoo'))
+ copier.add('qux/bar', GeneratedFile(''))
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove('foo')
+ copier.add('test', GeneratedFile('test'))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove('test')
+ copier.add('test', GeneratedFile('replaced-content'))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ preloaded = ['qux/bar', 'bar']
+ copier.preload(preloaded)
+ copier.copy(dest)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertEqual([f.filename for f in jar], preloaded +
+ [p for p in copier.paths() if not p in preloaded])
+ self.assertEqual(jar.last_preloaded, preloaded[-1])
+
+
+ def test_jarrer_compress(self):
+ copier = Jarrer()
+ copier.add('foo/bar', GeneratedFile('ffffff'))
+ copier.add('foo/qux', GeneratedFile('ffffff'), compress=False)
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertTrue(jar['foo/bar'].compressed)
+ self.assertFalse(jar['foo/qux'].compressed)
+
+
+if __name__ == '__main__':
+ mozunit.main()
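A minimal sketch of the FileCopier flow these tests cover: register file objects against relative paths, then materialize them under a destination directory; copy() returns a result describing what was updated or removed. The destination path below is a placeholder.

    from mozpack.copier import FileCopier
    from mozpack.files import GeneratedFile

    copier = FileCopier()
    copier.add('foo/bar.txt', GeneratedFile('hello'))  # relative path + content
    result = copier.copy('/tmp/staging')               # placeholder destination
    print(result.updated_files)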
diff --git a/python/mozbuild/mozpack/test/test_errors.py b/python/mozbuild/mozpack/test/test_errors.py
new file mode 100644
index 000000000..16e2b0496
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_errors.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.errors import (
+ errors,
+ ErrorMessage,
+ AccumulatedErrors,
+)
+import unittest
+import mozunit
+import sys
+from cStringIO import StringIO
+
+
+class TestErrors(object):
+ def setUp(self):
+ errors.out = StringIO()
+ errors.ignore_errors(False)
+
+ def tearDown(self):
+ errors.out = sys.stderr
+
+ def get_output(self):
+ return [l.strip() for l in errors.out.getvalue().splitlines()]
+
+
+class TestErrorsImpl(TestErrors, unittest.TestCase):
+ def test_plain_error(self):
+ errors.warn('foo')
+ self.assertRaises(ErrorMessage, errors.error, 'foo')
+ self.assertRaises(ErrorMessage, errors.fatal, 'foo')
+ self.assertEquals(self.get_output(), ['Warning: foo'])
+
+ def test_ignore_errors(self):
+ errors.ignore_errors()
+ errors.warn('foo')
+ errors.error('bar')
+ self.assertRaises(ErrorMessage, errors.fatal, 'foo')
+ self.assertEquals(self.get_output(), ['Warning: foo', 'Warning: bar'])
+
+ def test_no_error(self):
+ with errors.accumulate():
+ errors.warn('1')
+
+ def test_simple_error(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error('1')
+ self.assertEquals(self.get_output(), ['Error: 1'])
+
+ def test_error_loop(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ for i in range(3):
+ errors.error('%d' % i)
+ self.assertEquals(self.get_output(),
+ ['Error: 0', 'Error: 1', 'Error: 2'])
+
+ def test_multiple_errors(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error('foo')
+ for i in range(3):
+ if i == 2:
+ errors.warn('%d' % i)
+ else:
+ errors.error('%d' % i)
+ errors.error('bar')
+ self.assertEquals(self.get_output(),
+ ['Error: foo', 'Error: 0', 'Error: 1',
+ 'Warning: 2', 'Error: bar'])
+
+ def test_errors_context(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ self.assertEqual(errors.get_context(), None)
+ with errors.context('foo', 1):
+ self.assertEqual(errors.get_context(), ('foo', 1))
+ errors.error('a')
+ with errors.context('bar', 2):
+ self.assertEqual(errors.get_context(), ('bar', 2))
+ errors.error('b')
+ self.assertEqual(errors.get_context(), ('foo', 1))
+ errors.error('c')
+ self.assertEqual(self.get_output(), [
+ 'Error: foo:1: a',
+ 'Error: bar:2: b',
+ 'Error: foo:1: c',
+ ])
+
+if __name__ == '__main__':
+ mozunit.main()
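A small sketch of the errors.accumulate/context pattern tested above: errors raised inside the accumulate() block are reported as they occur and cause AccumulatedErrors to be raised when the block exits.

    from mozpack.errors import errors, AccumulatedErrors

    try:
        with errors.accumulate():
            with errors.context('example.manifest', 1):  # placeholder file/line
                errors.error('first problem')
            errors.error('second problem')
    except AccumulatedErrors:
        pass  # both messages were already printed to errors.out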
diff --git a/python/mozbuild/mozpack/test/test_files.py b/python/mozbuild/mozpack/test/test_files.py
new file mode 100644
index 000000000..6fd617828
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_files.py
@@ -0,0 +1,1160 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ensureParentDir
+
+from mozpack.errors import (
+ ErrorMessage,
+ errors,
+)
+from mozpack.files import (
+ AbsoluteSymlinkFile,
+ ComposedFinder,
+ DeflatedFile,
+ Dest,
+ ExistingFile,
+ ExtractedTarFile,
+ FileFinder,
+ File,
+ GeneratedFile,
+ JarFinder,
+ TarFinder,
+ ManifestFile,
+ MercurialFile,
+ MercurialRevisionFinder,
+ MinifiedJavaScript,
+ MinifiedProperties,
+ PreprocessedFile,
+ XPTFile,
+)
+
+# We don't have hglib installed everywhere.
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+try:
+ from mozpack.hg import MercurialNativeRevisionFinder
+except ImportError:
+ MercurialNativeRevisionFinder = None
+
+from mozpack.mozjar import (
+ JarReader,
+ JarWriter,
+)
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestResource,
+ ManifestLocale,
+ ManifestOverride,
+)
+import unittest
+import mozfile
+import mozunit
+import os
+import random
+import string
+import sys
+import tarfile
+import mozpack.path as mozpath
+from tempfile import mkdtemp
+from io import BytesIO
+from StringIO import StringIO
+from xpt import Typelib
+
+
+class TestWithTmpDir(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+
+ self.symlink_supported = False
+
+ if not hasattr(os, 'symlink'):
+ return
+
+ dummy_path = self.tmppath('dummy_file')
+ with open(dummy_path, 'a'):
+ pass
+
+ try:
+ os.symlink(dummy_path, self.tmppath('dummy_symlink'))
+ os.remove(self.tmppath('dummy_symlink'))
+ except EnvironmentError:
+ pass
+ finally:
+ os.remove(dummy_path)
+
+ self.symlink_supported = True
+
+
+ def tearDown(self):
+ mozfile.rmtree(self.tmpdir)
+
+ def tmppath(self, relpath):
+ return os.path.normpath(os.path.join(self.tmpdir, relpath))
+
+
+class MockDest(BytesIO, Dest):
+ def __init__(self):
+ BytesIO.__init__(self)
+ self.mode = None
+
+ def read(self, length=-1):
+ if self.mode != 'r':
+ self.seek(0)
+ self.mode = 'r'
+ return BytesIO.read(self, length)
+
+ def write(self, data):
+ if self.mode != 'w':
+ self.seek(0)
+ self.truncate(0)
+ self.mode = 'w'
+ return BytesIO.write(self, data)
+
+ def exists(self):
+ return True
+
+ def close(self):
+ if self.mode:
+ self.mode = None
+
+
+class DestNoWrite(Dest):
+ def write(self, data):
+ raise RuntimeError
+
+
+class TestDest(TestWithTmpDir):
+ def test_dest(self):
+ dest = Dest(self.tmppath('dest'))
+ self.assertFalse(dest.exists())
+ dest.write('foo')
+ self.assertTrue(dest.exists())
+ dest.write('foo')
+ self.assertEqual(dest.read(4), 'foof')
+ self.assertEqual(dest.read(), 'oo')
+ self.assertEqual(dest.read(), '')
+ dest.write('bar')
+ self.assertEqual(dest.read(4), 'bar')
+ dest.close()
+ self.assertEqual(dest.read(), 'bar')
+ dest.write('foo')
+ dest.close()
+ dest.write('qux')
+ self.assertEqual(dest.read(), 'qux')
+
+rand = ''.join(random.choice(string.letters) for i in xrange(131597))
+samples = [
+ '',
+ 'test',
+ 'fooo',
+ 'same',
+ 'same',
+ 'Different and longer',
+ rand,
+ rand,
+ rand[:-1] + '_',
+ 'test'
+]
+
+
+class TestFile(TestWithTmpDir):
+ def test_file(self):
+ '''
+ Check that File.copy yields the proper content in the destination file
+ in all situations that trigger different code paths:
+ - different content
+ - different content of the same size
+ - same content
+ - long content
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+
+ for content in samples:
+ with open(src, 'wb') as tmp:
+ tmp.write(content)
+ # Ensure the destination file, when it exists, is older than the
+ # source
+ if os.path.exists(dest):
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, 'rb').read())
+ self.assertEqual(content, f.open().read())
+ self.assertEqual(content, f.open().read())
+
+ def test_file_dest(self):
+ '''
+ Similar to test_file, but for a destination object instead of
+ a destination file. This ensures the destination object is being
+ used properly by File.copy, ensuring that other subclasses of Dest
+ will work.
+ '''
+ src = self.tmppath('src')
+ dest = MockDest()
+
+ for content in samples:
+ with open(src, 'wb') as tmp:
+ tmp.write(content)
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, dest.getvalue())
+
+ def test_file_open(self):
+ '''
+ Test whether File.open returns an appropriately reset file object.
+ '''
+ src = self.tmppath('src')
+ content = ''.join(samples)
+ with open(src, 'wb') as tmp:
+ tmp.write(content)
+
+ f = File(src)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_file_no_write(self):
+ '''
+ Test various conditions where File.copy is expected not to write
+ in the destination file.
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('test')
+
+ # Initial copy
+ f = File(src)
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When the source file is newer, but with the same content, no copy
+ # should occur
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, 'wb') as tmp:
+ tmp.write('fooo')
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ f.copy(dest, skip_if_older=False)
+ self.assertEqual('fooo', open(dest, 'rb').read())
+
+
+class TestAbsoluteSymlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ AbsoluteSymlinkFile('/foo')
+
+ with self.assertRaisesRegexp(ValueError, 'Symlink target not absolute'):
+ AbsoluteSymlinkFile('./foo')
+
+ def test_symlink_file(self):
+ source = self.tmppath('test_path')
+ with open(source, 'wt') as fh:
+ fh.write('Hello world')
+
+ s = AbsoluteSymlinkFile(source)
+ dest = self.tmppath('symlink')
+ self.assertTrue(s.copy(dest))
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, 'Hello world')
+
+ def test_replace_file_with_symlink(self):
+ # If symlinks are supported, an existing file should be replaced by a
+ # symlink.
+ source = self.tmppath('test_path')
+ with open(source, 'wt') as fh:
+ fh.write('source')
+
+ dest = self.tmppath('dest')
+ with open(dest, 'a'):
+ pass
+
+ s = AbsoluteSymlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, 'source')
+
+ def test_replace_symlink(self):
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath('source')
+ with open(source, 'a'):
+ pass
+
+ dest = self.tmppath('dest')
+
+ os.symlink(self.tmppath('bad'), dest)
+ self.assertTrue(os.path.islink(dest))
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertTrue(s.copy(dest))
+
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ def test_noop(self):
+ if not hasattr(os, 'symlink'):
+ return
+
+ source = self.tmppath('source')
+ dest = self.tmppath('dest')
+
+ with open(source, 'a'):
+ pass
+
+ os.symlink(source, dest)
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+class TestPreprocessedFile(TestWithTmpDir):
+ def test_preprocess(self):
+ '''
+ Test that copying the file invokes the preprocessor
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\ntest\n#endif')
+
+ f = PreprocessedFile(src, depfile_path=None, marker='#', defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual('test\n', open(dest, 'rb').read())
+
+ def test_preprocess_file_no_write(self):
+ '''
+ Test various conditions where PreprocessedFile.copy is expected not to
+ write in the destination file.
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+ depfile = self.tmppath('depfile')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\ntest\n#endif')
+
+ # Initial copy
+ f = PreprocessedFile(src, depfile_path=depfile, marker='#', defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ # Ensure subsequent copies won't trigger writes
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual('test\n', open(dest, 'rb').read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\nfooo\n#endif')
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual('test\n', open(dest, 'rb').read())
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ self.assertTrue(f.copy(dest, skip_if_older=False))
+ self.assertEqual('fooo\n', open(dest, 'rb').read())
+
+ def test_preprocess_file_dependencies(self):
+ '''
+ Test that the preprocess runs if the dependencies of the source change
+ '''
+ src = self.tmppath('src')
+ dest = self.tmppath('dest')
+ incl = self.tmppath('incl')
+ deps = self.tmppath('src.pp')
+
+ with open(src, 'wb') as tmp:
+ tmp.write('#ifdef FOO\ntest\n#endif')
+
+ with open(incl, 'wb') as tmp:
+ tmp.write('foo bar')
+
+ # Initial copy
+ f = PreprocessedFile(src, depfile_path=deps, marker='#', defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ # Update the source so it #includes the include file.
+ with open(src, 'wb') as tmp:
+ tmp.write('#include incl\n')
+ time = os.path.getmtime(dest) + 1
+ os.utime(src, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual('foo bar', open(dest, 'rb').read())
+
+ # If one of the dependencies changes, the file should be updated. The
+ # mtime of the dependency is set after the destination file, to avoid
+ # both files having the same time.
+ with open(incl, 'wb') as tmp:
+ tmp.write('quux')
+ time = os.path.getmtime(dest) + 1
+ os.utime(incl, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual('quux', open(dest, 'rb').read())
+
+ # Perform one final copy to confirm that we don't run the preprocessor
+ # again. We update the mtime of the destination so it's newer than the
+ # input files. This would "just work" if we weren't changing
+ time = os.path.getmtime(incl) + 1
+ os.utime(dest, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+
+ def test_replace_symlink(self):
+ '''
+ Test that if the destination exists, and is a symlink, the target of
+ the symlink is not overwritten by the preprocessor output.
+ '''
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath('source')
+ dest = self.tmppath('dest')
+ pp_source = self.tmppath('pp_in')
+ deps = self.tmppath('deps')
+
+ with open(source, 'a'):
+ pass
+
+ os.symlink(source, dest)
+ self.assertTrue(os.path.islink(dest))
+
+ with open(pp_source, 'wb') as tmp:
+ tmp.write('#define FOO\nPREPROCESSED')
+
+ f = PreprocessedFile(pp_source, depfile_path=deps, marker='#',
+ defines={'FOO': True})
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual('PREPROCESSED', open(dest, 'rb').read())
+ self.assertFalse(os.path.islink(dest))
+ self.assertEqual('', open(source, 'rb').read())
+
+class TestExistingFile(TestWithTmpDir):
+ def test_required_missing_dest(self):
+ with self.assertRaisesRegexp(ErrorMessage, 'Required existing file'):
+ f = ExistingFile(required=True)
+ f.copy(self.tmppath('dest'))
+
+ def test_required_existing_dest(self):
+ p = self.tmppath('dest')
+ with open(p, 'a'):
+ pass
+
+ f = ExistingFile(required=True)
+ f.copy(p)
+
+ def test_optional_missing_dest(self):
+ f = ExistingFile(required=False)
+ f.copy(self.tmppath('dest'))
+
+ def test_optional_existing_dest(self):
+ p = self.tmppath('dest')
+ with open(p, 'a'):
+ pass
+
+ f = ExistingFile(required=False)
+ f.copy(p)
+
+
+class TestGeneratedFile(TestWithTmpDir):
+ def test_generated_file(self):
+ '''
+ Check that GeneratedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ '''
+ dest = self.tmppath('dest')
+
+ for content in samples:
+ f = GeneratedFile(content)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, 'rb').read())
+
+ def test_generated_file_open(self):
+ '''
+ Test whether GeneratedFile.open returns an appropriately reset file
+ object.
+ '''
+ content = ''.join(samples)
+ f = GeneratedFile(content)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_generated_file_no_write(self):
+ '''
+ Test various conditions where GeneratedFile.copy is expected not to
+ write in the destination file.
+ '''
+ dest = self.tmppath('dest')
+
+ # Initial copy
+ f = GeneratedFile('test')
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When using a new instance with the same content, no copy should occur
+ f = GeneratedFile('test')
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = GeneratedFile('fooo')
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+
+class TestDeflatedFile(TestWithTmpDir):
+ def test_deflated_file(self):
+ '''
+ Check that DeflatedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ '''
+ src = self.tmppath('src.jar')
+ dest = self.tmppath('dest')
+
+ contents = {}
+ with JarWriter(src) as jar:
+ for content in samples:
+ name = ''.join(random.choice(string.letters)
+ for i in xrange(8))
+ jar.add(name, content, compress=True)
+ contents[name] = content
+
+ for j in JarReader(src):
+ f = DeflatedFile(j)
+ f.copy(dest)
+ self.assertEqual(contents[j.filename], open(dest, 'rb').read())
+
+ def test_deflated_file_open(self):
+ '''
+ Test whether DeflatedFile.open returns an appropriately reset file
+ object.
+ '''
+ src = self.tmppath('src.jar')
+ content = ''.join(samples)
+ with JarWriter(src) as jar:
+ jar.add('content', content)
+
+ f = DeflatedFile(JarReader(src)['content'])
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_deflated_file_no_write(self):
+ '''
+ Test various conditions where DeflatedFile.copy is expected not to
+ write in the destination file.
+ '''
+ src = self.tmppath('src.jar')
+ dest = self.tmppath('dest')
+
+ with JarWriter(src) as jar:
+ jar.add('test', 'test')
+ jar.add('test2', 'test')
+ jar.add('fooo', 'fooo')
+
+ jar = JarReader(src)
+ # Initial copy
+ f = DeflatedFile(jar['test'])
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # When using a different file with the same content, no copy should
+ # occur
+ f = DeflatedFile(jar['test2'])
+ f.copy(DestNoWrite(dest))
+ self.assertEqual('test', open(dest, 'rb').read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = DeflatedFile(jar['fooo'])
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+
+class TestManifestFile(TestWithTmpDir):
+ def test_manifest_file(self):
+ f = ManifestFile('chrome')
+ f.add(ManifestContent('chrome', 'global', 'toolkit/content/global/'))
+ f.add(ManifestResource('chrome', 'gre-resources', 'toolkit/res/'))
+ f.add(ManifestResource('chrome/pdfjs', 'pdfjs', './'))
+ f.add(ManifestContent('chrome/pdfjs', 'pdfjs', 'pdfjs'))
+ f.add(ManifestLocale('chrome', 'browser', 'en-US',
+ 'en-US/locale/browser/'))
+
+ f.copy(self.tmppath('chrome.manifest'))
+ self.assertEqual(open(self.tmppath('chrome.manifest')).readlines(), [
+ 'content global toolkit/content/global/\n',
+ 'resource gre-resources toolkit/res/\n',
+ 'resource pdfjs pdfjs/\n',
+ 'content pdfjs pdfjs/pdfjs\n',
+ 'locale browser en-US en-US/locale/browser/\n',
+ ])
+
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent('', 'global', 'toolkit/content/global/')
+ )
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestOverride('chrome', 'chrome://global/locale/netError.dtd',
+ 'chrome://browser/locale/netError.dtd')
+ )
+
+ f.remove(ManifestContent('chrome', 'global',
+ 'toolkit/content/global/'))
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent('chrome', 'global', 'toolkit/content/global/')
+ )
+
+ f.copy(self.tmppath('chrome.manifest'))
+ content = open(self.tmppath('chrome.manifest')).read()
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+# Compiled typelib for the following IDL:
+# interface foo;
+# [scriptable, uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)]
+# interface bar {
+# void bar(in foo f);
+# };
+# We need to make this [scriptable] so it doesn't get deleted from the
+# typelib. We don't need to make the foo interfaces below [scriptable],
+# because they will be automatically included by virtue of being an
+# argument to a method of |bar|.
+bar_xpt = GeneratedFile(
+ b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
+ b'\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C' +
+ b'\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
+ b'\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F' +
+ b'\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00' +
+ b'\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00' +
+ b'\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
+ b'\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x80'
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)]
+# interface foo {
+# void foo();
+# };
+foo_xpt = GeneratedFile(
+ b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
+ b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' +
+ b'\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA' +
+ b'\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' +
+ b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
+ b'\x05\x00\x80\x06\x00\x00\x00'
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)]
+# interface foo {
+# void foo();
+# };
+foo2_xpt = GeneratedFile(
+ b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
+ b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' +
+ b'\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39' +
+ b'\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' +
+ b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
+ b'\x05\x00\x80\x06\x00\x00\x00'
+)
+
+
+def read_interfaces(file):
+ return dict((i.name, i) for i in Typelib.read(file).interfaces)
+
+
+class TestXPTFile(TestWithTmpDir):
+ def test_xpt_file(self):
+ x = XPTFile()
+ x.add(foo_xpt)
+ x.add(bar_xpt)
+ x.copy(self.tmppath('interfaces.xpt'))
+
+ foo = read_interfaces(foo_xpt.open())
+ foo2 = read_interfaces(foo2_xpt.open())
+ bar = read_interfaces(bar_xpt.open())
+ linked = read_interfaces(self.tmppath('interfaces.xpt'))
+ self.assertEqual(foo['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x.remove(foo_xpt)
+ x.copy(self.tmppath('interfaces2.xpt'))
+ linked = read_interfaces(self.tmppath('interfaces2.xpt'))
+ self.assertEqual(bar['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x.add(foo_xpt)
+ x.copy(DestNoWrite(self.tmppath('interfaces.xpt')))
+ linked = read_interfaces(self.tmppath('interfaces.xpt'))
+ self.assertEqual(foo['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x = XPTFile()
+ x.add(foo2_xpt)
+ x.add(bar_xpt)
+ x.copy(self.tmppath('interfaces.xpt'))
+ linked = read_interfaces(self.tmppath('interfaces.xpt'))
+ self.assertEqual(foo2['foo'], linked['foo'])
+ self.assertEqual(bar['bar'], linked['bar'])
+
+ x = XPTFile()
+ x.add(foo_xpt)
+ x.add(foo2_xpt)
+ x.add(bar_xpt)
+ from xpt import DataError
+ self.assertRaises(DataError, x.copy, self.tmppath('interfaces.xpt'))
+
+
+class TestMinifiedProperties(TestWithTmpDir):
+ def test_minified_properties(self):
+ propLines = [
+ '# Comments are removed',
+ 'foo = bar',
+ '',
+ '# Another comment',
+ ]
+ prop = GeneratedFile('\n'.join(propLines))
+ self.assertEqual(MinifiedProperties(prop).open().readlines(),
+ ['foo = bar\n', '\n'])
+ open(self.tmppath('prop'), 'wb').write('\n'.join(propLines))
+ MinifiedProperties(File(self.tmppath('prop'))) \
+ .copy(self.tmppath('prop2'))
+ self.assertEqual(open(self.tmppath('prop2')).readlines(),
+ ['foo = bar\n', '\n'])
+
+
+class TestMinifiedJavaScript(TestWithTmpDir):
+ orig_lines = [
+ '// Comment line',
+ 'let foo = "bar";',
+ 'var bar = true;',
+ '',
+ '// Another comment',
+ ]
+
+ def test_minified_javascript(self):
+ orig_f = GeneratedFile('\n'.join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f)
+
+ mini_lines = min_f.open().readlines()
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def _verify_command(self, code):
+ our_dir = os.path.abspath(os.path.dirname(__file__))
+ return [
+ sys.executable,
+ os.path.join(our_dir, 'support', 'minify_js_verify.py'),
+ code,
+ ]
+
+ def test_minified_verify_success(self):
+ orig_f = GeneratedFile('\n'.join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f,
+ verify_command=self._verify_command('0'))
+
+ mini_lines = min_f.open().readlines()
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def test_minified_verify_failure(self):
+ orig_f = GeneratedFile('\n'.join(self.orig_lines))
+ errors.out = StringIO()
+ min_f = MinifiedJavaScript(orig_f,
+ verify_command=self._verify_command('1'))
+
+ mini_lines = min_f.open().readlines()
+ output = errors.out.getvalue()
+ errors.out = sys.stderr
+ self.assertEqual(output,
+ 'Warning: JS minification verification failed for <unknown>:\n'
+ 'Warning: Error message\n')
+ self.assertEqual(mini_lines, orig_f.open().readlines())
+
+
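+# Mixin shared by the finder tests below: prepare_match_test() populates a
+# common file layout and do_match_test() exercises glob patterns against it.
+# Subclasses provide add() and do_check() for their particular finder.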
+class MatchTestTemplate(object):
+ def prepare_match_test(self, with_dotfiles=False):
+ self.add('bar')
+ self.add('foo/bar')
+ self.add('foo/baz')
+ self.add('foo/qux/1')
+ self.add('foo/qux/bar')
+ self.add('foo/qux/2/test')
+ self.add('foo/qux/2/test2')
+ if with_dotfiles:
+ self.add('foo/.foo')
+ self.add('foo/.bar/foo')
+
+ def do_match_test(self):
+ self.do_check('', [
+ 'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('*', [
+ 'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/qux', [
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/b*', ['foo/bar', 'foo/baz'])
+ self.do_check('baz', [])
+ self.do_check('foo/foo', [])
+ self.do_check('foo/*ar', ['foo/bar'])
+ self.do_check('*ar', ['bar'])
+ self.do_check('*/bar', ['foo/bar'])
+ self.do_check('foo/*ux', [
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/q*ux', [
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/*/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2'])
+ self.do_check('**/bar', ['bar', 'foo/bar', 'foo/qux/bar'])
+ self.do_check('foo/**/test', ['foo/qux/2/test'])
+ self.do_check('foo', [
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('foo/**', [
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('**/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2'])
+ self.do_check('**/foo', [
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'
+ ])
+ self.do_check('**/barbaz', [])
+ self.do_check('f**/bar', ['foo/bar'])
+
+ def do_finder_test(self, finder):
+ self.assertTrue(finder.contains('foo/.foo'))
+ self.assertTrue(finder.contains('foo/.bar'))
+ self.assertTrue('foo/.foo' in [f for f, c in
+ finder.find('foo/.foo')])
+ self.assertTrue('foo/.bar/foo' in [f for f, c in
+ finder.find('foo/.bar')])
+ self.assertEqual(sorted([f for f, c in finder.find('foo/.*')]),
+ ['foo/.bar/foo', 'foo/.foo'])
+ for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
+ self.assertFalse('foo/.foo' in [f for f, c in
+ finder.find(pattern)])
+ self.assertFalse('foo/.bar/foo' in [f for f, c in
+ finder.find(pattern)])
+ self.assertEqual(sorted([f for f, c in finder.find(pattern)]),
+ sorted([f for f, c in finder
+ if mozpath.match(f, pattern)]))
+
+
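+# Shared assertion helper: checks that finder.find(pattern) yields exactly the
+# expected paths and that finder.contains(pattern) agrees with whether any
+# result was expected.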
+def do_check(test, finder, pattern, result):
+ if result:
+ test.assertTrue(finder.contains(pattern))
+ else:
+ test.assertFalse(finder.contains(pattern))
+ test.assertEqual(sorted(list(f for f, c in finder.find(pattern))),
+ sorted(result))
+
+
+class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ ensureParentDir(self.tmppath(path))
+ open(self.tmppath(path), 'wb').write(path)
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_file_finder(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir)
+ self.do_match_test()
+ self.do_finder_test(self.finder)
+
+ def test_get(self):
+ self.prepare_match_test()
+ finder = FileFinder(self.tmpdir)
+
+ self.assertIsNone(finder.get('does-not-exist'))
+ res = finder.get('bar')
+ self.assertIsInstance(res, File)
+ self.assertEqual(mozpath.normpath(res.path),
+ mozpath.join(self.tmpdir, 'bar'))
+
+ def test_ignored_dirs(self):
+ """Ignored directories should not have results returned."""
+ self.prepare_match_test()
+ self.add('fooz')
+
+ # Present to ensure prefix matching doesn't exclude.
+ self.add('foo/quxz')
+
+ self.finder = FileFinder(self.tmpdir, ignore=['foo/qux'])
+
+ self.do_check('**', ['bar', 'foo/bar', 'foo/baz', 'foo/quxz', 'fooz'])
+ self.do_check('foo/*', ['foo/bar', 'foo/baz', 'foo/quxz'])
+ self.do_check('foo/**', ['foo/bar', 'foo/baz', 'foo/quxz'])
+ self.do_check('foo/qux/**', [])
+ self.do_check('foo/qux/*', [])
+ self.do_check('foo/qux/bar', [])
+ self.do_check('foo/quxz', ['foo/quxz'])
+ self.do_check('fooz', ['fooz'])
+
+ def test_ignored_files(self):
+ """Ignored files should not have results returned."""
+ self.prepare_match_test()
+
+ # Be sure prefix match doesn't get ignored.
+ self.add('barz')
+
+ self.finder = FileFinder(self.tmpdir, ignore=['foo/bar', 'bar'])
+ self.do_check('**', ['barz', 'foo/baz', 'foo/qux/1', 'foo/qux/2/test',
+ 'foo/qux/2/test2', 'foo/qux/bar'])
+ self.do_check('foo/**', ['foo/baz', 'foo/qux/1', 'foo/qux/2/test',
+ 'foo/qux/2/test2', 'foo/qux/bar'])
+
+ def test_ignored_patterns(self):
+ """Ignore entries with patterns should be honored."""
+ self.prepare_match_test()
+
+ self.add('foo/quxz')
+
+ self.finder = FileFinder(self.tmpdir, ignore=['foo/qux/*'])
+ self.do_check('**', ['foo/bar', 'foo/baz', 'foo/quxz', 'bar'])
+ self.do_check('foo/**', ['foo/bar', 'foo/baz', 'foo/quxz'])
+
+ def test_dotfiles(self):
+ """Finder can find files beginning with . is configured."""
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir, find_dotfiles=True)
+ self.do_check('**', ['bar', 'foo/.foo', 'foo/.bar/foo',
+ 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
+ 'foo/qux/2/test', 'foo/qux/2/test2'])
+
+ def test_dotfiles_plus_ignore(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir, find_dotfiles=True,
+ ignore=['foo/.bar/**'])
+ self.do_check('foo/**', ['foo/.foo', 'foo/bar', 'foo/baz',
+ 'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'])
+
+
+class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.jar.add(path, path, compress=True)
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_jar_finder(self):
+ self.jar = JarWriter(file=self.tmppath('test.jar'))
+ self.prepare_match_test()
+ self.jar.finish()
+ reader = JarReader(file=self.tmppath('test.jar'))
+ self.finder = JarFinder(self.tmppath('test.jar'), reader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), DeflatedFile)
+
+
+class TestTarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.tar.addfile(tarfile.TarInfo(name=path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_tar_finder(self):
+ self.tar = tarfile.open(name=self.tmppath('test.tar.bz2'),
+ mode='w:bz2')
+ self.prepare_match_test()
+ self.tar.close()
+ with tarfile.open(name=self.tmppath('test.tar.bz2'),
+ mode='r:bz2') as tarreader:
+ self.finder = TarFinder(self.tmppath('test.tar.bz2'), tarreader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), ExtractedTarFile)
+
+
+class TestComposedFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path, content=None):
+ # Put foo/qux files under $tmp/b.
+ if path.startswith('foo/qux/'):
+ real_path = mozpath.join('b', path[8:])
+ else:
+ real_path = mozpath.join('a', path)
+ ensureParentDir(self.tmppath(real_path))
+ if not content:
+ content = path
+ open(self.tmppath(real_path), 'wb').write(content)
+
+ def do_check(self, pattern, result):
+ if '*' in pattern:
+ return
+ do_check(self, self.finder, pattern, result)
+
+ def test_composed_finder(self):
+ self.prepare_match_test()
+ # Also add files in $tmp/a/foo/qux because ComposedFinder is
+ # expected to mask foo/qux entirely with content from $tmp/b.
+ ensureParentDir(self.tmppath('a/foo/qux/hoge'))
+ open(self.tmppath('a/foo/qux/hoge'), 'wb').write('hoge')
+ open(self.tmppath('a/foo/qux/bar'), 'wb').write('not the right content')
+ self.finder = ComposedFinder({
+ '': FileFinder(self.tmppath('a')),
+ 'foo/qux': FileFinder(self.tmppath('b')),
+ })
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), File)
+
+
+@unittest.skipUnless(hglib, 'hglib not available')
+class TestMercurialRevisionFinder(MatchTestTemplate, TestWithTmpDir):
+ def setUp(self):
+ super(TestMercurialRevisionFinder, self).setUp()
+ hglib.init(self.tmpdir)
+
+ def add(self, path):
+ c = hglib.open(self.tmpdir)
+ ensureParentDir(self.tmppath(path))
+ with open(self.tmppath(path), 'wb') as fh:
+ fh.write(path)
+ c.add(self.tmppath(path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def _get_finder(self, *args, **kwargs):
+ return MercurialRevisionFinder(*args, **kwargs)
+
+ def test_default_revision(self):
+ self.prepare_match_test()
+ c = hglib.open(self.tmpdir)
+ c.commit('initial commit')
+ self.finder = self._get_finder(self.tmpdir)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get('does-not-exist'))
+ self.assertIsInstance(self.finder.get('bar'), MercurialFile)
+
+ def test_old_revision(self):
+ c = hglib.open(self.tmpdir)
+ with open(self.tmppath('foo'), 'wb') as fh:
+ fh.write('foo initial')
+ c.add(self.tmppath('foo'))
+ c.commit('initial')
+
+ with open(self.tmppath('foo'), 'wb') as fh:
+ fh.write('foo second')
+ with open(self.tmppath('bar'), 'wb') as fh:
+ fh.write('bar second')
+ c.add(self.tmppath('bar'))
+ c.commit('second')
+ # This wipes out the working directory, ensuring the finder isn't
+ # finding anything from the filesystem.
+ c.rawcommand(['update', 'null'])
+
+ finder = self._get_finder(self.tmpdir, 0)
+ f = finder.get('foo')
+ self.assertEqual(f.read(), 'foo initial')
+ self.assertEqual(f.read(), 'foo initial', 'read again for good measure')
+ self.assertIsNone(finder.get('bar'))
+
+ finder = MercurialRevisionFinder(self.tmpdir, rev=1)
+ f = finder.get('foo')
+ self.assertEqual(f.read(), 'foo second')
+ f = finder.get('bar')
+ self.assertEqual(f.read(), 'bar second')
+
+ def test_recognize_repo_paths(self):
+ c = hglib.open(self.tmpdir)
+ with open(self.tmppath('foo'), 'wb') as fh:
+ fh.write('initial')
+ c.add(self.tmppath('foo'))
+ c.commit('initial')
+ c.rawcommand(['update', 'null'])
+
+ finder = self._get_finder(self.tmpdir, 0,
+ recognize_repo_paths=True)
+ with self.assertRaises(NotImplementedError):
+ list(finder.find(''))
+
+ with self.assertRaises(ValueError):
+ finder.get('foo')
+ with self.assertRaises(ValueError):
+ finder.get('')
+
+ f = finder.get(self.tmppath('foo'))
+ self.assertIsInstance(f, MercurialFile)
+ self.assertEqual(f.read(), 'initial')
+
+
+@unittest.skipUnless(MercurialNativeRevisionFinder, 'hgnative not available')
+class TestMercurialNativeRevisionFinder(TestMercurialRevisionFinder):
+ def _get_finder(self, *args, **kwargs):
+ return MercurialNativeRevisionFinder(*args, **kwargs)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_manifests.py b/python/mozbuild/mozpack/test/test_manifests.py
new file mode 100644
index 000000000..b785d014a
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_manifests.py
@@ -0,0 +1,375 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import unicode_literals
+
+import os
+
+import mozunit
+
+from mozpack.copier import (
+ FileCopier,
+ FileRegistry,
+)
+from mozpack.manifests import (
+ InstallManifest,
+ UnreadableInstallManifest,
+)
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestInstallManifest(TestWithTmpDir):
+ def test_construct(self):
+ m = InstallManifest()
+ self.assertEqual(len(m), 0)
+
+ def test_malformed(self):
+ f = self.tmppath('manifest')
+ open(f, 'wb').write('junk\n')
+ with self.assertRaises(UnreadableInstallManifest):
+ m = InstallManifest(f)
+
+ def test_adds(self):
+ m = InstallManifest()
+ m.add_symlink('s_source', 's_dest')
+ m.add_copy('c_source', 'c_dest')
+ m.add_required_exists('e_dest')
+ m.add_optional_exists('o_dest')
+ m.add_pattern_symlink('ps_base', 'ps/*', 'ps_dest')
+ m.add_pattern_copy('pc_base', 'pc/**', 'pc_dest')
+ m.add_preprocess('p_source', 'p_dest', 'p_source.pp')
+ m.add_content('content', 'content')
+
+ self.assertEqual(len(m), 8)
+ self.assertIn('s_dest', m)
+ self.assertIn('c_dest', m)
+ self.assertIn('p_dest', m)
+ self.assertIn('e_dest', m)
+ self.assertIn('o_dest', m)
+ self.assertIn('content', m)
+
+ with self.assertRaises(ValueError):
+ m.add_symlink('s_other', 's_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_copy('c_other', 'c_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_preprocess('p_other', 'p_dest', 'p_other.pp')
+
+ with self.assertRaises(ValueError):
+ m.add_required_exists('e_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_optional_exists('o_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_symlink('ps_base', 'ps/*', 'ps_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_copy('pc_base', 'pc/**', 'pc_dest')
+
+ with self.assertRaises(ValueError):
+ m.add_content('content', 'content')
+
+ def _get_test_manifest(self):
+ m = InstallManifest()
+ m.add_symlink(self.tmppath('s_source'), 's_dest')
+ m.add_copy(self.tmppath('c_source'), 'c_dest')
+ m.add_preprocess(self.tmppath('p_source'), 'p_dest',
+ self.tmppath('p_source.pp'), '#',
+ {'FOO':'BAR', 'BAZ':'QUX'})
+ m.add_required_exists('e_dest')
+ m.add_optional_exists('o_dest')
+ m.add_pattern_symlink('ps_base', '*', 'ps_dest')
+ m.add_pattern_copy('pc_base', '**', 'pc_dest')
+ m.add_content('the content\non\nmultiple lines', 'content')
+
+ return m
+
+ def test_serialization(self):
+ m = self._get_test_manifest()
+
+ p = self.tmppath('m')
+ m.write(path=p)
+ self.assertTrue(os.path.isfile(p))
+
+ with open(p, 'rb') as fh:
+ c = fh.read()
+
+ self.assertEqual(c.count('\n'), 9)
+
+ lines = c.splitlines()
+ self.assertEqual(len(lines), 9)
+
+ self.assertEqual(lines[0], '5')
+
+ m2 = InstallManifest(path=p)
+ self.assertEqual(m, m2)
+ p2 = self.tmppath('m2')
+ m2.write(path=p2)
+
+ with open(p2, 'rb') as fh:
+ c2 = fh.read()
+
+ self.assertEqual(c, c2)
+
+ def test_populate_registry(self):
+ m = self._get_test_manifest()
+ r = FileRegistry()
+ m.populate_registry(r)
+
+ self.assertEqual(len(r), 6)
+ self.assertEqual(r.paths(), ['c_dest', 'content', 'e_dest', 'o_dest',
+ 'p_dest', 's_dest'])
+
+ def test_pattern_expansion(self):
+ source = self.tmppath('source')
+ os.mkdir(source)
+ os.mkdir('%s/base' % source)
+ os.mkdir('%s/base/foo' % source)
+
+ with open('%s/base/foo/file1' % source, 'a'):
+ pass
+
+ with open('%s/base/foo/file2' % source, 'a'):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_symlink('%s/base' % source, '**', 'dest')
+
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertEqual(c.paths(), ['dest/foo/file1', 'dest/foo/file2'])
+
+ def test_or(self):
+ m1 = self._get_test_manifest()
+ orig_length = len(m1)
+ m2 = InstallManifest()
+ m2.add_symlink('s_source2', 's_dest2')
+ m2.add_copy('c_source2', 'c_dest2')
+
+ m1 |= m2
+
+ self.assertEqual(len(m2), 2)
+ self.assertEqual(len(m1), orig_length + 2)
+
+ self.assertIn('s_dest2', m1)
+ self.assertIn('c_dest2', m1)
+
+ def test_copier_application(self):
+ dest = self.tmppath('dest')
+ os.mkdir(dest)
+
+ to_delete = self.tmppath('dest/to_delete')
+ with open(to_delete, 'a'):
+ pass
+
+ with open(self.tmppath('s_source'), 'wt') as fh:
+ fh.write('symlink!')
+
+ with open(self.tmppath('c_source'), 'wt') as fh:
+ fh.write('copy!')
+
+ with open(self.tmppath('p_source'), 'wt') as fh:
+ fh.write('#define FOO 1\npreprocess!')
+
+ with open(self.tmppath('dest/e_dest'), 'a'):
+ pass
+
+ with open(self.tmppath('dest/o_dest'), 'a'):
+ pass
+
+ m = self._get_test_manifest()
+ c = FileCopier()
+ m.populate_registry(c)
+ result = c.copy(dest)
+
+ self.assertTrue(os.path.exists(self.tmppath('dest/s_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/c_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/e_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/o_dest')))
+ self.assertTrue(os.path.exists(self.tmppath('dest/content')))
+ self.assertFalse(os.path.exists(to_delete))
+
+ with open(self.tmppath('dest/s_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'symlink!')
+
+ with open(self.tmppath('dest/c_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'copy!')
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'preprocess!')
+
+ self.assertEqual(result.updated_files, set(self.tmppath(p) for p in (
+ 'dest/s_dest', 'dest/c_dest', 'dest/p_dest', 'dest/content')))
+ self.assertEqual(result.existing_files,
+ set([self.tmppath('dest/e_dest'), self.tmppath('dest/o_dest')]))
+ self.assertEqual(result.removed_files, {to_delete})
+ self.assertEqual(result.removed_directories, set())
+
+ def test_preprocessor(self):
+ manifest = self.tmppath('m')
+ deps = self.tmppath('m.pp')
+ dest = self.tmppath('dest')
+ include = self.tmppath('p_incl')
+
+ with open(include, 'wt') as fh:
+ fh.write('#define INCL\n')
+ time = os.path.getmtime(include) - 3
+ os.utime(include, (time, time))
+
+ with open(self.tmppath('p_source'), 'wt') as fh:
+ fh.write('#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n')
+ fh.write('#ifdef DEPTEST\nPASS2\n#endif\n')
+ fh.write('#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n')
+ time = os.path.getmtime(self.tmppath('p_source')) - 3
+ os.utime(self.tmppath('p_source'), (time, time))
+
+ # Create and write a manifest with the preprocessed file, then apply it.
+ # This should write out our preprocessed file.
+ m = InstallManifest()
+ m.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#',
+ {'FOO':'BAR', 'BAZ':'QUX'})
+ m.write(path=manifest)
+
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'PASS1\n')
+
+ # Create a second manifest with the preprocessed file, then apply it.
+ # Since this manifest does not exist on the disk, there should not be a
+ # dependency on it, and the preprocessed file should not be modified.
+ m2 = InstallManifest()
+ m2.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#',
+ {'DEPTEST':True})
+ c = FileCopier()
+ m2.populate_registry(c)
+ result = c.copy(dest)
+
+ self.assertFalse(self.tmppath('dest/p_dest') in result.updated_files)
+ self.assertTrue(self.tmppath('dest/p_dest') in result.existing_files)
+
+ # Write out the second manifest, then load it back in from the disk.
+ # This should add the dependency on the manifest file, so our
+ # preprocessed file should be regenerated with the new defines.
+ # We also set the mtime on the destination file back, so it will be
+ # older than the manifest file.
+ m2.write(path=manifest)
+ time = os.path.getmtime(manifest) - 1
+ os.utime(self.tmppath('dest/p_dest'), (time, time))
+ m2 = InstallManifest(path=manifest)
+ c = FileCopier()
+ m2.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'PASS2\n')
+
+ # Set the time on the manifest back, so it won't be picked up as
+ # modified in the next test
+ time = os.path.getmtime(manifest) - 1
+ os.utime(manifest, (time, time))
+
+ # Update the contents of a file included by the source file. This should
+ # cause the destination to be regenerated.
+ with open(include, 'wt') as fh:
+ fh.write('#define INCLTEST\n')
+
+ time = os.path.getmtime(include) - 1
+ os.utime(self.tmppath('dest/p_dest'), (time, time))
+ c = FileCopier()
+ m2.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(self.tmppath('dest/p_dest'), 'rt') as fh:
+ self.assertEqual(fh.read(), 'PASS2\nPASS3\n')
+
+ def test_preprocessor_dependencies(self):
+ manifest = self.tmppath('m')
+ deps = self.tmppath('m.pp')
+ dest = self.tmppath('dest')
+ source = self.tmppath('p_source')
+ destfile = self.tmppath('dest/p_dest')
+ include = self.tmppath('p_incl')
+ os.mkdir(dest)
+
+ with open(source, 'wt') as fh:
+ fh.write('#define SRC\nSOURCE\n')
+ time = os.path.getmtime(source) - 3
+ os.utime(source, (time, time))
+
+ with open(include, 'wt') as fh:
+ fh.write('INCLUDE\n')
+ time = os.path.getmtime(source) - 3
+ os.utime(include, (time, time))
+
+ # Create and write a manifest with the preprocessed file.
+ m = InstallManifest()
+ m.add_preprocess(source, 'p_dest', deps, '#', {'FOO':'BAR', 'BAZ':'QUX'})
+ m.write(path=manifest)
+
+ time = os.path.getmtime(source) - 5
+ os.utime(manifest, (time, time))
+
+ # Now read the manifest back in, and apply it. This should write out
+ # our preprocessed file.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(destfile, 'rt') as fh:
+ self.assertEqual(fh.read(), 'SOURCE\n')
+
+ # Next, modify the source to #include another file.
+ with open(source, 'wt') as fh:
+ fh.write('SOURCE\n#include p_incl\n')
+ time = os.path.getmtime(source) - 1
+ os.utime(destfile, (time, time))
+
+ # Apply the manifest, and confirm that it also reads the newly included
+ # file.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ with open(destfile, 'rt') as fh:
+ self.assertEqual(fh.read(), 'SOURCE\nINCLUDE\n')
+
+ # Set the time on the source file back, so it won't be picked up as
+ # modified in the next test.
+ time = os.path.getmtime(source) - 1
+ os.utime(source, (time, time))
+
+ # Now, modify the include file (but not the original source).
+ with open(include, 'wt') as fh:
+ fh.write('INCLUDE MODIFIED\n')
+ time = os.path.getmtime(include) - 1
+ os.utime(destfile, (time, time))
+
+ # Apply the manifest, and confirm that the change to the include file
+ # is detected. That should cause the preprocessor to run again.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ with open(destfile, 'rt') as fh:
+ self.assertEqual(fh.read(), 'SOURCE\nINCLUDE MODIFIED\n')
+
+ # ORing an InstallManifest should copy file dependencies
+ m = InstallManifest()
+ m |= InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ e = c._files['p_dest']
+ self.assertEqual(e.extra_depends, [manifest])
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_mozjar.py b/python/mozbuild/mozpack/test/test_mozjar.py
new file mode 100644
index 000000000..948403006
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_mozjar.py
@@ -0,0 +1,342 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.files import FileFinder
+from mozpack.mozjar import (
+ JarReaderError,
+ JarWriterError,
+ JarStruct,
+ JarReader,
+ JarWriter,
+ Deflater,
+ JarLog,
+)
+from collections import OrderedDict
+from mozpack.test.test_files import MockDest
+import unittest
+import mozunit
+from cStringIO import StringIO
+from urllib import pathname2url
+import mozpack.path as mozpath
+import os
+
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, 'data')
+
+
+class TestJarStruct(unittest.TestCase):
+ class Foo(JarStruct):
+ MAGIC = 0x01020304
+ STRUCT = OrderedDict([
+ ('foo', 'uint32'),
+ ('bar', 'uint16'),
+ ('qux', 'uint16'),
+ ('length', 'uint16'),
+ ('length2', 'uint16'),
+ ('string', 'length'),
+ ('string2', 'length2'),
+ ])
+
+ def test_jar_struct(self):
+ foo = TestJarStruct.Foo()
+ self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC)
+ self.assertEqual(foo['foo'], 0)
+ self.assertEqual(foo['bar'], 0)
+ self.assertEqual(foo['qux'], 0)
+ self.assertFalse('length' in foo)
+ self.assertFalse('length2' in foo)
+ self.assertEqual(foo['string'], '')
+ self.assertEqual(foo['string2'], '')
+
+ self.assertEqual(foo.size, 16)
+
+ foo['foo'] = 0x42434445
+ foo['bar'] = 0xabcd
+ foo['qux'] = 0xef01
+ foo['string'] = 'abcde'
+ foo['string2'] = 'Arbitrarily long string'
+
+ serialized = b'\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef' + \
+ b'\x05\x00\x17\x00abcdeArbitrarily long string'
+ self.assertEqual(foo.size, len(serialized))
+ foo_serialized = foo.serialize()
+ self.assertEqual(foo_serialized, serialized)
+
+ def do_test_read_jar_struct(self, data):
+ self.assertRaises(JarReaderError, TestJarStruct.Foo, data)
+ self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:])
+
+ foo = TestJarStruct.Foo(data[1:])
+ self.assertEqual(foo['foo'], 0x45444342)
+ self.assertEqual(foo['bar'], 0xcdab)
+ self.assertEqual(foo['qux'], 0x01ef)
+ self.assertFalse('length' in foo)
+ self.assertFalse('length2' in foo)
+ self.assertEqual(foo['string'], '012345')
+ self.assertEqual(foo['string2'], '67')
+
+ def test_read_jar_struct(self):
+ data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
+ b'\x01\x06\x00\x02\x0001234567890'
+ self.do_test_read_jar_struct(data)
+
+ def test_read_jar_struct_memoryview(self):
+ data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
+ b'\x01\x06\x00\x02\x0001234567890'
+ self.do_test_read_jar_struct(memoryview(data))
+
+
+class TestDeflater(unittest.TestCase):
+ def wrap(self, data):
+ return data
+
+ def test_deflater_no_compress(self):
+ deflater = Deflater(False)
+ deflater.write(self.wrap('abc'))
+ self.assertFalse(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 3)
+ self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.compressed_data, 'abc')
+ self.assertEqual(deflater.crc32, 0x352441c2)
+
+ def test_deflater_compress_no_gain(self):
+ deflater = Deflater(True)
+ deflater.write(self.wrap('abc'))
+ self.assertFalse(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 3)
+ self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.compressed_data, 'abc')
+ self.assertEqual(deflater.crc32, 0x352441c2)
+
+ def test_deflater_compress(self):
+ deflater = Deflater(True)
+ deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
+ self.assertTrue(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 26)
+ self.assertNotEqual(deflater.compressed_size,
+ deflater.uncompressed_size)
+ self.assertEqual(deflater.crc32, 0xd46b97ed)
+ # The CRC is the same as when not compressed
+ deflater = Deflater(False)
+ self.assertFalse(deflater.compressed)
+ deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
+ self.assertEqual(deflater.crc32, 0xd46b97ed)
+
+
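+# Re-run the Deflater tests with memoryview-wrapped input to check that
+# Deflater.write accepts buffer-like objects, not just plain strings.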
+class TestDeflaterMemoryView(TestDeflater):
+ def wrap(self, data):
+ return memoryview(data)
+
+
+class TestJar(unittest.TestCase):
+ optimize = False
+
+ def test_jar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+ jar.add('foo', 'foo')
+ self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
+ jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
+ jar.add('baz\\backslash', 'aaaaaaaaaaaaaaa')
+
+ files = [j for j in JarReader(fileobj=s)]
+
+ self.assertEqual(files[0].filename, 'foo')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'foo')
+
+ self.assertEqual(files[1].filename, 'bar')
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertEqual(files[2].filename, 'baz/qux')
+ self.assertFalse(files[2].compressed)
+ self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ if os.sep == '\\':
+ self.assertEqual(files[3].filename, 'baz/backslash',
+ 'backslashes in filenames on Windows should get normalized')
+ else:
+ self.assertEqual(files[3].filename, 'baz\\backslash',
+ 'backslashes in filenames on POSIX platforms are untouched')
+
+ s = MockDest()
+ with JarWriter(fileobj=s, compress=False,
+ optimize=self.optimize) as jar:
+ jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.add('foo', 'foo')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'bar')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertEqual(files[1].filename, 'foo')
+ self.assertFalse(files[1].compressed)
+ self.assertEqual(files[1].read(), 'foo')
+
+ self.assertEqual(files[2].filename, 'baz/qux')
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertTrue('bar' in jar)
+ self.assertTrue('foo' in jar)
+ self.assertFalse('baz' in jar)
+ self.assertTrue('baz/qux' in jar)
+ self.assertEqual(jar['bar'].filename, files[0].filename)
+ self.assertEqual(jar['foo'].filename, files[1].filename)
+ self.assertEqual(jar['baz/qux'].filename, files[2].filename)
+
+ s.seek(0)
+ jar = JarReader(fileobj=s)
+ self.assertTrue('bar' in jar)
+ self.assertTrue('foo' in jar)
+ self.assertFalse('baz' in jar)
+ self.assertTrue('baz/qux' in jar)
+
+ files[0].seek(0)
+ self.assertEqual(jar['bar'].filename, files[0].filename)
+ self.assertEqual(jar['bar'].compressed, files[0].compressed)
+ self.assertEqual(jar['bar'].read(), files[0].read())
+
+ files[1].seek(0)
+ self.assertEqual(jar['foo'].filename, files[1].filename)
+ self.assertEqual(jar['foo'].compressed, files[1].compressed)
+ self.assertEqual(jar['foo'].read(), files[1].read())
+
+ files[2].seek(0)
+ self.assertEqual(jar['baz/qux'].filename, files[2].filename)
+ self.assertEqual(jar['baz/qux'].compressed, files[2].compressed)
+ self.assertEqual(jar['baz/qux'].read(), files[2].read())
+
+ def test_rejar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
+
+ new = MockDest()
+ with JarWriter(fileobj=new, optimize=self.optimize) as jar:
+ for j in JarReader(fileobj=s):
+ jar.add(j.filename, j)
+
+ jar = JarReader(fileobj=new)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'foo')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'foo')
+
+ self.assertEqual(files[1].filename, 'bar')
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ self.assertEqual(files[2].filename, 'baz/qux')
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ def test_add_from_finder(self):
+ s = MockDest()
+ with JarWriter(fileobj=s, optimize=self.optimize) as jar:
+ finder = FileFinder(test_data_path)
+ for p, f in finder.find('test_data'):
+ jar.add('test_data', f)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'test_data')
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), 'test_data')
+
+
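+# Same assertions as TestJar, but with the JarWriter optimize flag enabled.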
+class TestOptimizeJar(TestJar):
+ optimize = True
+
+
+class TestPreload(unittest.TestCase):
+ def test_preload(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
+
+ jar = JarReader(fileobj=s)
+ self.assertEqual(jar.last_preloaded, None)
+
+ with JarWriter(fileobj=s) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
+ jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
+ jar.preload(['baz/qux', 'bar'])
+
+ jar = JarReader(fileobj=s)
+ self.assertEqual(jar.last_preloaded, 'bar')
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, 'baz/qux')
+ self.assertEqual(files[1].filename, 'bar')
+ self.assertEqual(files[2].filename, 'foo')
+
+
+class TestJarLog(unittest.TestCase):
+ def test_jarlog(self):
+ base = 'file:' + pathname2url(os.path.abspath(os.curdir))
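+ # Each log line is "<jar/file URL> <member path>"; JarLog is expected to
+ # group entries per archive and deduplicate them in first-seen order.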
+ s = StringIO('\n'.join([
+ base + '/bar/baz.jar first',
+ base + '/bar/baz.jar second',
+ base + '/bar/baz.jar third',
+ base + '/bar/baz.jar second',
+ base + '/bar/baz.jar second',
+ 'jar:' + base + '/qux.zip!/omni.ja stuff',
+ base + '/bar/baz.jar first',
+ 'jar:' + base + '/qux.zip!/omni.ja other/stuff',
+ 'jar:' + base + '/qux.zip!/omni.ja stuff',
+ base + '/bar/baz.jar third',
+ 'jar:jar:' + base + '/qux.zip!/baz/baz.jar!/omni.ja nested/stuff',
+ 'jar:jar:jar:' + base + '/qux.zip!/baz/baz.jar!/foo.zip!/omni.ja' +
+ ' deeply/nested/stuff',
+ ]))
+ log = JarLog(fileobj=s)
+ canonicalize = lambda p: \
+ mozpath.normsep(os.path.normcase(os.path.realpath(p)))
+ baz_jar = canonicalize('bar/baz.jar')
+ qux_zip = canonicalize('qux.zip')
+ self.assertEqual(set(log.keys()), set([
+ baz_jar,
+ (qux_zip, 'omni.ja'),
+ (qux_zip, 'baz/baz.jar', 'omni.ja'),
+ (qux_zip, 'baz/baz.jar', 'foo.zip', 'omni.ja'),
+ ]))
+ self.assertEqual(log[baz_jar], [
+ 'first',
+ 'second',
+ 'third',
+ ])
+ self.assertEqual(log[(qux_zip, 'omni.ja')], [
+ 'stuff',
+ 'other/stuff',
+ ])
+ self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'omni.ja')],
+ ['nested/stuff'])
+ self.assertEqual(log[(qux_zip, 'baz/baz.jar', 'foo.zip',
+ 'omni.ja')], ['deeply/nested/stuff'])
+
+ # The above tests also indirectly check the value returned by
+ # JarLog.canonicalize for various jar: and file: urls, but
+ # JarLog.canonicalize also supports plain paths.
+ self.assertEqual(JarLog.canonicalize(os.path.abspath('bar/baz.jar')),
+ baz_jar)
+ self.assertEqual(JarLog.canonicalize('bar/baz.jar'), baz_jar)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager.py b/python/mozbuild/mozpack/test/test_packager.py
new file mode 100644
index 000000000..397f40538
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager.py
@@ -0,0 +1,490 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+import os
+from mozpack.packager import (
+ preprocess_manifest,
+ CallDeque,
+ Component,
+ SimplePackager,
+ SimpleManifestSink,
+)
+from mozpack.files import GeneratedFile
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestContent,
+ ManifestResource,
+)
+from mozunit import MockedOpen
+from mozbuild.preprocessor import Preprocessor
+from mozpack.errors import (
+ errors,
+ ErrorMessage,
+)
+import mozpack.path as mozpath
+
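+# Sample packaging manifest covering the syntax handled by
+# preprocess_manifest: bare entries, [component] sections (optionally with a
+# destdir option), '-' removals, ';' comments and preprocessor #ifdef blocks.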
+MANIFEST = '''
+bar/*
+[foo]
+foo/*
+-foo/bar
+chrome.manifest
+[zot destdir="destdir"]
+foo/zot
+; comment
+#ifdef baz
+[baz]
+baz@SUFFIX@
+#endif
+'''
+
+
+class TestPreprocessManifest(unittest.TestCase):
+ MANIFEST_PATH = os.path.join(os.path.abspath(os.curdir), 'manifest')
+
+ EXPECTED_LOG = [
+ ((MANIFEST_PATH, 2), 'add', '', 'bar/*'),
+ ((MANIFEST_PATH, 4), 'add', 'foo', 'foo/*'),
+ ((MANIFEST_PATH, 5), 'remove', 'foo', 'foo/bar'),
+ ((MANIFEST_PATH, 6), 'add', 'foo', 'chrome.manifest'),
+ ((MANIFEST_PATH, 8), 'add', 'zot destdir="destdir"', 'foo/zot'),
+ ]
+
+ def setUp(self):
+ class MockSink(object):
+ def __init__(self):
+ self.log = []
+
+ def add(self, component, path):
+ self._log(errors.get_context(), 'add', repr(component), path)
+
+ def remove(self, component, path):
+ self._log(errors.get_context(), 'remove', repr(component), path)
+
+ def _log(self, *args):
+ self.log.append(args)
+
+ self.sink = MockSink()
+
+ def test_preprocess_manifest(self):
+ with MockedOpen({'manifest': MANIFEST}):
+ preprocess_manifest(self.sink, 'manifest')
+ self.assertEqual(self.sink.log, self.EXPECTED_LOG)
+
+ def test_preprocess_manifest_missing_define(self):
+ with MockedOpen({'manifest': MANIFEST}):
+ self.assertRaises(
+ Preprocessor.Error,
+ preprocess_manifest,
+ self.sink,
+ 'manifest',
+ {'baz': 1}
+ )
+
+ def test_preprocess_manifest_defines(self):
+ with MockedOpen({'manifest': MANIFEST}):
+ preprocess_manifest(self.sink, 'manifest',
+ {'baz': 1, 'SUFFIX': '.exe'})
+ self.assertEqual(self.sink.log, self.EXPECTED_LOG +
+ [((self.MANIFEST_PATH, 12), 'add', 'baz', 'baz.exe')])
+
+
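+# Fake finder backed by an in-memory dict; it records every pattern passed to
+# find() so tests can assert on the lookup order.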
+class MockFinder(object):
+ def __init__(self, files):
+ self.files = files
+ self.log = []
+
+ def find(self, path):
+ self.log.append(path)
+ for f in sorted(self.files):
+ if mozpath.match(f, path):
+ yield f, self.files[f]
+
+ def __iter__(self):
+ return self.find('')
+
+
+class MockFormatter(object):
+ def __init__(self):
+ self.log = []
+
+ def add_base(self, *args):
+ self._log(errors.get_context(), 'add_base', *args)
+
+ def add_manifest(self, *args):
+ self._log(errors.get_context(), 'add_manifest', *args)
+
+ def add_interfaces(self, *args):
+ self._log(errors.get_context(), 'add_interfaces', *args)
+
+ def add(self, *args):
+ self._log(errors.get_context(), 'add', *args)
+
+ def _log(self, *args):
+ self.log.append(args)
+
+
+class TestSimplePackager(unittest.TestCase):
+ def test_simple_packager(self):
+ class GeneratedFileWithPath(GeneratedFile):
+ def __init__(self, path, content):
+ GeneratedFile.__init__(self, content)
+ self.path = path
+
+ formatter = MockFormatter()
+ packager = SimplePackager(formatter)
+ curdir = os.path.abspath(os.curdir)
+ file = GeneratedFileWithPath(os.path.join(curdir, 'foo',
+ 'bar.manifest'),
+ 'resource bar bar/\ncontent bar bar/')
+ with errors.context('manifest', 1):
+ packager.add('foo/bar.manifest', file)
+
+ file = GeneratedFileWithPath(os.path.join(curdir, 'foo',
+ 'baz.manifest'),
+ 'resource baz baz/')
+ with errors.context('manifest', 2):
+ packager.add('bar/baz.manifest', file)
+
+ with errors.context('manifest', 3):
+ packager.add('qux/qux.manifest',
+ GeneratedFile(''.join([
+ 'resource qux qux/\n',
+ 'binary-component qux.so\n',
+ ])))
+ bar_xpt = GeneratedFile('bar.xpt')
+ qux_xpt = GeneratedFile('qux.xpt')
+ foo_html = GeneratedFile('foo_html')
+ bar_html = GeneratedFile('bar_html')
+ with errors.context('manifest', 4):
+ packager.add('foo/bar.xpt', bar_xpt)
+ with errors.context('manifest', 5):
+ packager.add('foo/bar/foo.html', foo_html)
+ packager.add('foo/bar/bar.html', bar_html)
+
+ file = GeneratedFileWithPath(os.path.join(curdir, 'foo.manifest'),
+ ''.join([
+ 'manifest foo/bar.manifest\n',
+ 'manifest bar/baz.manifest\n',
+ ]))
+ with errors.context('manifest', 6):
+ packager.add('foo.manifest', file)
+ with errors.context('manifest', 7):
+ packager.add('foo/qux.xpt', qux_xpt)
+
+ file = GeneratedFileWithPath(os.path.join(curdir, 'addon',
+ 'chrome.manifest'),
+ 'resource hoge hoge/')
+ with errors.context('manifest', 8):
+ packager.add('addon/chrome.manifest', file)
+
+ install_rdf = GeneratedFile('<RDF></RDF>')
+ with errors.context('manifest', 9):
+ packager.add('addon/install.rdf', install_rdf)
+
+ with errors.context('manifest', 10):
+ packager.add('addon2/install.rdf', install_rdf)
+ packager.add('addon2/chrome.manifest',
+ GeneratedFile('binary-component addon2.so'))
+
+ with errors.context('manifest', 11):
+ packager.add('addon3/install.rdf', install_rdf)
+ packager.add('addon3/chrome.manifest', GeneratedFile(
+ 'manifest components/components.manifest'))
+ packager.add('addon3/components/components.manifest',
+ GeneratedFile('binary-component addon3.so'))
+
+ with errors.context('manifest', 12):
+ install_rdf_addon4 = GeneratedFile(
+ '<RDF>\n<...>\n<em:unpack>true</em:unpack>\n<...>\n</RDF>')
+ packager.add('addon4/install.rdf', install_rdf_addon4)
+
+ with errors.context('manifest', 13):
+ install_rdf_addon5 = GeneratedFile(
+ '<RDF>\n<...>\n<em:unpack>false</em:unpack>\n<...>\n</RDF>')
+ packager.add('addon5/install.rdf', install_rdf_addon5)
+
+ with errors.context('manifest', 14):
+ install_rdf_addon6 = GeneratedFile(
+ '<RDF>\n<... em:unpack=true>\n<...>\n</RDF>')
+ packager.add('addon6/install.rdf', install_rdf_addon6)
+
+ with errors.context('manifest', 15):
+ install_rdf_addon7 = GeneratedFile(
+ '<RDF>\n<... em:unpack=false>\n<...>\n</RDF>')
+ packager.add('addon7/install.rdf', install_rdf_addon7)
+
+ with errors.context('manifest', 16):
+ install_rdf_addon8 = GeneratedFile(
+ '<RDF>\n<... em:unpack="true">\n<...>\n</RDF>')
+ packager.add('addon8/install.rdf', install_rdf_addon8)
+
+ with errors.context('manifest', 17):
+ install_rdf_addon9 = GeneratedFile(
+ '<RDF>\n<... em:unpack="false">\n<...>\n</RDF>')
+ packager.add('addon9/install.rdf', install_rdf_addon9)
+
+ with errors.context('manifest', 18):
+ install_rdf_addon10 = GeneratedFile(
+ '<RDF>\n<... em:unpack=\'true\'>\n<...>\n</RDF>')
+ packager.add('addon10/install.rdf', install_rdf_addon10)
+
+ with errors.context('manifest', 19):
+ install_rdf_addon11 = GeneratedFile(
+ '<RDF>\n<... em:unpack=\'false\'>\n<...>\n</RDF>')
+ packager.add('addon11/install.rdf', install_rdf_addon11)
+
+ self.assertEqual(formatter.log, [])
+
+ with errors.context('dummy', 1):
+ packager.close()
+ self.maxDiff = None
+ # The formatter is expected to reorder the manifest entries so that
+ # chrome entries appear before the others.
+ self.assertEqual(formatter.log, [
+ (('dummy', 1), 'add_base', '', False),
+ (('dummy', 1), 'add_base', 'addon', True),
+ (('dummy', 1), 'add_base', 'addon10', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon11', True),
+ (('dummy', 1), 'add_base', 'addon2', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon3', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon4', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon5', True),
+ (('dummy', 1), 'add_base', 'addon6', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon7', True),
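+# Re-run the MercurialRevisionFinder tests against the native (non-hglib)
+# implementation when it is available.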
+ (('dummy', 1), 'add_base', 'addon8', 'unpacked'),
+ (('dummy', 1), 'add_base', 'addon9', True),
+ (('dummy', 1), 'add_base', 'qux', False),
+ ((os.path.join(curdir, 'foo', 'bar.manifest'), 2),
+ 'add_manifest', ManifestContent('foo', 'bar', 'bar/')),
+ ((os.path.join(curdir, 'foo', 'bar.manifest'), 1),
+ 'add_manifest', ManifestResource('foo', 'bar', 'bar/')),
+ (('bar/baz.manifest', 1),
+ 'add_manifest', ManifestResource('bar', 'baz', 'baz/')),
+ (('qux/qux.manifest', 1),
+ 'add_manifest', ManifestResource('qux', 'qux', 'qux/')),
+ (('qux/qux.manifest', 2),
+ 'add_manifest', ManifestBinaryComponent('qux', 'qux.so')),
+ (('manifest', 4), 'add_interfaces', 'foo/bar.xpt', bar_xpt),
+ (('manifest', 7), 'add_interfaces', 'foo/qux.xpt', qux_xpt),
+ ((os.path.join(curdir, 'addon', 'chrome.manifest'), 1),
+ 'add_manifest', ManifestResource('addon', 'hoge', 'hoge/')),
+ (('addon2/chrome.manifest', 1), 'add_manifest',
+ ManifestBinaryComponent('addon2', 'addon2.so')),
+ (('addon3/components/components.manifest', 1), 'add_manifest',
+ ManifestBinaryComponent('addon3/components', 'addon3.so')),
+ (('manifest', 5), 'add', 'foo/bar/foo.html', foo_html),
+ (('manifest', 5), 'add', 'foo/bar/bar.html', bar_html),
+ (('manifest', 9), 'add', 'addon/install.rdf', install_rdf),
+ (('manifest', 10), 'add', 'addon2/install.rdf', install_rdf),
+ (('manifest', 11), 'add', 'addon3/install.rdf', install_rdf),
+ (('manifest', 12), 'add', 'addon4/install.rdf',
+ install_rdf_addon4),
+ (('manifest', 13), 'add', 'addon5/install.rdf',
+ install_rdf_addon5),
+ (('manifest', 14), 'add', 'addon6/install.rdf',
+ install_rdf_addon6),
+ (('manifest', 15), 'add', 'addon7/install.rdf',
+ install_rdf_addon7),
+ (('manifest', 16), 'add', 'addon8/install.rdf',
+ install_rdf_addon8),
+ (('manifest', 17), 'add', 'addon9/install.rdf',
+ install_rdf_addon9),
+ (('manifest', 18), 'add', 'addon10/install.rdf',
+ install_rdf_addon10),
+ (('manifest', 19), 'add', 'addon11/install.rdf',
+ install_rdf_addon11),
+ ])
+
+ self.assertEqual(packager.get_bases(),
+ set(['', 'addon', 'addon2', 'addon3', 'addon4',
+ 'addon5', 'addon6', 'addon7', 'addon8',
+ 'addon9', 'addon10', 'addon11', 'qux']))
+ self.assertEqual(packager.get_bases(addons=False), set(['', 'qux']))
+
+ def test_simple_packager_manifest_consistency(self):
+ formatter = MockFormatter()
+ # bar/ is detected as an addon because of install.rdf, but the top-level
+ # manifest includes a manifest inside bar/.
+ packager = SimplePackager(formatter)
+ packager.add('base.manifest', GeneratedFile(
+ 'manifest foo/bar.manifest\n'
+ 'manifest bar/baz.manifest\n'
+ ))
+ packager.add('foo/bar.manifest', GeneratedFile('resource bar bar'))
+ packager.add('bar/baz.manifest', GeneratedFile('resource baz baz'))
+ packager.add('bar/install.rdf', GeneratedFile(''))
+
+ with self.assertRaises(ErrorMessage) as e:
+ packager.close()
+
+ self.assertEqual(e.exception.message,
+ 'Error: "bar/baz.manifest" is included from "base.manifest", '
+ 'which is outside "bar"')
+
+ # bar/ is detected as a separate base because of a chrome.manifest that
+ # is not included anywhere, but the top-level manifest includes another
+ # manifest inside bar/.
+ packager = SimplePackager(formatter)
+ packager.add('base.manifest', GeneratedFile(
+ 'manifest foo/bar.manifest\n'
+ 'manifest bar/baz.manifest\n'
+ ))
+ packager.add('foo/bar.manifest', GeneratedFile('resource bar bar'))
+ packager.add('bar/baz.manifest', GeneratedFile('resource baz baz'))
+ packager.add('bar/chrome.manifest', GeneratedFile('resource baz baz'))
+
+ with self.assertRaises(ErrorMessage) as e:
+ packager.close()
+
+ self.assertEqual(e.exception.message,
+ 'Error: "bar/baz.manifest" is included from "base.manifest", '
+ 'which is outside "bar"')
+
+ # bar/ is detected as a separate base because of a chrome.manifest that
+ # is not included anywhere, but that chrome.manifest includes baz.manifest
+ # from the same directory. This shouldn't error out.
+ packager = SimplePackager(formatter)
+ packager.add('base.manifest', GeneratedFile(
+ 'manifest foo/bar.manifest\n'
+ ))
+ packager.add('foo/bar.manifest', GeneratedFile('resource bar bar'))
+ packager.add('bar/baz.manifest', GeneratedFile('resource baz baz'))
+ packager.add('bar/chrome.manifest',
+ GeneratedFile('manifest baz.manifest'))
+ packager.close()
+
+
+class TestSimpleManifestSink(unittest.TestCase):
+ def test_simple_manifest_parser(self):
+ formatter = MockFormatter()
+ foobar = GeneratedFile('foobar')
+ foobaz = GeneratedFile('foobaz')
+ fooqux = GeneratedFile('fooqux')
+ foozot = GeneratedFile('foozot')
+ finder = MockFinder({
+ 'bin/foo/bar': foobar,
+ 'bin/foo/baz': foobaz,
+ 'bin/foo/qux': fooqux,
+ 'bin/foo/zot': foozot,
+ 'bin/foo/chrome.manifest': GeneratedFile('resource foo foo/'),
+ 'bin/chrome.manifest':
+ GeneratedFile('manifest foo/chrome.manifest'),
+ })
+ parser = SimpleManifestSink(finder, formatter)
+ component0 = Component('component0')
+ component1 = Component('component1')
+ component2 = Component('component2', destdir='destdir')
+ parser.add(component0, 'bin/foo/b*')
+ parser.add(component1, 'bin/foo/qux')
+ parser.add(component1, 'bin/foo/chrome.manifest')
+ parser.add(component2, 'bin/foo/zot')
+ self.assertRaises(ErrorMessage, parser.add, 'component1', 'bin/bar')
+
+ self.assertEqual(formatter.log, [])
+ parser.close()
+ self.assertEqual(formatter.log, [
+ (None, 'add_base', '', False),
+ (('foo/chrome.manifest', 1),
+ 'add_manifest', ManifestResource('foo', 'foo', 'foo/')),
+ (None, 'add', 'foo/bar', foobar),
+ (None, 'add', 'foo/baz', foobaz),
+ (None, 'add', 'foo/qux', fooqux),
+ (None, 'add', 'destdir/foo/zot', foozot),
+ ])
+
+ self.assertEqual(finder.log, [
+ 'bin/foo/b*',
+ 'bin/foo/qux',
+ 'bin/foo/chrome.manifest',
+ 'bin/foo/zot',
+ 'bin/bar',
+ 'bin/chrome.manifest'
+ ])
+
+
+class TestCallDeque(unittest.TestCase):
+ def test_call_deque(self):
+ class Logger(object):
+ def __init__(self):
+ self._log = []
+
+ def log(self, str):
+ self._log.append(str)
+
+ @staticmethod
+ def staticlog(logger, str):
+ logger.log(str)
+
+ def do_log(logger, str):
+ logger.log(str)
+
+ logger = Logger()
+ d = CallDeque()
+ d.append(logger.log, 'foo')
+ d.append(logger.log, 'bar')
+ d.append(logger.staticlog, logger, 'baz')
+ d.append(do_log, logger, 'qux')
+ self.assertEqual(logger._log, [])
+ d.execute()
+ self.assertEqual(logger._log, ['foo', 'bar', 'baz', 'qux'])
+
+
+class TestComponent(unittest.TestCase):
+ def do_split(self, string, name, options):
+ n, o = Component._split_component_and_options(string)
+ self.assertEqual(name, n)
+ self.assertEqual(options, o)
+
+ def test_component_split_component_and_options(self):
+ self.do_split('component', 'component', {})
+ self.do_split('trailingspace ', 'trailingspace', {})
+ self.do_split(' leadingspace', 'leadingspace', {})
+ self.do_split(' trim ', 'trim', {})
+ self.do_split(' trim key="value"', 'trim', {'key':'value'})
+ self.do_split(' trim empty=""', 'trim', {'empty':''})
+ self.do_split(' trim space=" "', 'trim', {'space':' '})
+ self.do_split('component key="value" key2="second" ',
+ 'component', {'key':'value', 'key2':'second'})
+ self.do_split('trim key=" value with spaces " key2="spaces again"',
+ 'trim', {'key':' value with spaces ', 'key2': 'spaces again'})
+
+ def do_split_error(self, string):
+ self.assertRaises(ValueError, Component._split_component_and_options, string)
+
+ def test_component_split_component_and_options_errors(self):
+ self.do_split_error('"component')
+ self.do_split_error('comp"onent')
+ self.do_split_error('component"')
+ self.do_split_error('"component"')
+ self.do_split_error('=component')
+ self.do_split_error('comp=onent')
+ self.do_split_error('component=')
+ self.do_split_error('key="val"')
+ self.do_split_error('component key=')
+ self.do_split_error('component key="val')
+ self.do_split_error('component key=val"')
+ self.do_split_error('component key="val" x')
+ self.do_split_error('component x key="val"')
+ self.do_split_error('component key1="val" x key2="val"')
+
+ def do_from_string(self, string, name, destdir=''):
+ component = Component.from_string(string)
+ self.assertEqual(name, component.name)
+ self.assertEqual(destdir, component.destdir)
+
+ def test_component_from_string(self):
+ self.do_from_string('component', 'component')
+ self.do_from_string('component-with-hyphen', 'component-with-hyphen')
+ self.do_from_string('component destdir="foo/bar"', 'component', 'foo/bar')
+ self.do_from_string('component destdir="bar spc"', 'component', 'bar spc')
+ self.assertRaises(ErrorMessage, Component.from_string, '')
+ self.assertRaises(ErrorMessage, Component.from_string, 'component novalue=')
+ self.assertRaises(ErrorMessage, Component.from_string, 'component badoption=badvalue')
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_formats.py b/python/mozbuild/mozpack/test/test_packager_formats.py
new file mode 100644
index 000000000..1af4336b2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_formats.py
@@ -0,0 +1,428 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+import unittest
+from mozpack.packager.formats import (
+ FlatFormatter,
+ JarFormatter,
+ OmniJarFormatter,
+)
+from mozpack.copier import FileRegistry
+from mozpack.files import (
+ GeneratedFile,
+ ManifestFile,
+)
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestComponent,
+ ManifestResource,
+ ManifestBinaryComponent,
+)
+from mozpack.test.test_files import (
+ MockDest,
+ foo_xpt,
+ foo2_xpt,
+ bar_xpt,
+ read_interfaces,
+)
+import mozpack.path as mozpath
+
+
+CONTENTS = {
+ 'bases': {
+ # base_path: is_addon?
+ '': False,
+ 'app': False,
+ 'addon0': 'unpacked',
+ 'addon1': True,
+ },
+ 'manifests': [
+ ManifestContent('chrome/f', 'oo', 'oo/'),
+ ManifestContent('chrome/f', 'bar', 'oo/bar/'),
+ ManifestResource('chrome/f', 'foo', 'resource://bar/'),
+ ManifestBinaryComponent('components', 'foo.so'),
+ ManifestContent('app/chrome', 'content', 'foo/'),
+ ManifestComponent('app/components', '{foo-id}', 'foo.js'),
+ ManifestContent('addon0/chrome', 'content', 'foo/bar/'),
+ ManifestContent('addon1/chrome', 'content', 'foo/bar/'),
+ ],
+ 'files': {
+ 'chrome/f/oo/bar/baz': GeneratedFile('foobarbaz'),
+ 'chrome/f/oo/baz': GeneratedFile('foobaz'),
+ 'chrome/f/oo/qux': GeneratedFile('fooqux'),
+ 'components/foo.so': GeneratedFile('foo.so'),
+ 'components/foo.xpt': foo_xpt,
+ 'components/bar.xpt': bar_xpt,
+ 'foo': GeneratedFile('foo'),
+ 'app/chrome/foo/foo': GeneratedFile('appfoo'),
+ 'app/components/foo.js': GeneratedFile('foo.js'),
+ 'addon0/chrome/foo/bar/baz': GeneratedFile('foobarbaz'),
+ 'addon0/components/foo.xpt': foo2_xpt,
+ 'addon0/components/bar.xpt': bar_xpt,
+ 'addon1/chrome/foo/bar/baz': GeneratedFile('foobarbaz'),
+ 'addon1/components/foo.xpt': foo2_xpt,
+ 'addon1/components/bar.xpt': bar_xpt,
+ },
+}
+
+FILES = CONTENTS['files']
+
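+# Expected contents after formatting CONTENTS with FlatFormatter, keyed by
+# output path: manifest files as lists of lines, .xpt files merged into a
+# single components/interfaces.xpt per base, other files as-is.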
+RESULT_FLAT = {
+ 'chrome.manifest': [
+ 'manifest chrome/chrome.manifest',
+ 'manifest components/components.manifest',
+ ],
+ 'chrome/chrome.manifest': [
+ 'manifest f/f.manifest',
+ ],
+ 'chrome/f/f.manifest': [
+ 'content oo oo/',
+ 'content bar oo/bar/',
+ 'resource foo resource://bar/',
+ ],
+ 'chrome/f/oo/bar/baz': FILES['chrome/f/oo/bar/baz'],
+ 'chrome/f/oo/baz': FILES['chrome/f/oo/baz'],
+ 'chrome/f/oo/qux': FILES['chrome/f/oo/qux'],
+ 'components/components.manifest': [
+ 'binary-component foo.so',
+ 'interfaces interfaces.xpt',
+ ],
+ 'components/foo.so': FILES['components/foo.so'],
+ 'components/interfaces.xpt': {
+ 'foo': read_interfaces(foo_xpt.open())['foo'],
+ 'bar': read_interfaces(bar_xpt.open())['bar'],
+ },
+ 'foo': FILES['foo'],
+ 'app/chrome.manifest': [
+ 'manifest chrome/chrome.manifest',
+ 'manifest components/components.manifest',
+ ],
+ 'app/chrome/chrome.manifest': [
+ 'content content foo/',
+ ],
+ 'app/chrome/foo/foo': FILES['app/chrome/foo/foo'],
+ 'app/components/components.manifest': [
+ 'component {foo-id} foo.js',
+ ],
+ 'app/components/foo.js': FILES['app/components/foo.js'],
+}
+
+for addon in ('addon0', 'addon1'):
+ RESULT_FLAT.update({
+ mozpath.join(addon, p): f
+ for p, f in {
+ 'chrome.manifest': [
+ 'manifest chrome/chrome.manifest',
+ 'manifest components/components.manifest',
+ ],
+ 'chrome/chrome.manifest': [
+ 'content content foo/bar/',
+ ],
+ 'chrome/foo/bar/baz': FILES[mozpath.join(addon, 'chrome/foo/bar/baz')],
+ 'components/components.manifest': [
+ 'interfaces interfaces.xpt',
+ ],
+ 'components/interfaces.xpt': {
+ 'foo': read_interfaces(foo2_xpt.open())['foo'],
+ 'bar': read_interfaces(bar_xpt.open())['bar'],
+ },
+ }.iteritems()
+ })
+
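+# Expected contents with JarFormatter: chrome directories are packed into
+# .jar files referenced via jar: manifest entries, and addon1 (a packed
+# add-on) is wrapped whole into addon1.xpi.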
+RESULT_JAR = {
+ p: RESULT_FLAT[p]
+ for p in (
+ 'chrome.manifest',
+ 'chrome/chrome.manifest',
+ 'components/components.manifest',
+ 'components/foo.so',
+ 'components/interfaces.xpt',
+ 'foo',
+ 'app/chrome.manifest',
+ 'app/components/components.manifest',
+ 'app/components/foo.js',
+ 'addon0/chrome.manifest',
+ 'addon0/components/components.manifest',
+ 'addon0/components/interfaces.xpt',
+ )
+}
+
+RESULT_JAR.update({
+ 'chrome/f/f.manifest': [
+ 'content oo jar:oo.jar!/',
+ 'content bar jar:oo.jar!/bar/',
+ 'resource foo resource://bar/',
+ ],
+ 'chrome/f/oo.jar': {
+ 'bar/baz': FILES['chrome/f/oo/bar/baz'],
+ 'baz': FILES['chrome/f/oo/baz'],
+ 'qux': FILES['chrome/f/oo/qux'],
+ },
+ 'app/chrome/chrome.manifest': [
+ 'content content jar:foo.jar!/',
+ ],
+ 'app/chrome/foo.jar': {
+ 'foo': FILES['app/chrome/foo/foo'],
+ },
+ 'addon0/chrome/chrome.manifest': [
+ 'content content jar:foo.jar!/bar/',
+ ],
+ 'addon0/chrome/foo.jar': {
+ 'bar/baz': FILES['addon0/chrome/foo/bar/baz'],
+ },
+ 'addon1.xpi': {
+ mozpath.relpath(p, 'addon1'): f
+ for p, f in RESULT_FLAT.iteritems()
+ if p.startswith('addon1/')
+ },
+})
+
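+# Expected contents with OmniJarFormatter: resources are collected into an
+# omni.foo archive per base, while binary components, the top-level 'foo'
+# file and the add-ons stay outside of it.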
+RESULT_OMNIJAR = {
+ p: RESULT_FLAT[p]
+ for p in (
+ 'components/foo.so',
+ 'foo',
+ )
+}
+
+RESULT_OMNIJAR.update({
+ p: RESULT_JAR[p]
+ for p in RESULT_JAR
+ if p.startswith('addon')
+})
+
+RESULT_OMNIJAR.update({
+ 'omni.foo': {
+ 'components/components.manifest': [
+ 'interfaces interfaces.xpt',
+ ],
+ },
+ 'chrome.manifest': [
+ 'manifest components/components.manifest',
+ ],
+ 'components/components.manifest': [
+ 'binary-component foo.so',
+ ],
+ 'app/omni.foo': {
+ p: RESULT_FLAT['app/' + p]
+ for p in (
+ 'chrome.manifest',
+ 'chrome/chrome.manifest',
+ 'chrome/foo/foo',
+ 'components/components.manifest',
+ 'components/foo.js',
+ )
+ },
+ 'app/chrome.manifest': [],
+})
+
+RESULT_OMNIJAR['omni.foo'].update({
+ p: RESULT_FLAT[p]
+ for p in (
+ 'chrome.manifest',
+ 'chrome/chrome.manifest',
+ 'chrome/f/f.manifest',
+ 'chrome/f/oo/bar/baz',
+ 'chrome/f/oo/baz',
+ 'chrome/f/oo/qux',
+ 'components/interfaces.xpt',
+ )
+})
+
+CONTENTS_WITH_BASE = {
+ 'bases': {
+ mozpath.join('base/root', b) if b else 'base/root': a
+ for b, a in CONTENTS['bases'].iteritems()
+ },
+ 'manifests': [
+ m.move(mozpath.join('base/root', m.base))
+ for m in CONTENTS['manifests']
+ ],
+ 'files': {
+ mozpath.join('base/root', p): f
+ for p, f in CONTENTS['files'].iteritems()
+ },
+}
+
+EXTRA_CONTENTS = {
+ 'extra/file': GeneratedFile('extra file'),
+}
+
+CONTENTS_WITH_BASE['files'].update(EXTRA_CONTENTS)
+
+
+def result_with_base(results):
+ result = {
+ mozpath.join('base/root', p): v
+ for p, v in results.iteritems()
+ }
+ result.update(EXTRA_CONTENTS)
+ return result
+
+
+RESULT_FLAT_WITH_BASE = result_with_base(RESULT_FLAT)
+RESULT_JAR_WITH_BASE = result_with_base(RESULT_JAR)
+RESULT_OMNIJAR_WITH_BASE = result_with_base(RESULT_OMNIJAR)
+
+
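+# Shadow the imported MockDest with a variant that always reports the
+# destination as non-existent.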
+class MockDest(MockDest):
+ def exists(self):
+ return False
+
+
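+# Feed a CONTENTS-like dict into the given formatter: bases first, then
+# manifest entries, then files, with .xpt files going through
+# add_interfaces().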
+def fill_formatter(formatter, contents):
+ for base, is_addon in contents['bases'].items():
+ formatter.add_base(base, is_addon)
+
+ for manifest in contents['manifests']:
+ formatter.add_manifest(manifest)
+
+ for k, v in contents['files'].iteritems():
+ if k.endswith('.xpt'):
+ formatter.add_interfaces(k, v)
+ else:
+ formatter.add(k, v)
+
+
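+# Return a dict describing what the registry holds: merged interfaces for
+# .xpt entries, a nested dict for sub-registries (jars), lines for manifest
+# files (or for everything when read_all is set), and the raw entry
+# otherwise.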
+def get_contents(registry, read_all=False):
+ result = {}
+ for k, v in registry:
+ if k.endswith('.xpt'):
+ tmpfile = MockDest()
+ registry[k].copy(tmpfile)
+ result[k] = read_interfaces(tmpfile)
+ elif isinstance(v, FileRegistry):
+ result[k] = get_contents(v)
+ elif isinstance(v, ManifestFile) or read_all:
+ result[k] = v.open().read().splitlines()
+ else:
+ result[k] = v
+ return result
+
+
+class TestFormatters(unittest.TestCase):
+ maxDiff = None
+
+ def test_bases(self):
+ formatter = FlatFormatter(FileRegistry())
+ formatter.add_base('')
+ formatter.add_base('browser')
+ formatter.add_base('addon0', addon=True)
+ self.assertEqual(formatter._get_base('platform.ini'),
+ ('', 'platform.ini'))
+ self.assertEqual(formatter._get_base('browser/application.ini'),
+ ('browser', 'application.ini'))
+ self.assertEqual(formatter._get_base('addon0/install.rdf'),
+ ('addon0', 'install.rdf'))
+
+ def do_test_contents(self, formatter, contents):
+ for f in contents['files']:
+ # .xpt files are merged, so skip them.
+ if not f.endswith('.xpt'):
+ self.assertTrue(formatter.contains(f))
+
+ def test_flat_formatter(self):
+ registry = FileRegistry()
+ formatter = FlatFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_FLAT)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_jar_formatter(self):
+ registry = FileRegistry()
+ formatter = JarFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_JAR)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_omnijar_formatter(self):
+ registry = FileRegistry()
+ formatter = OmniJarFormatter(registry, 'omni.foo')
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_OMNIJAR)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_flat_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = FlatFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_FLAT_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_jar_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = JarFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_JAR_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_omnijar_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = OmniJarFormatter(registry, 'omni.foo')
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_omnijar_is_resource(self):
+ def is_resource(base, path):
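+            # Add a single file under the given base and report whether the
+            # formatter put it inside omni.foo (True) or left it outside on
+            # the copier (False).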
+ registry = FileRegistry()
+ f = OmniJarFormatter(registry, 'omni.foo', non_resources=[
+ 'defaults/messenger/mailViews.dat',
+ 'defaults/foo/*',
+ '*/dummy',
+ ])
+ f.add_base('')
+ f.add_base('app')
+ f.add(mozpath.join(base, path), GeneratedFile(''))
+ if f.copier.contains(mozpath.join(base, path)):
+ return False
+ self.assertTrue(f.copier.contains(mozpath.join(base, 'omni.foo')))
+ self.assertTrue(f.copier[mozpath.join(base, 'omni.foo')]
+ .contains(path))
+ return True
+
+ for base in ['', 'app/']:
+ self.assertTrue(is_resource(base, 'chrome'))
+ self.assertTrue(
+ is_resource(base, 'chrome/foo/bar/baz.properties'))
+ self.assertFalse(is_resource(base, 'chrome/icons/foo.png'))
+ self.assertTrue(is_resource(base, 'components/foo.js'))
+ self.assertFalse(is_resource(base, 'components/foo.so'))
+ self.assertTrue(is_resource(base, 'res/foo.css'))
+ self.assertFalse(is_resource(base, 'res/cursors/foo.png'))
+ self.assertFalse(is_resource(base, 'res/MainMenu.nib/foo'))
+ self.assertTrue(is_resource(base, 'defaults/pref/foo.js'))
+ self.assertFalse(
+ is_resource(base, 'defaults/pref/channel-prefs.js'))
+ self.assertTrue(
+ is_resource(base, 'defaults/preferences/foo.js'))
+ self.assertFalse(
+ is_resource(base, 'defaults/preferences/channel-prefs.js'))
+ self.assertTrue(is_resource(base, 'modules/foo.jsm'))
+ self.assertTrue(is_resource(base, 'greprefs.js'))
+ self.assertTrue(is_resource(base, 'hyphenation/foo'))
+ self.assertTrue(is_resource(base, 'update.locale'))
+ self.assertTrue(
+ is_resource(base, 'jsloader/resource/gre/modules/foo.jsm'))
+ self.assertFalse(is_resource(base, 'foo'))
+ self.assertFalse(is_resource(base, 'foo/bar/greprefs.js'))
+ self.assertTrue(is_resource(base, 'defaults/messenger/foo.dat'))
+ self.assertFalse(
+ is_resource(base, 'defaults/messenger/mailViews.dat'))
+ self.assertTrue(is_resource(base, 'defaults/pref/foo.js'))
+ self.assertFalse(is_resource(base, 'defaults/foo/bar.dat'))
+ self.assertFalse(is_resource(base, 'defaults/foo/bar/baz.dat'))
+ self.assertTrue(is_resource(base, 'chrome/foo/bar/baz/dummy_'))
+ self.assertFalse(is_resource(base, 'chrome/foo/bar/baz/dummy'))
+ self.assertTrue(is_resource(base, 'chrome/foo/bar/dummy_'))
+ self.assertFalse(is_resource(base, 'chrome/foo/bar/dummy'))
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_l10n.py b/python/mozbuild/mozpack/test/test_packager_l10n.py
new file mode 100644
index 000000000..c797eadd1
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_l10n.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+from test_packager import MockFinder
+from mozpack.packager import l10n
+from mozpack.files import (
+ GeneratedFile,
+ ManifestFile,
+)
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestLocale,
+ ManifestContent,
+)
+from mozpack.copier import FileRegistry
+from mozpack.packager.formats import FlatFormatter
+
+
+class TestL10NRepack(unittest.TestCase):
+ def test_l10n_repack(self):
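+        # app_finder simulates the original en-US package and l10n_finder
+        # the x-test locale files; the extra patterns passed to _repack
+        # below ('dict', 'chrome/**/search/*.xml') mark non-chrome paths
+        # that are replaced from the l10n side as well.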
+ foo = GeneratedFile('foo')
+ foobar = GeneratedFile('foobar')
+ qux = GeneratedFile('qux')
+ bar = GeneratedFile('bar')
+ baz = GeneratedFile('baz')
+ dict_aa = GeneratedFile('dict_aa')
+ dict_bb = GeneratedFile('dict_bb')
+ dict_cc = GeneratedFile('dict_cc')
+ barbaz = GeneratedFile('barbaz')
+ lst = GeneratedFile('foo\nbar')
+ app_finder = MockFinder({
+ 'bar/foo': foo,
+ 'chrome/foo/foobar': foobar,
+ 'chrome/qux/qux.properties': qux,
+ 'chrome/qux/baz/baz.properties': baz,
+ 'chrome/chrome.manifest': ManifestFile('chrome', [
+ ManifestContent('chrome', 'foo', 'foo/'),
+ ManifestLocale('chrome', 'qux', 'en-US', 'qux/'),
+ ]),
+ 'chrome.manifest':
+ ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]),
+ 'dict/aa': dict_aa,
+ 'app/chrome/bar/barbaz.dtd': barbaz,
+ 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [
+ ManifestLocale('app/chrome', 'bar', 'en-US', 'bar/')
+ ]),
+ 'app/chrome.manifest':
+ ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]),
+ 'app/dict/bb': dict_bb,
+ 'app/dict/cc': dict_cc,
+ 'app/chrome/bar/search/foo.xml': foo,
+ 'app/chrome/bar/search/bar.xml': bar,
+ 'app/chrome/bar/search/lst.txt': lst,
+ })
+ app_finder.jarlogs = {}
+ app_finder.base = 'app'
+ foo_l10n = GeneratedFile('foo_l10n')
+ qux_l10n = GeneratedFile('qux_l10n')
+ baz_l10n = GeneratedFile('baz_l10n')
+ barbaz_l10n = GeneratedFile('barbaz_l10n')
+ lst_l10n = GeneratedFile('foo\nqux')
+ l10n_finder = MockFinder({
+ 'chrome/qux-l10n/qux.properties': qux_l10n,
+ 'chrome/qux-l10n/baz/baz.properties': baz_l10n,
+ 'chrome/chrome.manifest': ManifestFile('chrome', [
+ ManifestLocale('chrome', 'qux', 'x-test', 'qux-l10n/'),
+ ]),
+ 'chrome.manifest':
+ ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]),
+ 'dict/bb': dict_bb,
+ 'dict/cc': dict_cc,
+ 'app/chrome/bar-l10n/barbaz.dtd': barbaz_l10n,
+ 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [
+ ManifestLocale('app/chrome', 'bar', 'x-test', 'bar-l10n/')
+ ]),
+ 'app/chrome.manifest':
+ ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]),
+ 'app/dict/aa': dict_aa,
+ 'app/chrome/bar-l10n/search/foo.xml': foo_l10n,
+ 'app/chrome/bar-l10n/search/qux.xml': qux_l10n,
+ 'app/chrome/bar-l10n/search/lst.txt': lst_l10n,
+ })
+ l10n_finder.base = 'l10n'
+ copier = FileRegistry()
+ formatter = FlatFormatter(copier)
+
+ l10n._repack(app_finder, l10n_finder, copier, formatter,
+ ['dict', 'chrome/**/search/*.xml'])
+ self.maxDiff = None
+
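+        # Expected output: localizable files and manifests now point at the
+        # x-test -l10n directories, while non-localized files are unchanged.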
+ repacked = {
+ 'bar/foo': foo,
+ 'chrome/foo/foobar': foobar,
+ 'chrome/qux-l10n/qux.properties': qux_l10n,
+ 'chrome/qux-l10n/baz/baz.properties': baz_l10n,
+ 'chrome/chrome.manifest': ManifestFile('chrome', [
+ ManifestContent('chrome', 'foo', 'foo/'),
+ ManifestLocale('chrome', 'qux', 'x-test', 'qux-l10n/'),
+ ]),
+ 'chrome.manifest':
+ ManifestFile('', [Manifest('', 'chrome/chrome.manifest')]),
+ 'dict/bb': dict_bb,
+ 'dict/cc': dict_cc,
+ 'app/chrome/bar-l10n/barbaz.dtd': barbaz_l10n,
+ 'app/chrome/chrome.manifest': ManifestFile('app/chrome', [
+ ManifestLocale('app/chrome', 'bar', 'x-test', 'bar-l10n/')
+ ]),
+ 'app/chrome.manifest':
+ ManifestFile('app', [Manifest('app', 'chrome/chrome.manifest')]),
+ 'app/dict/aa': dict_aa,
+ 'app/chrome/bar-l10n/search/foo.xml': foo_l10n,
+ 'app/chrome/bar-l10n/search/qux.xml': qux_l10n,
+ 'app/chrome/bar-l10n/search/lst.txt': lst_l10n,
+ }
+
+ self.assertEqual(
+ dict((p, f.open().read()) for p, f in copier),
+ dict((p, f.open().read()) for p, f in repacked.iteritems())
+ )
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_unpack.py b/python/mozbuild/mozpack/test/test_packager_unpack.py
new file mode 100644
index 000000000..d201cabf7
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_unpack.py
@@ -0,0 +1,65 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+from mozpack.packager.formats import (
+ FlatFormatter,
+ JarFormatter,
+ OmniJarFormatter,
+)
+from mozpack.packager.unpack import unpack_to_registry
+from mozpack.copier import (
+ FileCopier,
+ FileRegistry,
+)
+from mozpack.test.test_packager_formats import (
+ CONTENTS,
+ fill_formatter,
+ get_contents,
+)
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestUnpack(TestWithTmpDir):
+ maxDiff = None
+
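+    # Build a FileCopier populated with CONTENTS using the given formatter
+    # class; the class is passed explicitly because this is a staticmethod.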
+ @staticmethod
+ def _get_copier(cls):
+ copier = FileCopier()
+ formatter = cls(copier)
+ fill_formatter(formatter, CONTENTS)
+ return copier
+
+ @classmethod
+ def setUpClass(cls):
+ cls.contents = get_contents(cls._get_copier(FlatFormatter),
+ read_all=True)
+
+ def _unpack_test(self, cls):
+ # Format a package with the given formatter class
+ copier = self._get_copier(cls)
+ copier.copy(self.tmpdir)
+
+ # Unpack that package. Its content is expected to match that of a Flat
+ # formatted package.
+ registry = FileRegistry()
+ unpack_to_registry(self.tmpdir, registry)
+ self.assertEqual(get_contents(registry, read_all=True), self.contents)
+
+ def test_flat_unpack(self):
+ self._unpack_test(FlatFormatter)
+
+ def test_jar_unpack(self):
+ self._unpack_test(JarFormatter)
+
+ def test_omnijar_unpack(self):
+ class OmniFooFormatter(OmniJarFormatter):
+ def __init__(self, registry):
+ super(OmniFooFormatter, self).__init__(registry, 'omni.foo')
+
+ self._unpack_test(OmniFooFormatter)
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_path.py b/python/mozbuild/mozpack/test/test_path.py
new file mode 100644
index 000000000..ee41e4a69
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_path.py
@@ -0,0 +1,143 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.path import (
+ relpath,
+ join,
+ normpath,
+ dirname,
+ commonprefix,
+ basename,
+ split,
+ splitext,
+ basedir,
+ match,
+ rebase,
+)
+import unittest
+import mozunit
+import os
+
+
+class TestPath(unittest.TestCase):
+ SEP = os.sep
+
+ def test_relpath(self):
+ self.assertEqual(relpath('foo', 'foo'), '')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar'), '')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo'), 'bar')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar', 'baz')), 'foo'),
+ 'bar/baz')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar/baz'),
+ '..')
+ self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/baz'),
+ '../bar')
+ self.assertEqual(relpath('foo/', 'foo'), '')
+ self.assertEqual(relpath('foo/bar/', 'foo'), 'bar')
+
+ def test_join(self):
+ self.assertEqual(join('foo', 'bar', 'baz'), 'foo/bar/baz')
+ self.assertEqual(join('foo', '', 'bar'), 'foo/bar')
+ self.assertEqual(join('', 'foo', 'bar'), 'foo/bar')
+ self.assertEqual(join('', 'foo', '/bar'), '/bar')
+
+ def test_normpath(self):
+ self.assertEqual(normpath(self.SEP.join(('foo', 'bar', 'baz',
+ '..', 'qux'))), 'foo/bar/qux')
+
+ def test_dirname(self):
+ self.assertEqual(dirname('foo/bar/baz'), 'foo/bar')
+ self.assertEqual(dirname('foo/bar'), 'foo')
+ self.assertEqual(dirname('foo'), '')
+ self.assertEqual(dirname('foo/bar/'), 'foo/bar')
+
+ def test_commonprefix(self):
+ self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+ 'foo/qux', 'foo/baz/qux']), 'foo/')
+ self.assertEqual(commonprefix([self.SEP.join(('foo', 'bar', 'baz')),
+ 'foo/qux', 'baz/qux']), '')
+
+ def test_basename(self):
+ self.assertEqual(basename('foo/bar/baz'), 'baz')
+ self.assertEqual(basename('foo/bar'), 'bar')
+ self.assertEqual(basename('foo'), 'foo')
+ self.assertEqual(basename('foo/bar/'), '')
+
+ def test_split(self):
+ self.assertEqual(split(self.SEP.join(('foo', 'bar', 'baz'))),
+ ['foo', 'bar', 'baz'])
+
+ def test_splitext(self):
+ self.assertEqual(splitext(self.SEP.join(('foo', 'bar', 'baz.qux'))),
+ ('foo/bar/baz', '.qux'))
+
+ def test_basedir(self):
+ foobarbaz = self.SEP.join(('foo', 'bar', 'baz'))
+ self.assertEqual(basedir(foobarbaz, ['foo', 'bar', 'baz']), 'foo')
+ self.assertEqual(basedir(foobarbaz, ['foo', 'foo/bar', 'baz']),
+ 'foo/bar')
+ self.assertEqual(basedir(foobarbaz, ['foo/bar', 'foo', 'baz']),
+ 'foo/bar')
+ self.assertEqual(basedir(foobarbaz, ['foo', 'bar', '']), 'foo')
+ self.assertEqual(basedir(foobarbaz, ['bar', 'baz', '']), '')
+
+ def test_match(self):
+ self.assertTrue(match('foo', ''))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/bar'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo'))
+ self.assertTrue(match('foo', '*'))
+        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/*/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '*/*/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/b*/*z.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/b*r/ba*z.qux'))
+ self.assertFalse(match('foo/bar/baz.qux', 'foo/b*z/ba*r.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/foo/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/baz.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
+ self.assertTrue(match('foo/bar/baz.qux', '**/*.qux'))
+ self.assertFalse(match('foo/bar/baz.qux', '**.qux'))
+ self.assertFalse(match('foo/bar', 'foo/*/bar'))
+ self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/**'))
+ self.assertFalse(match('foo/nobar/baz.qux', 'foo/**/bar/**'))
+ self.assertTrue(match('foo/bar', 'foo/**/bar/**'))
+
+ def test_rebase(self):
+ self.assertEqual(rebase('foo', 'foo/bar', 'bar/baz'), 'baz')
+ self.assertEqual(rebase('foo', 'foo', 'bar/baz'), 'bar/baz')
+ self.assertEqual(rebase('foo/bar', 'foo', 'baz'), 'bar/baz')
+
+
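+# On platforms that define an alternative separator (os.altsep is '/' on
+# Windows), rerun the tests with the separators swapped to cover both.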
+if os.altsep:
+ class TestAltPath(TestPath):
+ SEP = os.altsep
+
+ class TestReverseAltPath(TestPath):
+ def setUp(self):
+ sep = os.sep
+ os.sep = os.altsep
+ os.altsep = sep
+
+ def tearDown(self):
+ self.setUp()
+
+ class TestAltReverseAltPath(TestReverseAltPath):
+ SEP = os.altsep
+
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_unify.py b/python/mozbuild/mozpack/test/test_unify.py
new file mode 100644
index 000000000..a2bbb4470
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_unify.py
@@ -0,0 +1,199 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ensureParentDir
+
+from mozpack.unify import (
+ UnifiedFinder,
+ UnifiedBuildFinder,
+)
+import mozunit
+from mozpack.test.test_files import TestWithTmpDir
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+from mozpack.test.test_files import MockDest
+from cStringIO import StringIO
+import os
+import sys
+from mozpack.errors import (
+ ErrorMessage,
+ AccumulatedErrors,
+ errors,
+)
+
+
+class TestUnified(TestWithTmpDir):
+    def create_one(self, which, path, content):
+        dest = self.tmppath(os.path.join(which, path))
+        ensureParentDir(dest)
+        with open(dest, 'wb') as fh:
+            fh.write(content)
+
+ def create_both(self, path, content):
+ for p in ['a', 'b']:
+ self.create_one(p, path, content)
+
+
+class TestUnifiedFinder(TestUnified):
+ def test_unified_finder(self):
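+        # Identical files are returned once; files that differ or exist on
+        # only one side raise an error when the result is iterated; paths
+        # under the 'sorted' list only need to match after sorting lines.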
+ self.create_both('foo/bar', 'foobar')
+ self.create_both('foo/baz', 'foobaz')
+ self.create_one('a', 'bar', 'bar')
+ self.create_one('b', 'baz', 'baz')
+ self.create_one('a', 'qux', 'foobar')
+ self.create_one('b', 'qux', 'baz')
+ self.create_one('a', 'test/foo', 'a\nb\nc\n')
+ self.create_one('b', 'test/foo', 'b\nc\na\n')
+ self.create_both('test/bar', 'a\nb\nc\n')
+
+ finder = UnifiedFinder(FileFinder(self.tmppath('a')),
+ FileFinder(self.tmppath('b')),
+ sorted=['test'])
+ self.assertEqual(sorted([(f, c.open().read())
+ for f, c in finder.find('foo')]),
+ [('foo/bar', 'foobar'), ('foo/baz', 'foobaz')])
+ self.assertRaises(ErrorMessage, any, finder.find('bar'))
+ self.assertRaises(ErrorMessage, any, finder.find('baz'))
+ self.assertRaises(ErrorMessage, any, finder.find('qux'))
+ self.assertEqual(sorted([(f, c.open().read())
+ for f, c in finder.find('test')]),
+ [('test/bar', 'a\nb\nc\n'),
+ ('test/foo', 'a\nb\nc\n')])
+
+
+class TestUnifiedBuildFinder(TestUnified):
+ def test_unified_build_finder(self):
+ finder = UnifiedBuildFinder(FileFinder(self.tmppath('a')),
+ FileFinder(self.tmppath('b')))
+
+ # Test chrome.manifest unification
+ self.create_both('chrome.manifest', 'a\nb\nc\n')
+ self.create_one('a', 'chrome/chrome.manifest', 'a\nb\nc\n')
+ self.create_one('b', 'chrome/chrome.manifest', 'b\nc\na\n')
+ self.assertEqual(sorted([(f, c.open().read()) for f, c in
+ finder.find('**/chrome.manifest')]),
+ [('chrome.manifest', 'a\nb\nc\n'),
+ ('chrome/chrome.manifest', 'a\nb\nc\n')])
+
+ # Test buildconfig.html unification
+ self.create_one('a', 'chrome/browser/foo/buildconfig.html',
+ '\n'.join([
+ '<html>',
+ '<body>',
+ '<h1>about:buildconfig</h1>',
+ '<div>foo</div>',
+ '</body>',
+ '</html>',
+ ]))
+ self.create_one('b', 'chrome/browser/foo/buildconfig.html',
+ '\n'.join([
+ '<html>',
+ '<body>',
+ '<h1>about:buildconfig</h1>',
+ '<div>bar</div>',
+ '</body>',
+ '</html>',
+ ]))
+ self.assertEqual(sorted([(f, c.open().read()) for f, c in
+ finder.find('**/buildconfig.html')]),
+ [('chrome/browser/foo/buildconfig.html', '\n'.join([
+ '<html>',
+ '<body>',
+ '<h1>about:buildconfig</h1>',
+ '<div>foo</div>',
+ '<hr> </hr>',
+ '<div>bar</div>',
+ '</body>',
+ '</html>',
+ ]))])
+
+ # Test xpi file unification
+ xpi = MockDest()
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add('foo', 'foo')
+ jar.add('bar', 'bar')
+ foo_xpi = xpi.read()
+ self.create_both('foo.xpi', foo_xpi)
+
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add('foo', 'bar')
+ self.create_one('a', 'bar.xpi', foo_xpi)
+ self.create_one('b', 'bar.xpi', xpi.read())
+
+ errors.out = StringIO()
+ with self.assertRaises(AccumulatedErrors), errors.accumulate():
+ self.assertEqual([(f, c.open().read()) for f, c in
+ finder.find('*.xpi')],
+ [('foo.xpi', foo_xpi)])
+ errors.out = sys.stderr
+
+ # Test install.rdf unification
+ x86_64 = 'Darwin_x86_64-gcc3'
+ x86 = 'Darwin_x86-gcc3'
+ target_tag = '<{em}targetPlatform>{platform}</{em}targetPlatform>'
+ target_attr = '{em}targetPlatform="{platform}" '
+
+ rdf_tag = ''.join([
+ '<{RDF}Description {em}bar="bar" {em}qux="qux">',
+ '<{em}foo>foo</{em}foo>',
+ '{targets}',
+ '<{em}baz>baz</{em}baz>',
+ '</{RDF}Description>'
+ ])
+ rdf_attr = ''.join([
+ '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
+ '{targets}',
+ '<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>',
+ '</{RDF}Description>'
+ ])
+
+ for descr_ns, target_ns in (('RDF:', ''), ('', 'em:'), ('RDF:', 'em:')):
+ # First we need to infuse the above strings with our namespaces and
+ # platform values.
+ ns = { 'RDF': descr_ns, 'em': target_ns }
+ target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
+ target_tag_x86 = target_tag.format(platform=x86, **ns)
+ target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
+ target_attr_x86 = target_attr.format(platform=x86, **ns)
+
+ tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
+ tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
+ tag_merged = rdf_tag.format(targets=target_tag_x86_64 + target_tag_x86, **ns)
+ tag_empty = rdf_tag.format(targets="", **ns)
+
+ attr_x86_64 = rdf_attr.format(attr=target_attr_x86_64, targets="", **ns)
+ attr_x86 = rdf_attr.format(attr=target_attr_x86, targets="", **ns)
+ attr_merged = rdf_attr.format(attr="", targets=target_tag_x86_64 + target_tag_x86, **ns)
+
+ # This table defines the test cases, columns "a" and "b" being the
+ # contents of the install.rdf of the respective platform and
+            # "result" the expected merged content after unification.
+ testcases = (
+ #_____a_____ _____b_____ ___result___#
+ (tag_x86_64, tag_x86, tag_merged ),
+ (tag_x86_64, tag_empty, tag_empty ),
+ (tag_empty, tag_x86, tag_empty ),
+ (tag_empty, tag_empty, tag_empty ),
+
+ (attr_x86_64, attr_x86, attr_merged ),
+ (tag_x86_64, attr_x86, tag_merged ),
+ (attr_x86_64, tag_x86, attr_merged ),
+
+ (attr_x86_64, tag_empty, tag_empty ),
+ (tag_empty, attr_x86, tag_empty )
+ )
+
+            # Now create the files from the above table and compare the
+            # unified output with the expected results.
+ results = []
+ for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
+ filename = 'ext/id{0}/install.rdf'.format(emid)
+ self.create_one('a', filename, rdf_a)
+ self.create_one('b', filename, rdf_b)
+ results.append((filename, result))
+
+ self.assertEqual(sorted([(f, c.open().read()) for f, c in
+ finder.find('**/install.rdf')]), results)
+
+
+if __name__ == '__main__':
+ mozunit.main()