summaryrefslogtreecommitdiffstats
path: root/tools
diff options
context:
space:
mode:
Diffstat (limited to 'tools')
-rwxr-xr-xtools/isolate/data/isolate/child.py42
-rw-r--r--tools/isolate/data/isolate/fail.isolate18
-rw-r--r--tools/isolate/data/isolate/files1/test_file1.txt (renamed from tools/isolate/data/isolate/test_file1.txt)0
-rw-r--r--tools/isolate/data/isolate/files1/test_file2.txt (renamed from tools/isolate/data/isolate/test_file2.txt)0
-rw-r--r--tools/isolate/data/isolate/missing_trailing_slash.isolate11
-rw-r--r--tools/isolate/data/isolate/no_run.isolate13
-rw-r--r--tools/isolate/data/isolate/non_existent.isolate10
-rw-r--r--tools/isolate/data/isolate/ok.isolate18
-rwxr-xr-xtools/isolate/isolate.py389
-rwxr-xr-xtools/isolate/isolate_smoke_test.py186
-rwxr-xr-xtools/isolate/isolate_test.py23
-rwxr-xr-xtools/isolate/merge_isolate.py458
-rwxr-xr-xtools/isolate/merge_isolate_test.py351
-rwxr-xr-xtools/isolate/trace_inputs.py23
-rwxr-xr-xtools/isolate/trace_inputs_smoke_test.py4
15 files changed, 995 insertions, 551 deletions
diff --git a/tools/isolate/data/isolate/child.py b/tools/isolate/data/isolate/child.py
new file mode 100755
index 0000000..b3f1854e
--- /dev/null
+++ b/tools/isolate/data/isolate/child.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+def main():
+ print 'child: verify the test data files were mapped properly'
+ files = sorted(os.listdir('files1'))
+ tree = {
+ 'test_file1.txt': 'Foo\n',
+ 'test_file2.txt': 'Bar\n',
+ }
+
+ # For now, ignore .svn directory, which happens to be there with --mode=trace
+ # from a svn checkout. The file shouldn't be there when --mode=run is used.
+ # TODO(maruel): Differentiate between the two modes and detect .svn
+ # directories in --mode=run.
+ if '.svn' in files:
+ files.remove('.svn')
+
+ if files != sorted(tree):
+ print '%s != %s' % (files, sorted(tree))
+ return 2
+ for k, v in tree.iteritems():
+ content = open(os.path.join('files1', k), 'rb').read()
+ if v != content:
+ print '%s: %r != %r' % (k, v, content)
+ return 3
+
+ if sys.argv[1] == '--ok':
+ return 0
+ elif sys.argv[1] == '--fail':
+ return 1
+ return 4
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/tools/isolate/data/isolate/fail.isolate b/tools/isolate/data/isolate/fail.isolate
new file mode 100644
index 0000000..9d2744d
--- /dev/null
+++ b/tools/isolate/data/isolate/fail.isolate
@@ -0,0 +1,18 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'variables': {
+ 'command': [
+ 'python',
+ 'child.py',
+ '--fail',
+ ],
+ 'isolate_dependency_tracked': [
+ '<(DEPTH)/data/isolate/child.py',
+ ],
+ 'isolate_dependency_untracked': [
+ 'files1/',
+ ],
+ },
+}
diff --git a/tools/isolate/data/isolate/test_file1.txt b/tools/isolate/data/isolate/files1/test_file1.txt
index bc56c4d..bc56c4d 100644
--- a/tools/isolate/data/isolate/test_file1.txt
+++ b/tools/isolate/data/isolate/files1/test_file1.txt
diff --git a/tools/isolate/data/isolate/test_file2.txt b/tools/isolate/data/isolate/files1/test_file2.txt
index ebd7525..ebd7525 100644
--- a/tools/isolate/data/isolate/test_file2.txt
+++ b/tools/isolate/data/isolate/files1/test_file2.txt
diff --git a/tools/isolate/data/isolate/missing_trailing_slash.isolate b/tools/isolate/data/isolate/missing_trailing_slash.isolate
new file mode 100644
index 0000000..676d102
--- /dev/null
+++ b/tools/isolate/data/isolate/missing_trailing_slash.isolate
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'variables': {
+ 'isolate_dependency_untracked': [
+ # Directory missing trailing slash.
+ 'files1',
+ ],
+ },
+}
diff --git a/tools/isolate/data/isolate/no_run.isolate b/tools/isolate/data/isolate/no_run.isolate
new file mode 100644
index 0000000..d0b6aea
--- /dev/null
+++ b/tools/isolate/data/isolate/no_run.isolate
@@ -0,0 +1,13 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'variables': {
+ 'isolate_dependency_tracked': [
+ '<(DEPTH)/data/isolate/child.py',
+ ],
+ 'isolate_dependency_untracked': [
+ 'files1/',
+ ],
+ },
+}
diff --git a/tools/isolate/data/isolate/non_existent.isolate b/tools/isolate/data/isolate/non_existent.isolate
new file mode 100644
index 0000000..8e344d9
--- /dev/null
+++ b/tools/isolate/data/isolate/non_existent.isolate
@@ -0,0 +1,10 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'variables': {
+ 'isolate_dependency_tracked': [
+ 'A_file_that_do_not_exist',
+ ],
+ },
+}
diff --git a/tools/isolate/data/isolate/ok.isolate b/tools/isolate/data/isolate/ok.isolate
new file mode 100644
index 0000000..db2f423
--- /dev/null
+++ b/tools/isolate/data/isolate/ok.isolate
@@ -0,0 +1,18 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'variables': {
+ 'command': [
+ 'python',
+ 'child.py',
+ '--ok',
+ ],
+ 'isolate_dependency_tracked': [
+ '<(DEPTH)/data/isolate/child.py',
+ ],
+ 'isolate_dependency_untracked': [
+ 'files1/',
+ ],
+ },
+}
diff --git a/tools/isolate/isolate.py b/tools/isolate/isolate.py
index a2e87fe..41320f3 100755
--- a/tools/isolate/isolate.py
+++ b/tools/isolate/isolate.py
@@ -30,9 +30,13 @@ import subprocess
import sys
import tempfile
+import merge_isolate
import trace_inputs
import run_test_from_archive
+# Used by process_inputs().
+NO_INFO, STATS_ONLY, WITH_HASH = range(56, 59)
+
def relpath(path, root):
"""os.path.relpath() that keeps trailing slash."""
@@ -45,6 +49,14 @@ def relpath(path, root):
return out
+def normpath(path):
+ """os.path.normpath() that keeps trailing slash."""
+ out = os.path.normpath(path)
+ if path.endswith(('/', os.path.sep)):
+ out += os.path.sep
+ return out
+
+
def to_relative(path, root, relative):
"""Converts any absolute path to a relative path, only if under root."""
if sys.platform == 'win32':
@@ -67,7 +79,7 @@ def expand_directories(indir, infiles, blacklist):
if os.path.isabs(relfile):
raise run_test_from_archive.MappingError(
'Can\'t map absolute path %s' % relfile)
- infile = os.path.normpath(os.path.join(indir, relfile))
+ infile = normpath(os.path.join(indir, relfile))
if not infile.startswith(indir):
raise run_test_from_archive.MappingError(
'Can\'t map file %s outside %s' % (infile, indir))
@@ -75,16 +87,21 @@ def expand_directories(indir, infiles, blacklist):
if relfile.endswith(os.path.sep):
if not os.path.isdir(infile):
raise run_test_from_archive.MappingError(
- 'Input directory %s must have a trailing slash' % infile)
+ '%s is not a directory' % infile)
for dirpath, dirnames, filenames in os.walk(infile):
# Convert the absolute path to subdir + relative subdirectory.
reldirpath = dirpath[len(indir)+1:]
- outfiles.extend(os.path.join(reldirpath, f) for f in filenames)
+ files_to_add = (os.path.join(reldirpath, f) for f in filenames)
+ outfiles.extend(f for f in files_to_add if not blacklist(f))
for index, dirname in enumerate(dirnames):
# Do not process blacklisted directories.
if blacklist(os.path.join(reldirpath, dirname)):
del dirnames[index]
else:
+ # Always add individual files even if they were blacklisted.
+ if os.path.isdir(infile):
+ raise run_test_from_archive.MappingError(
+ 'Input directory %s must have a trailing slash' % infile)
if not os.path.isfile(infile):
raise run_test_from_archive.MappingError(
'Input file %s doesn\'t exist' % infile)
@@ -92,29 +109,84 @@ def expand_directories(indir, infiles, blacklist):
return outfiles
-def process_inputs(indir, infiles, need_hash, read_only):
+def replace_variable(part, variables):
+ m = re.match(r'<\(([A-Z_]+)\)', part)
+ if m:
+ return variables[m.group(1)]
+ return part
+
+
+def eval_variables(item, variables):
+ return ''.join(
+ replace_variable(p, variables) for p in re.split(r'(<\([A-Z_]+\))', item))
+
+
+def load_isolate(content, variables, error):
+ """Loads the .isolate file. Returns the command, dependencies and read_only
+ flag.
+ """
+ # Load the .isolate file, process its conditions, retrieve the command and
+ # dependencies.
+ configs = merge_isolate.load_gyp(merge_isolate.eval_content(content))
+ flavor = trace_inputs.get_flavor()
+ config = configs.per_os.get(flavor) or configs.per_os.get(None)
+ if not config:
+ error('Failed to load configuration for \'%s\'' % flavor)
+
+ # Convert the variables and merge tracked and untracked dependencies.
+ # isolate.py doesn't care about the trackability of the dependencies.
+ infiles = [
+ eval_variables(f, variables) for f in config.tracked
+ ] + [
+ eval_variables(f, variables) for f in config.untracked
+ ]
+ command = [eval_variables(i, variables) for i in config.command]
+ return command, infiles, config.read_only
+
+
+def process_inputs(prevdict, indir, infiles, level, read_only):
"""Returns a dictionary of input files, populated with the files' mode and
hash.
+ |prevdict| is the previous dictionary. It is used to retrieve the cached sha-1
+ to skip recalculating the hash.
+
+ |level| determines the amount of information retrieved.
+ 1 loads no information. 2 loads minimal stat() information. 3 calculates the
+ sha-1 of the file's content.
+
The file mode is manipulated if read_only is True. In practice, we only save
one of 4 modes: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r).
"""
+ assert level in (NO_INFO, STATS_ONLY, WITH_HASH)
outdict = {}
for infile in infiles:
filepath = os.path.join(indir, infile)
- filemode = stat.S_IMODE(os.stat(filepath).st_mode)
- # Remove write access for non-owner.
- filemode &= ~(stat.S_IWGRP | stat.S_IWOTH)
- if read_only:
- filemode &= ~stat.S_IWUSR
- if filemode & stat.S_IXUSR:
- filemode |= (stat.S_IXGRP | stat.S_IXOTH)
- else:
- filemode &= ~(stat.S_IXGRP | stat.S_IXOTH)
- outdict[infile] = {
- 'mode': filemode,
- }
- if need_hash:
+ outdict[infile] = {}
+ if level >= STATS_ONLY:
+ filestats = os.stat(filepath)
+ filemode = stat.S_IMODE(filestats.st_mode)
+ # Remove write access for non-owner.
+ filemode &= ~(stat.S_IWGRP | stat.S_IWOTH)
+ if read_only:
+ filemode &= ~stat.S_IWUSR
+ if filemode & stat.S_IXUSR:
+ filemode |= (stat.S_IXGRP | stat.S_IXOTH)
+ else:
+ filemode &= ~(stat.S_IXGRP | stat.S_IXOTH)
+ outdict[infile]['mode'] = filemode
+ outdict[infile]['size'] = filestats.st_size
+ # Used to skip recalculating the hash. Use the most recent update time.
+ outdict[infile]['timestamp'] = int(round(
+ max(filestats.st_mtime, filestats.st_ctime)))
+ # If the timestamp wasn't updated, carry on the sha-1.
+ if (prevdict.get(infile, {}).get('timestamp') ==
+ outdict[infile]['timestamp'] and
+ 'sha-1' in prevdict[infile]):
+ # Reuse the previous hash.
+ outdict[infile]['sha-1'] = prevdict[infile]['sha-1']
+
+ if level >= WITH_HASH and not outdict[infile].get('sha-1'):
h = hashlib.sha1()
with open(filepath, 'rb') as f:
h.update(f.read())
@@ -155,87 +227,92 @@ def recreate_tree(outdir, indir, infiles, action):
run_test_from_archive.link_file(outfile, infile, action)
-def separate_inputs_command(args, root, files):
- """Strips off the command line from the inputs.
-
- gyp provides input paths relative to cwd. Convert them to be relative to root.
- OptionParser kindly strips off '--' from sys.argv if it's provided and that's
- the first non-arg value. Manually look up if it was present in sys.argv.
- """
- cmd = []
- if '--' in args:
- i = args.index('--')
- cmd = args[i+1:]
- args = args[:i]
- elif '--' in sys.argv:
- # optparse is messing with us. Fix it manually.
- cmd = args
- args = []
- if files:
- args = [
- i.decode('utf-8') for i in open(files, 'rb').read().splitlines() if i
- ] + args
- cwd = os.getcwd()
- return [relpath(os.path.join(cwd, arg), root) for arg in args], cmd
-
-
-def isolate(outdir, resultfile, indir, infiles, mode, read_only, cmd, no_save):
+def isolate(
+ outdir, indir, infiles, mode, read_only, cmd, relative_cwd, resultfile):
"""Main function to isolate a target with its dependencies.
Arguments:
- outdir: Output directory where the result is stored. Depends on |mode|.
- - resultfile: File to save the json data.
- indir: Root directory to be used as the base directory for infiles.
- infiles: List of files, with relative path, to process.
- mode: Action to do. See file level docstring.
- read_only: Makes the temporary directory read only.
- cmd: Command to execute.
- - no_save: If True, do not touch resultfile.
+ - relative_cwd: Directory relative to the base directory where to start the
+ command from. In general, this path will be the path
+ containing the gyp file where the target was defined. This
+ relative directory may be created implicitely if a file from
+ this directory is needed to run the test. Otherwise it won't
+ be created and the process creation will fail. It's up to the
+ caller to create this directory manually before starting the
+ test.
+ - resultfile: Path where to read and write the metadata.
Some arguments are optional, dependending on |mode|. See the corresponding
MODE<mode> function for the exact behavior.
"""
mode_fn = getattr(sys.modules[__name__], 'MODE' + mode)
assert mode_fn
- assert os.path.isabs(resultfile)
- infiles = expand_directories(
- indir, infiles, lambda x: re.match(r'.*\.(svn|pyc)$', x))
-
- # Note the relative current directory.
- # In general, this path will be the path containing the gyp file where the
- # target was defined. This relative directory may be created implicitely if a
- # file from this directory is needed to run the test. Otherwise it won't be
- # created and the process creation will fail. It's up to the caller to create
- # this directory manually before starting the test.
- cwd = os.getcwd()
- relative_cwd = os.path.relpath(cwd, indir)
+ # Load the previous results as an optimization.
+ prevdict = {}
+ if resultfile and os.path.isfile(resultfile):
+ resultfile = os.path.abspath(resultfile)
+ with open(resultfile, 'rb') as f:
+ prevdict = json.load(f)
+ else:
+ resultfile = os.path.abspath(resultfile)
+ # Works with native os.path.sep but stores as '/'.
+ if 'files' in prevdict and os.path.sep != '/':
+ prevdict['files'] = dict(
+ (k.replace('/', os.path.sep), v)
+ for k, v in prevdict['files'].iteritems())
- # Workaround make behavior of passing absolute paths.
- cmd = [to_relative(i, indir, cwd) for i in cmd]
- if not cmd:
- # Note that it is exactly the reverse of relative_cwd.
- cmd = [os.path.join(os.path.relpath(indir, cwd), infiles[0])]
- if cmd[0].endswith('.py'):
- cmd.insert(0, sys.executable)
+ infiles = expand_directories(
+ indir, infiles, lambda x: re.match(r'.*\.(svn|pyc)$', x))
# Only hashtable mode really needs the sha-1.
- dictfiles = process_inputs(indir, infiles, mode == 'hashtable', read_only)
+ level = {
+ 'check': NO_INFO,
+ 'hashtable': WITH_HASH,
+ 'remap': STATS_ONLY,
+ 'run': STATS_ONLY,
+ 'trace': STATS_ONLY,
+ }
+ dictfiles = process_inputs(
+ prevdict.get('files', {}), indir, infiles, level[mode], read_only)
result = mode_fn(
outdir, indir, dictfiles, read_only, cmd, relative_cwd, resultfile)
+ out = {
+ 'command': cmd,
+ 'relative_cwd': relative_cwd,
+ 'files': dictfiles,
+ # Makes the directories read-only in addition to the files.
+ 'read_only': read_only,
+ }
+
+ # Works with native os.path.sep but stores as '/'.
+ if os.path.sep != '/':
+ out['files'] = dict(
+ (k.replace(os.path.sep, '/'), v) for k, v in out['files'].iteritems())
+
+ f = None
+ try:
+ if resultfile:
+ f = open(resultfile, 'wb')
+ else:
+ f = sys.stdout
+ json.dump(out, f, indent=2, sort_keys=True)
+ f.write('\n')
+ finally:
+ if resultfile and f:
+ f.close()
- if result == 0 and not no_save:
- # Saves the resulting file.
- out = {
- 'command': cmd,
- 'relative_cwd': relative_cwd,
- 'files': dictfiles,
- 'read_only': read_only,
- }
- with open(resultfile, 'wb') as f:
- json.dump(out, f, indent=2, sort_keys=True)
+ total_bytes = sum(i.get('size', 0) for i in out['files'].itervalues())
+ if total_bytes:
+ logging.debug('Total size: %d bytes' % total_bytes)
return result
@@ -247,13 +324,18 @@ def MODEcheck(
def MODEhashtable(
outdir, indir, dictfiles, _read_only, _cmd, _relative_cwd, resultfile):
- outdir = outdir or os.path.dirname(resultfile)
+ outdir = outdir or os.path.join(os.path.dirname(resultfile), 'hashtable')
+ if not os.path.isdir(outdir):
+ os.makedirs(outdir)
for relfile, properties in dictfiles.iteritems():
infile = os.path.join(indir, relfile)
outfile = os.path.join(outdir, properties['sha-1'])
if os.path.isfile(outfile):
- # Just do a quick check that the file size matches.
- if os.stat(infile).st_size == os.stat(outfile).st_size:
+ # Just do a quick check that the file size matches. No need to stat()
+ # again the input file, grab the value from the dict.
+ out_size = os.stat(outfile).st_size
+ in_size = dictfiles.get(infile, {}).get('size') or os.stat(infile).st_size
+ if in_size == out_size:
continue
# Otherwise, an exception will be raised.
run_test_from_archive.link_file(
@@ -287,7 +369,7 @@ def MODErun(
os.makedirs(cwd)
if read_only:
run_test_from_archive.make_writable(outdir, True)
-
+ cmd = trace_inputs.fix_python_path(cmd)
logging.info('Running %s, cwd=%s' % (cmd, cwd))
return subprocess.call(cmd, cwd=cwd)
finally:
@@ -302,11 +384,13 @@ def MODEtrace(
checkout at src/.
"""
logging.info('Running %s, cwd=%s' % (cmd, os.path.join(indir, relative_cwd)))
- try:
+ if resultfile:
# Guesswork here.
- product_dir = os.path.relpath(os.path.dirname(resultfile), indir)
- except ValueError:
- product_dir = ''
+ product_dir = os.path.dirname(resultfile)
+ if product_dir and indir:
+ product_dir = os.path.relpath(product_dir, indir)
+ else:
+ product_dir = None
return trace_inputs.trace_inputs(
'%s.log' % resultfile,
cmd,
@@ -323,36 +407,42 @@ def get_valid_modes():
def main():
+ default_variables = ['OS=%s' % trace_inputs.get_flavor()]
+ if sys.platform in ('win32', 'cygwin'):
+ default_variables.append('EXECUTABLE_SUFFIX=.exe')
+ else:
+ default_variables.append('EXECUTABLE_SUFFIX=')
valid_modes = get_valid_modes()
parser = optparse.OptionParser(
- usage='%prog [options] [inputs] -- [command line]',
+ usage='%prog [options] [.isolate file]',
description=sys.modules[__name__].__doc__)
- parser.allow_interspersed_args = False
parser.format_description = lambda *_: parser.description
parser.add_option(
- '-v', '--verbose', action='count', default=0, help='Use multiple times')
+ '-v', '--verbose',
+ action='count',
+ default=2 if 'ISOLATE_DEBUG' in os.environ else 0,
+ help='Use multiple times')
parser.add_option(
- '--mode', choices=valid_modes,
+ '-m', '--mode',
+ choices=valid_modes,
help='Determines the action to be taken: %s' % ', '.join(valid_modes))
parser.add_option(
- '--result', metavar='FILE',
- help='File containing the json information about inputs')
+ '-r', '--result',
+ metavar='FILE',
+ help='Result file to store the json manifest')
parser.add_option(
- '--root', metavar='DIR', help='Base directory to fetch files, required')
+ '-V', '--variable',
+ action='append',
+ default=default_variables,
+ dest='variables',
+ metavar='FOO=BAR',
+ help='Variables to process in the .isolate file, default: %default')
parser.add_option(
- '--outdir', metavar='DIR',
+ '-o', '--outdir', metavar='DIR',
help='Directory used to recreate the tree or store the hash table. '
- 'For run and remap, uses a /tmp subdirectory. For the other modes, '
- 'defaults to the directory containing --result')
- parser.add_option(
- '--read-only', action='store_true', default=False,
- help='Make the temporary tree read-only')
- parser.add_option(
- '--from-results', action='store_true',
- help='Loads everything from the result file instead of generating it')
- parser.add_option(
- '--files', metavar='FILE',
- help='File to be read containing input files')
+ 'If the environment variable ISOLATE_HASH_TABLE_DIR exists, it will '
+ 'be used. Otherwise, for run and remap, uses a /tmp subdirectory. '
+ 'For the other modes, defaults to the directory containing --result')
options, args = parser.parse_args()
level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)]
@@ -362,55 +452,68 @@ def main():
if not options.mode:
parser.error('--mode is required')
+ if len(args) != 1:
+ parser.error('Use only one argument which should be a .isolate file')
+ input_file = os.path.abspath(args[0])
+
+ # Extract the variables.
+ variables = dict(i.split('=', 1) for i in options.variables)
+ if not variables.get('DEPTH'):
+ parser.error('--variable DEPTH=<base dir> is required')
+
+ PATH_VARIABLES = ('DEPTH', 'PRODUCT_DIR')
+ # Process path variables as a special case. First normalize it, verifies it
+ # exists, convert it to an absolute path, then calculate relative_dir, and
+ # finally convert it back to a relative value from relative_dir.
+ abs_variables = {}
+ for i in PATH_VARIABLES:
+ if i not in variables:
+ continue
+ abs_variables[i] = os.path.normpath(variables[i])
+ if not os.path.isdir(abs_variables[i]):
+ parser.error('%s is not a directory' % abs_variables[i])
+ abs_variables[i] = os.path.abspath(abs_variables[i])
+
+ # The relative directory is automatically determined by the relative path
+ # between DEPTH and the directory containing the .isolate file.
+ isolate_dir = os.path.dirname(os.path.abspath(input_file))
+ relative_dir = os.path.relpath(isolate_dir, abs_variables['DEPTH'])
+ logging.debug('relative_dir: %s' % relative_dir)
+
+ # Directories are _relative_ to relative_dir.
+ for i in PATH_VARIABLES:
+ if i not in variables:
+ continue
+ variables[i] = os.path.relpath(abs_variables[i], isolate_dir)
- if not options.result:
- parser.error('--result is required.')
- if options.from_results:
- if not options.root:
- options.root = os.getcwd()
- if args:
- parser.error('Arguments cannot be used with --from-result')
- if options.files:
- parser.error('--files cannot be used with --from-result')
- else:
- if not options.root:
- parser.error('--root is required.')
-
- options.result = os.path.abspath(options.result)
-
- # Normalize the root input directory.
- indir = os.path.normpath(options.root)
- if not os.path.isdir(indir):
- parser.error('%s is not a directory' % indir)
-
- # Do not call abspath until it was verified the directory exists.
- indir = os.path.abspath(indir)
-
- logging.info('sys.argv: %s' % sys.argv)
- logging.info('cwd: %s' % os.getcwd())
- logging.info('Args: %s' % args)
- if not options.from_results:
- infiles, cmd = separate_inputs_command(args, indir, options.files)
- if not infiles:
- parser.error('Need at least one input file to map')
- else:
- data = json.load(open(options.result))
- cmd = data['command']
- infiles = data['files'].keys()
- os.chdir(data['relative_cwd'])
-
- logging.info('infiles: %s' % infiles)
+ logging.debug(
+ 'variables: %s' % ', '.join(
+ '%s=%s' % (k, v) for k, v in variables.iteritems()))
+
+ # TODO(maruel): Case insensitive file systems.
+ if not input_file.startswith(abs_variables['DEPTH']):
+ parser.error(
+ '%s must be under %s, as it is used as the relative start directory.' %
+ (args[0], abs_variables['DEPTH']))
+
+ command, infiles, read_only = load_isolate(
+ open(input_file, 'r').read(), variables, parser.error)
+ logging.debug('command: %s' % command)
+ logging.debug('infiles: %s' % infiles)
+ logging.debug('read_only: %s' % read_only)
+ infiles = [normpath(os.path.join(relative_dir, f)) for f in infiles]
+ logging.debug('processed infiles: %s' % infiles)
try:
return isolate(
options.outdir,
- options.result,
- indir,
+ abs_variables['DEPTH'],
infiles,
options.mode,
- options.read_only,
- cmd,
- options.from_results)
+ read_only,
+ command,
+ relative_dir,
+ options.result)
except run_test_from_archive.MappingError, e:
print >> sys.stderr, str(e)
return 1
diff --git a/tools/isolate/isolate_smoke_test.py b/tools/isolate/isolate_smoke_test.py
index 721c3e0..727b9a1 100755
--- a/tools/isolate/isolate_smoke_test.py
+++ b/tools/isolate/isolate_smoke_test.py
@@ -15,9 +15,11 @@ import sys
import tempfile
import unittest
+import isolate
+
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
VERBOSE = False
-FILENAME = os.path.basename(__file__)
+
class CalledProcessError(subprocess.CalledProcessError):
@@ -35,14 +37,18 @@ class CalledProcessError(subprocess.CalledProcessError):
class Isolate(unittest.TestCase):
def setUp(self):
- # The reason is that FILENAME --ok is run in a temporary directory
- # without access to isolate.py
- import isolate
- self.isolate = isolate
+ # The tests assume the current directory is the file's directory.
+ os.chdir(ROOT_DIR)
self.tempdir = tempfile.mkdtemp()
self.result = os.path.join(self.tempdir, 'result')
+ self.child = os.path.join('data', 'isolate', 'child.py')
if VERBOSE:
print
+ self.files = [
+ self.child,
+ os.path.join('data', 'isolate', 'files1', 'test_file1.txt'),
+ os.path.join('data', 'isolate', 'files1', 'test_file2.txt'),
+ ]
def tearDown(self):
shutil.rmtree(self.tempdir)
@@ -60,31 +66,51 @@ class Isolate(unittest.TestCase):
min_mode |= 0200
def mode(filename):
return (min_mode | 0111) if filename.endswith('.py') else min_mode
+
+ if not isinstance(files, dict):
+ # Update files to dict.
+ files = dict((unicode(f), {u'mode': mode(f)}) for f in files)
+ # Add size and timestamp.
+ files = files.copy()
+ for k, v in files.iteritems():
+ if v:
+ filestats = os.stat(k)
+ v[u'size'] = filestats.st_size
+          # Used to skip recalculating the hash. Use the most recent update
+ # time.
+ v[u'timestamp'] = int(round(
+ max(filestats.st_mtime, filestats.st_ctime)))
+
expected = {
- u'command':
- [unicode(sys.executable)] +
- [unicode(x) for x in args],
- u'files': dict((unicode(f), {u'mode': mode(f)}) for f in files),
- u'relative_cwd': u'.',
- u'read_only': False,
+ u'files': files,
+ u'relative_cwd': u'data/isolate',
+ u'read_only': None,
}
+ if args:
+ expected[u'command'] = [u'python'] + [unicode(x) for x in args]
+ else:
+ expected[u'command'] = []
if with_hash:
for filename in expected[u'files']:
# Calculate our hash.
h = hashlib.sha1()
h.update(open(os.path.join(ROOT_DIR, filename), 'rb').read())
- expected[u'files'][filename][u'sha-1'] = h.hexdigest()
+ expected[u'files'][filename][u'sha-1'] = unicode(h.hexdigest())
actual = json.load(open(self.result, 'rb'))
self.assertEquals(expected, actual)
return expected
- def _execute(self, args, need_output=False):
+ def _execute(self, filename, args, need_output=False):
cmd = [
sys.executable, os.path.join(ROOT_DIR, 'isolate.py'),
- '--root', ROOT_DIR,
+ '--variable', 'DEPTH=%s' % ROOT_DIR,
'--result', self.result,
- ]
+ os.path.join(ROOT_DIR, 'data', 'isolate', filename),
+ ] + args
+ env = os.environ.copy()
+ if 'ISOLATE_DEBUG' in env:
+ del env['ISOLATE_DEBUG']
if need_output or not VERBOSE:
stdout = subprocess.PIPE
stderr = subprocess.STDOUT
@@ -98,6 +124,7 @@ class Isolate(unittest.TestCase):
stdout=stdout,
stderr=stderr,
cwd=cwd,
+ env=env,
universal_newlines=True)
out = p.communicate()[0]
if p.returncode:
@@ -126,70 +153,40 @@ class Isolate(unittest.TestCase):
self._expected_tree([])
def test_check(self):
- cmd = [
- '--mode', 'check',
- FILENAME,
- ]
- self._execute(cmd)
+ self._execute('fail.isolate', ['--mode', 'check'])
self._expected_tree(['result'])
self._expected_result(
- False,
- [FILENAME],
- [os.path.join('.', FILENAME)],
- False)
+ False, dict((f, {}) for f in self.files), ['child.py', '--fail'], False)
- def test_check_non_existant(self):
- cmd = [
- '--mode', 'check',
- 'NonExistentFile',
- ]
+ def test_check_no_run(self):
+ self._execute('no_run.isolate', ['--mode', 'check'])
+ self._expected_tree(['result'])
+ self._expected_result(
+ False, dict((f, {}) for f in self.files), None, False)
+
+ def test_check_non_existent(self):
try:
- self._execute(cmd)
+ self._execute('non_existent.isolate', ['--mode', 'check'])
self.fail()
except subprocess.CalledProcessError:
pass
self._expected_tree([])
def test_check_directory_no_slash(self):
- cmd = [
- '--mode', 'check',
- # Trailing slash missing.
- os.path.join('data', 'isolate'),
- ]
try:
- self._execute(cmd)
+ self._execute('missing_trailing_slash.isolate', ['--mode', 'check'])
self.fail()
except subprocess.CalledProcessError:
pass
self._expected_tree([])
- def test_check_abs_path(self):
- cmd = [
- '--mode', 'check',
- FILENAME,
- '--',
- os.path.join(ROOT_DIR, FILENAME),
- ]
- self._execute(cmd)
- self._expected_tree(['result'])
- self._expected_result(
- False, [FILENAME], [FILENAME], False)
-
def test_hashtable(self):
cmd = [
'--mode', 'hashtable',
'--outdir', self.tempdir,
- FILENAME,
- os.path.join('data', 'isolate') + os.path.sep,
]
- self._execute(cmd)
- files = [
- FILENAME,
- os.path.join('data', 'isolate', 'test_file1.txt'),
- os.path.join('data', 'isolate', 'test_file2.txt'),
- ]
- data = self._expected_result(
- True, files, [os.path.join('.', FILENAME)], False)
+ self._execute('no_run.isolate', cmd)
+ data = self._expected_result(True, self.files, None, False)
self._expected_tree(
[f['sha-1'] for f in data['files'].itervalues()] + ['result'])
@@ -197,91 +194,60 @@ class Isolate(unittest.TestCase):
cmd = [
'--mode', 'remap',
'--outdir', self.tempdir,
- FILENAME,
]
- self._execute(cmd)
- self._expected_tree([FILENAME, 'result'])
+ self._execute('no_run.isolate', cmd)
+ self._expected_tree(['data', 'result'])
self._expected_result(
False,
- [FILENAME],
- [os.path.join('.', FILENAME)],
+ self.files,
+ None,
False)
def test_run(self):
- cmd = [
- '--mode', 'run',
- FILENAME,
- '--',
- sys.executable, FILENAME, '--ok',
- ]
- self._execute(cmd)
+ self._execute('ok.isolate', ['--mode', 'run'])
self._expected_tree(['result'])
# cmd[0] is not generated from infiles[0] so it's not using a relative path.
self._expected_result(
- False, [FILENAME], [FILENAME, '--ok'], False)
+ False, self.files, ['child.py', '--ok'], False)
def test_run_fail(self):
- cmd = [
- '--mode', 'run',
- FILENAME,
- '--',
- sys.executable, FILENAME, '--fail',
- ]
try:
- self._execute(cmd)
+ self._execute('fail.isolate', ['--mode', 'run'])
self.fail()
except subprocess.CalledProcessError:
pass
- self._expected_tree([])
+ self._expected_tree(['result'])
def test_trace(self):
- cmd = [
- '--mode', 'trace',
- FILENAME,
- '--',
- sys.executable, os.path.join(ROOT_DIR, FILENAME), '--ok',
- ]
- out = self._execute(cmd, True)
- expected_tree = ['result', 'result.log']
- if sys.platform == 'win32':
- expected_tree.append('result.log.etl')
- self._expected_tree(expected_tree)
+ out = self._execute('ok.isolate', ['--mode', 'trace'], True)
+ self._expected_tree(['result', 'result.log'])
# The 'result.log' log is OS-specific so we can't read it but we can read
# the gyp result.
# cmd[0] is not generated from infiles[0] so it's not using a relative path.
self._expected_result(
- False, [FILENAME], [FILENAME, '--ok'], False)
+ False, self.files, ['child.py', '--ok'], False)
expected_value = {
'conditions': [
- ['OS=="%s"' % self.isolate.trace_inputs.get_flavor(), {
+ ['OS=="%s"' % isolate.trace_inputs.get_flavor(), {
'variables': {
- 'isolate_files': [
- '<(DEPTH)/%s' % FILENAME,
+ isolate.trace_inputs.KEY_TRACKED: [
+ 'child.py',
+ ],
+ isolate.trace_inputs.KEY_UNTRACKED: [
+ 'files1/',
],
},
}],
],
}
expected_buffer = cStringIO.StringIO()
- self.isolate.trace_inputs.pretty_print(expected_value, expected_buffer)
+ isolate.trace_inputs.pretty_print(expected_value, expected_buffer)
self.assertEquals(expected_buffer.getvalue(), out)
-def main():
- global VERBOSE
- VERBOSE = '-v' in sys.argv
- level = logging.DEBUG if VERBOSE else logging.ERROR
- logging.basicConfig(level=level)
- if len(sys.argv) == 1:
- unittest.main()
- if sys.argv[1] == '--ok':
- return 0
- if sys.argv[1] == '--fail':
- return 1
-
- unittest.main()
-
if __name__ == '__main__':
- sys.exit(main())
+ VERBOSE = '-v' in sys.argv
+ logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
+ unittest.main()
diff --git a/tools/isolate/isolate_test.py b/tools/isolate/isolate_test.py
new file mode 100755
index 0000000..3e5eac9
--- /dev/null
+++ b/tools/isolate/isolate_test.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import isolate
+
+
+class Isolate(unittest.TestCase):
+ def test_load_empty(self):
+ content = "{}"
+ variables = {}
+ command, infiles, read_only = isolate.load_isolate(
+ content, variables, self.fail)
+ self.assertEquals([], command)
+ self.assertEquals([], infiles)
+ self.assertEquals(None, read_only)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/isolate/merge_isolate.py b/tools/isolate/merge_isolate.py
index fd4a0d3..6170c7f 100755
--- a/tools/isolate/merge_isolate.py
+++ b/tools/isolate/merge_isolate.py
@@ -18,6 +18,8 @@ import re
import sys
import trace_inputs
+# Create shortcuts.
+from trace_inputs import KEY_TRACKED, KEY_UNTRACKED
def union(lhs, rhs):
@@ -28,43 +30,17 @@ def union(lhs, rhs):
if rhs is None:
return copy.deepcopy(lhs)
assert type(lhs) == type(rhs), (lhs, rhs)
+ if hasattr(lhs, 'union'):
+ # Includes set, OSSettings and Configs.
+ return lhs.union(rhs)
if isinstance(lhs, dict):
return dict((k, union(lhs.get(k), rhs.get(k))) for k in set(lhs).union(rhs))
- elif isinstance(lhs, set):
- # Do not go inside the set.
- return lhs.union(rhs)
elif isinstance(lhs, list):
# Do not go inside the list.
return lhs + rhs
assert False, type(lhs)
-def process_variables(for_os, variables):
- """Extracts files and dirs from the |variables| dict.
-
- Returns a list of exactly two items. Each item is a dict that maps a string
- to a set (of strings).
-
- In the first item, the keys are file names, and the values are sets of OS
- names, like "win" or "mac". In the second item, the keys are directory names,
- and the values are sets of OS names too.
- """
- VALID_VARIABLES = ['isolate_files', 'isolate_dirs']
-
- # Verify strictness.
- assert isinstance(variables, dict), variables
- assert set(VALID_VARIABLES).issuperset(set(variables)), variables.keys()
- for items in variables.itervalues():
- assert isinstance(items, list), items
- assert all(isinstance(i, basestring) for i in items), items
-
- # Returns [files, dirs]
- return [
- dict((name, set([for_os])) for name in variables.get(var, []))
- for var in VALID_VARIABLES
- ]
-
-
def eval_content(content):
"""Evaluates a GYP file and return the value defined in it."""
globs = {'__builtins__': None}
@@ -75,50 +51,262 @@ def eval_content(content):
return value
-def _process_inner(for_os, inner, old_files, old_dirs, old_os):
- """Processes the variables inside a condition.
+def verify_variables(variables):
+ """Verifies the |variables| dictionary is in the expected format."""
+ VALID_VARIABLES = [
+ KEY_TRACKED,
+ KEY_UNTRACKED,
+ 'command',
+ 'read_only',
+ ]
+ assert isinstance(variables, dict), variables
+ assert set(VALID_VARIABLES).issuperset(set(variables)), variables.keys()
+ for name, value in variables.iteritems():
+ if name == 'read_only':
+ assert value in (True, False, None), value
+ else:
+ assert isinstance(value, list), value
+ assert all(isinstance(i, basestring) for i in value), value
+
+
+def verify_condition(condition):
+ """Verifies the |condition| dictionary is in the expected format."""
+ VALID_INSIDE_CONDITION = ['variables']
+ assert isinstance(condition, list), condition
+ assert 2 <= len(condition) <= 3, condition
+ assert re.match(r'OS==\"([a-z]+)\"', condition[0]), condition[0]
+ for c in condition[1:]:
+ assert isinstance(c, dict), c
+ assert set(VALID_INSIDE_CONDITION).issuperset(set(c)), c.keys()
+ verify_variables(c.get('variables', {}))
- Only meant to be called by parse_gyp_dict().
- Args:
- - for_os: OS where the references are tracked for.
- - inner: Inner dictionary to process.
- - old_files: Previous list of files to union with.
- - old_dirs: Previous list of directories to union with.
- - old_os: Previous list of OSes referenced to union with.
+def verify_root(value):
+ VALID_ROOTS = ['variables', 'conditions']
+ assert isinstance(value, dict), value
+ assert set(VALID_ROOTS).issuperset(set(value)), value.keys()
+ verify_variables(value.get('variables', {}))
- Returns:
- - A tuple of (files, dirs, os) where each list is a union of the new
- dependencies found for this OS, as referenced by for_os, and the previous
- list.
+ conditions = value.get('conditions', [])
+ assert isinstance(conditions, list), conditions
+ for condition in conditions:
+ verify_condition(condition)
+
+
+class OSSettings(object):
+ """Represents the dependencies for an OS. The structure is immutable."""
+ def __init__(self, name, values):
+ self.name = name
+ verify_variables(values)
+ self.tracked = sorted(values.get(KEY_TRACKED, []))
+ self.untracked = sorted(values.get(KEY_UNTRACKED, []))
+ self.command = values.get('command', [])[:]
+ self.read_only = values.get('read_only')
+
+ def union(self, rhs):
+ assert self.name == rhs.name
+ assert not (self.command and rhs.command)
+ var = {
+ KEY_TRACKED: sorted(self.tracked + rhs.tracked),
+ KEY_UNTRACKED: sorted(self.untracked + rhs.untracked),
+ 'command': self.command or rhs.command,
+ 'read_only': rhs.read_only if self.read_only is None else self.read_only,
+ }
+ return OSSettings(self.name, var)
+
+ def flatten(self):
+ out = {}
+ if self.command:
+ out['command'] = self.command
+ if self.tracked:
+ out[KEY_TRACKED] = self.tracked
+ if self.untracked:
+ out[KEY_UNTRACKED] = self.untracked
+ if self.read_only is not None:
+ out['read_only'] = self.read_only
+ return out
+
+
+class Configs(object):
+ """Represents all the OS-specific configurations.
+
+ The self.per_os[None] member contains all the 'else' clauses plus the default
+ values. It is not included in the flatten() result.
"""
- assert isinstance(inner, dict), inner
- assert set(['variables']).issuperset(set(inner)), inner.keys()
- new_files, new_dirs = process_variables(for_os, inner.get('variables', {}))
- if new_files or new_dirs:
- old_os = old_os.union([for_os.lstrip('!')])
- return union(old_files, new_files), union(old_dirs, new_dirs), old_os
+ def __init__(self, oses):
+ self.per_os = {
+ None: OSSettings(None, {}),
+ }
+ self.per_os.update(dict((name, OSSettings(name, {})) for name in oses))
+
+ def union(self, rhs):
+ out = Configs(list(set(self.per_os.keys() + rhs.per_os.keys())))
+ for value in self.per_os.itervalues():
+ # TODO(maruel): FAIL
+ out = out.union(value)
+ for value in rhs.per_os.itervalues():
+ out = out.union(value)
+ return out
+
+ def add_globals(self, values):
+ for key in self.per_os:
+ self.per_os[key] = self.per_os[key].union(OSSettings(key, values))
+
+ def add_values(self, for_os, values):
+ self.per_os[for_os] = self.per_os[for_os].union(OSSettings(for_os, values))
+
+ def add_negative_values(self, for_os, values):
+ """Includes the variables to all OSes except |for_os|.
+
+    This includes 'None' so unknown OSes get it too.
+ """
+ for key in self.per_os:
+ if key != for_os:
+ self.per_os[key] = self.per_os[key].union(OSSettings(key, values))
+ def flatten(self):
+ """Returns a flat dictionary representation of the configuration.
-def parse_gyp_dict(value):
- """Parses a gyp dict as returned by eval_content().
+ Skips None pseudo-OS.
+ """
+ return dict(
+ (k, v.flatten()) for k, v in self.per_os.iteritems() if k is not None)
- |value| is the loaded dictionary that was defined in the gyp file.
- Returns a 3-tuple, where the first two items are the same as the items
- returned by process_variable() in the same order, and the last item is a set
- of strings of all OSs seen in the input dict.
+def invert_map(variables):
+  """Converts a dict(OS, dict(deptype, list(dependencies))) to a flattened view.
+
+ Returns a tuple of:
+ 1. dict(deptype, dict(dependency, set(OSes)) for easier processing.
+ 2. All the OSes found as a set.
+ """
+ KEYS = (
+ KEY_TRACKED,
+ KEY_UNTRACKED,
+ 'command',
+ 'read_only',
+ )
+ out = dict((key, {}) for key in KEYS)
+ for os_name, values in variables.iteritems():
+ for key in (KEY_TRACKED, KEY_UNTRACKED):
+ for item in values.get(key, []):
+ out[key].setdefault(item, set()).add(os_name)
+
+ # command needs special handling.
+ command = tuple(values.get('command', []))
+ out['command'].setdefault(command, set()).add(os_name)
+
+ # read_only needs special handling.
+ out['read_only'].setdefault(values.get('read_only'), set()).add(os_name)
+ return out, set(variables)
+
+
+def reduce_inputs(values, oses):
+ """Reduces the invert_map() output to the strictest minimum list.
+
+ 1. Construct the inverse map first.
+ 2. Look at each individual file and directory, map where they are used and
+ reconstruct the inverse dictionary.
+ 3. Do not convert back to negative if only 2 OSes were merged.
+
+ Returns a tuple of:
+ 1. the minimized dictionary
+ 2. oses passed through as-is.
+ """
+ KEYS = (
+ KEY_TRACKED,
+ KEY_UNTRACKED,
+ 'command',
+ 'read_only',
+ )
+ out = dict((key, {}) for key in KEYS)
+ assert all(oses), oses
+ if len(oses) > 2:
+ for key in KEYS:
+ for item, item_oses in values.get(key, {}).iteritems():
+ # Converts all oses.difference('foo') to '!foo'.
+ assert all(item_oses), item_oses
+ missing = oses.difference(item_oses)
+ if len(missing) == 1:
+ # Replace it with a negative.
+ out[key][item] = set(['!' + tuple(missing)[0]])
+ elif not missing:
+ out[key][item] = set([None])
+ else:
+ out[key][item] = set(item_oses)
+ return out, oses
+
+
+def convert_map_to_gyp(values, oses):
+ """Regenerates back a gyp-like configuration dict from files and dirs
+ mappings generated from reduce_inputs().
+ """
+  # First, invert the mapping to build a dict keyed by OS condition.
+ config = {}
+ for key in values:
+ for item, oses in values[key].iteritems():
+ if item is None:
+ # For read_only default.
+ continue
+ for cond_os in oses:
+ cond_key = None if cond_os is None else cond_os.lstrip('!')
+ # Insert the if/else dicts.
+ condition_values = config.setdefault(cond_key, [{}, {}])
+ # If condition is negative, use index 1, else use index 0.
+ cond_value = condition_values[int((cond_os or '').startswith('!'))]
+ variables = cond_value.setdefault('variables', {})
+
+ if item in (True, False):
+ # One-off for read_only.
+ variables[key] = item
+ else:
+ if isinstance(item, tuple):
+ # One-off for command.
+ # Do not merge lists and do not sort!
+ # Note that item is a tuple.
+ assert key not in variables
+ variables[key] = list(item)
+ else:
+ # The list of items (files or dirs). Append the new item and keep
+ # the list sorted.
+ l = variables.setdefault(key, [])
+ l.append(item)
+ l.sort()
+
+ out = {}
+ for o in sorted(config):
+ d = config[o]
+ if o is None:
+ assert not d[1]
+ out = union(out, d[0])
+ else:
+ c = out.setdefault('conditions', [])
+ if d[1]:
+ c.append(['OS=="%s"' % o] + d)
+ else:
+ c.append(['OS=="%s"' % o] + d[0:1])
+ return out
+
+
+def load_gyp(value):
+ """Parses one gyp skeleton and returns a Configs() instance.
+
+ |value| is the loaded dictionary that was defined in the gyp file.
The expected format is strict, anything diverting from the format below will
- fail:
+  raise an AssertionError:
{
'variables': {
- 'isolate_files': [
+ 'command': [
+ ...
+ ],
+ 'isolate_dependency_tracked': [
...
],
- 'isolate_dirs: [
+ 'isolate_dependency_untracked': [
...
],
+ 'read_only': False,
},
'conditions': [
['OS=="<os>"', {
@@ -134,165 +322,59 @@ def parse_gyp_dict(value):
],
}
"""
- assert isinstance(value, dict), value
- VALID_ROOTS = ['variables', 'conditions']
- assert set(VALID_ROOTS).issuperset(set(value)), value.keys()
+ verify_root(value)
+
+ # Scan to get the list of OSes.
+ conditions = value.get('conditions', [])
+ oses = set(re.match(r'OS==\"([a-z]+)\"', c[0]).group(1) for c in conditions)
+ configs = Configs(oses)
# Global level variables.
- oses = set()
- files, dirs = process_variables(None, value.get('variables', {}))
+ configs.add_globals(value.get('variables', {}))
# OS specific variables.
- conditions = value.get('conditions', [])
- assert isinstance(conditions, list), conditions
for condition in conditions:
- assert isinstance(condition, list), condition
- assert 2 <= len(condition) <= 3, condition
- m = re.match(r'OS==\"([a-z]+)\"', condition[0])
- assert m, condition[0]
- condition_os = m.group(1)
-
- files, dirs, oses = _process_inner(
- condition_os, condition[1], files, dirs, oses)
-
- if len(condition) == 3:
- files, dirs, oses = _process_inner(
- '!' + condition_os, condition[2], files, dirs, oses)
+ condition_os = re.match(r'OS==\"([a-z]+)\"', condition[0]).group(1)
+ configs.add_values(condition_os, condition[1].get('variables', {}))
+ if len(condition) > 2:
+ configs.add_negative_values(
+ condition_os, condition[2].get('variables', {}))
+ return configs
- # TODO(maruel): _expand_negative() should be called here, because otherwise
- # the OSes the negative condition represents is lost once the gyps are merged.
- # This cause an invalid expansion in reduce_inputs() call.
- return files, dirs, oses
-
-def parse_gyp_dicts(gyps):
+def load_gyps(items):
"""Parses each gyp file and returns the merged results.
- It only loads what parse_gyp_dict() can process.
+ It only loads what load_gyp() can process.
Return values:
files: dict(filename, set(OS where this filename is a dependency))
dirs: dict(dirame, set(OS where this dirname is a dependency))
oses: set(all the OSes referenced)
"""
- files = {}
- dirs = {}
- oses = set()
- for gyp in gyps:
- with open(gyp, 'rb') as gyp_file:
- content = gyp_file.read()
- gyp_files, gyp_dirs, gyp_oses = parse_gyp_dict(eval_content(content))
- files = union(gyp_files, files)
- dirs = union(gyp_dirs, dirs)
- oses |= gyp_oses
- return files, dirs, oses
-
-
-def _expand_negative(items, oses):
- """Converts all '!foo' value in the set by oses.difference('foo')."""
- assert None not in oses and len(oses) >= 2, oses
- for name in items:
- if None in items[name]:
- # Shortcut any item having None in their set. An item listed in None means
- # the item is a dependency on all OSes. As such, there is no need to list
- # any OS.
- items[name] = set([None])
- continue
- for neg in [o for o in items[name] if o.startswith('!')]:
- # Replace it with the inverse.
- items[name] = items[name].union(oses.difference([neg[1:]]))
- items[name].remove(neg)
- if items[name] == oses:
- items[name] = set([None])
-
-
-def _compact_negative(items, oses):
- """Converts all oses.difference('foo') to '!foo'.
-
- It is doing the reverse of _expand_negative().
- """
- assert None not in oses and len(oses) >= 3, oses
- for name in items:
- missing = oses.difference(items[name])
- if len(missing) == 1:
- # Replace it with a negative.
- items[name] = set(['!' + tuple(missing)[0]])
-
-
-def reduce_inputs(files, dirs, oses):
- """Reduces the variables to their strictest minimum."""
- # Construct the inverse map first.
- # Look at each individual file and directory, map where they are used and
- # reconstruct the inverse dictionary.
- # First, expands all '!' builders into the reverse.
- # TODO(maruel): This is too late to call _expand_negative(). The exact list
- # negative OSes condition it represents is lost at that point.
- _expand_negative(files, oses)
- _expand_negative(dirs, oses)
-
- # Do not convert back to negative if only 2 OSes were merged. It is easier to
- # read this way.
- if len(oses) > 2:
- _compact_negative(files, oses)
- _compact_negative(dirs, oses)
-
- return files, dirs
-
-
-def convert_to_gyp(files, dirs):
- """Regenerates back a gyp-like configuration dict from files and dirs
- mappings.
-
- Sort the lists.
- """
- # First, inverse the mapping to make it dict first.
- config = {}
- def to_cond(items, name):
- for item, oses in items.iteritems():
- for cond_os in oses:
- condition_values = config.setdefault(
- None if cond_os is None else cond_os.lstrip('!'),
- [{}, {}])
- # If condition is negative, use index 1, else use index 0.
- condition_value = condition_values[int((cond_os or '').startswith('!'))]
- # The list of items (files or dirs). Append the new item and keep the
- # list sorted.
- l = condition_value.setdefault('variables', {}).setdefault(name, [])
- l.append(item)
- l.sort()
-
- to_cond(files, 'isolate_files')
- to_cond(dirs, 'isolate_dirs')
-
- out = {}
- for o in sorted(config):
- d = config[o]
- if o is None:
- assert not d[1]
- out = union(out, d[0])
- else:
- c = out.setdefault('conditions', [])
- if d[1]:
- c.append(['OS=="%s"' % o] + d)
- else:
- c.append(['OS=="%s"' % o] + d[0:1])
- return out
+ configs = Configs([])
+ for item in items:
+ configs = configs.union(load_gyp(eval_content(open(item, 'rb').read())))
+ return configs
-def main():
+def main(args=None):
parser = optparse.OptionParser(
usage='%prog <options> [file1] [file2] ...')
parser.add_option(
'-v', '--verbose', action='count', default=0, help='Use multiple times')
- options, args = parser.parse_args()
+ options, args = parser.parse_args(args)
level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)]
logging.basicConfig(
level=level,
format='%(levelname)5s %(module)15s(%(lineno)3d):%(message)s')
trace_inputs.pretty_print(
- convert_to_gyp(*reduce_inputs(*parse_gyp_dicts(args))),
+ convert_map_to_gyp(
+ *reduce_inputs(
+ *invert_map(
+ load_gyps(args).flatten()))),
sys.stdout)
return 0
diff --git a/tools/isolate/merge_isolate_test.py b/tools/isolate/merge_isolate_test.py
index bf82492..e110a74 100755
--- a/tools/isolate/merge_isolate_test.py
+++ b/tools/isolate/merge_isolate_test.py
@@ -6,182 +6,325 @@
import unittest
import merge_isolate
+# Create shortcuts.
+from merge_isolate import KEY_TRACKED, KEY_UNTRACKED
class MergeGyp(unittest.TestCase):
def test_unknown_key(self):
try:
- merge_isolate.process_variables(None, {'foo': [],})
+ merge_isolate.verify_variables({'foo': [],})
self.fail()
except AssertionError:
pass
def test_unknown_var(self):
try:
- merge_isolate.process_variables(None, {'variables': {'foo': [],}})
+ merge_isolate.verify_condition({'variables': {'foo': [],}})
self.fail()
except AssertionError:
pass
- def test_parse_gyp_dict_empty(self):
- f, d, o = merge_isolate.parse_gyp_dict({})
- self.assertEquals({}, f)
- self.assertEquals({}, d)
- self.assertEquals(set(), o)
+ def test_union(self):
+ value1 = {
+ 'a': set(['A']),
+ 'b': ['B', 'C'],
+ 'c': 'C',
+ }
+ value2 = {
+ 'a': set(['B', 'C']),
+ 'b': [],
+ 'd': set(),
+ }
+ expected = {
+ 'a': set(['A', 'B', 'C']),
+ 'b': ['B', 'C'],
+ 'c': 'C',
+ 'd': set(),
+ }
+ self.assertEquals(expected, merge_isolate.union(value1, value2))
+
+ def test_eval_content(self):
+ try:
+ # Intrinsics are not available.
+ merge_isolate.eval_content('map(str, [1, 2])')
+ self.fail()
+ except NameError:
+ pass
- def test_parse_gyp_dict(self):
+ def test_load_gyp_empty(self):
+ self.assertEquals({}, merge_isolate.load_gyp({}).flatten())
+
+ def test_load_gyp(self):
value = {
'variables': {
- 'isolate_files': [
- 'a',
- ],
- 'isolate_dirs': [
- 'b',
- ],
+ KEY_TRACKED: ['a'],
+ KEY_UNTRACKED: ['b'],
},
'conditions': [
['OS=="atari"', {
'variables': {
- 'isolate_files': [
- 'c',
- 'x',
- ],
- 'isolate_dirs': [
- 'd',
- ],
+ KEY_TRACKED: ['c', 'x'],
+ KEY_UNTRACKED: ['d'],
+ 'command': ['echo', 'Hello World'],
+ 'read_only': True,
},
}, { # else
'variables': {
- 'isolate_files': [
- 'e',
- 'x',
- ],
- 'isolate_dirs': [
- 'f',
- ],
+ KEY_TRACKED: ['e', 'x'],
+ KEY_UNTRACKED: ['f'],
+ 'command': ['echo', 'You should get an Atari'],
},
}],
['OS=="amiga"', {
'variables': {
- 'isolate_files': [
- 'g',
- ],
+ KEY_TRACKED: ['g'],
+ 'read_only': False,
},
}],
- ['OS=="inexistent"', {
+ ['OS=="dendy"', {
}],
['OS=="coleco"', {
}, { # else
'variables': {
- 'isolate_dirs': [
- 'h',
- ],
+ KEY_UNTRACKED: ['h'],
+ 'read_only': None,
},
}],
],
}
- expected_files = {
- 'a': set([None]),
- 'c': set(['atari']),
- 'e': set(['!atari']),
- 'g': set(['amiga']),
- 'x': set(['!atari', 'atari']), # potential for reduction
+ expected = {
+ 'amiga': {
+ 'command': ['echo', 'You should get an Atari'],
+ KEY_TRACKED: ['a', 'e', 'g', 'x'],
+ KEY_UNTRACKED: ['b', 'f', 'h'],
+ 'read_only': False,
+ },
+ 'atari': {
+ 'command': ['echo', 'Hello World'],
+ KEY_TRACKED: ['a', 'c', 'x'],
+ KEY_UNTRACKED: ['b', 'd', 'h'],
+ 'read_only': True,
+ },
+ 'coleco': {
+ 'command': ['echo', 'You should get an Atari'],
+ KEY_TRACKED: ['a', 'e', 'x'],
+ KEY_UNTRACKED: ['b', 'f'],
+ },
+ 'dendy': {
+ 'command': ['echo', 'You should get an Atari'],
+ KEY_TRACKED: ['a', 'e', 'x'],
+ KEY_UNTRACKED: ['b', 'f', 'h'],
+ },
}
- expected_dirs = {
- 'b': set([None]),
- 'd': set(['atari']),
- 'f': set(['!atari']),
- 'h': set(['!coleco']),
+ self.assertEquals(expected, merge_isolate.load_gyp(value).flatten())
+
+ def test_load_gyp_duplicate_command(self):
+ value = {
+ 'variables': {
+ 'command': ['rm', '-rf', '/'],
+ },
+ 'conditions': [
+ ['OS=="atari"', {
+ 'variables': {
+ 'command': ['echo', 'Hello World'],
+ },
+ }],
+ ],
}
- # coleco is included even if only negative.
- expected_oses = set(['atari', 'amiga', 'coleco'])
- actual_files, actual_dirs, actual_oses = merge_isolate.parse_gyp_dict(value)
- self.assertEquals(expected_files, actual_files)
- self.assertEquals(expected_dirs, actual_dirs)
- self.assertEquals(expected_oses, actual_oses)
+ try:
+ merge_isolate.load_gyp(value)
+ self.fail()
+ except AssertionError:
+ pass
- def test_reduce_inputs(self):
- value_files = {
- 'a': set([None]),
- 'c': set(['atari']),
- 'e': set(['!atari']),
- 'g': set(['amiga']),
- 'x': set(['!atari', 'atari']),
+ def test_load_gyp_no_condition(self):
+ value = {
+ 'variables': {
+ KEY_TRACKED: ['a'],
+ KEY_UNTRACKED: ['b'],
+ },
}
- value_dirs = {
- 'b': set([None]),
- 'd': set(['atari']),
- 'f': set(['!atari']),
- 'h': set(['!coleco']),
+ expected = {
+ KEY_TRACKED: ['a'],
+ KEY_UNTRACKED: ['b'],
}
- value_oses = set(['atari', 'amiga', 'coleco'])
- expected_files = {
- 'a': set([None]),
- 'c': set(['atari']),
- 'e': set(['!atari']),
- 'g': set(['amiga']),
- 'x': set([None]), # Reduced.
+ actual = merge_isolate.load_gyp(value)
+ # Flattening the whole config will discard 'None'.
+ self.assertEquals({}, actual.flatten())
+ self.assertEquals([None], actual.per_os.keys())
+ # But the 'None' value is still available as a backup.
+ self.assertEquals(expected, actual.per_os[None].flatten())
+
+ def test_invert_map(self):
+ value = {
+ 'amiga': {
+ 'command': ['echo', 'You should get an Atari'],
+ KEY_TRACKED: ['a', 'e', 'g', 'x'],
+ KEY_UNTRACKED: ['b', 'f', 'h'],
+ 'read_only': False,
+ },
+ 'atari': {
+ 'command': ['echo', 'Hello World'],
+ KEY_TRACKED: ['a', 'c', 'x'],
+ KEY_UNTRACKED: ['b', 'd', 'h'],
+ 'read_only': True,
+ },
+ 'coleco': {
+ 'command': ['echo', 'You should get an Atari'],
+ KEY_TRACKED: ['a', 'e', 'x'],
+ KEY_UNTRACKED: ['b', 'f'],
+ },
+ 'dendy': {
+ 'command': ['echo', 'You should get an Atari'],
+ KEY_TRACKED: ['a', 'e', 'x'],
+ KEY_UNTRACKED: ['b', 'f', 'h'],
+ },
}
- expected_dirs = {
- 'b': set([None]),
- 'd': set(['atari']),
- 'f': set(['!atari']),
- 'h': set(['!coleco']),
+ expected_values = {
+ 'command': {
+ ('echo', 'Hello World'): set(['atari']),
+ ('echo', 'You should get an Atari'): set(['amiga', 'coleco', 'dendy']),
+ },
+ KEY_TRACKED: {
+ 'a': set(['amiga', 'atari', 'coleco', 'dendy']),
+ 'c': set(['atari']),
+ 'e': set(['amiga', 'coleco', 'dendy']),
+ 'g': set(['amiga']),
+ 'x': set(['amiga', 'atari', 'coleco', 'dendy']),
+ },
+ KEY_UNTRACKED: {
+ 'b': set(['amiga', 'atari', 'coleco', 'dendy']),
+ 'd': set(['atari']),
+ 'f': set(['amiga', 'coleco', 'dendy']),
+ 'h': set(['amiga', 'atari', 'dendy']),
+ },
+ 'read_only': {
+ None: set(['coleco', 'dendy']),
+ False: set(['amiga']),
+ True: set(['atari']),
+ },
}
- actual_files, actual_dirs = merge_isolate.reduce_inputs(
- value_files, value_dirs, value_oses)
- self.assertEquals(expected_files, actual_files)
- self.assertEquals(expected_dirs, actual_dirs)
-
- def test_convert_to_gyp(self):
- files = {
- 'a': set([None]),
- 'x': set([None]),
-
- 'g': set(['amiga']),
+ expected_oses = set(['amiga', 'atari', 'coleco', 'dendy'])
+ actual_values, actual_oses = merge_isolate.invert_map(value)
+ self.assertEquals(expected_values, actual_values)
+ self.assertEquals(expected_oses, actual_oses)
- 'c': set(['atari']),
- 'e': set(['!atari']),
+ def test_reduce_inputs(self):
+ values = {
+ 'command': {
+ ('echo', 'Hello World'): set(['atari']),
+ ('echo', 'You should get an Atari'): set(['amiga', 'coleco', 'dendy']),
+ },
+ KEY_TRACKED: {
+ 'a': set(['amiga', 'atari', 'coleco', 'dendy']),
+ 'c': set(['atari']),
+ 'e': set(['amiga', 'coleco', 'dendy']),
+ 'g': set(['amiga']),
+ 'x': set(['amiga', 'atari', 'coleco', 'dendy']),
+ },
+ KEY_UNTRACKED: {
+ 'b': set(['amiga', 'atari', 'coleco', 'dendy']),
+ 'd': set(['atari']),
+ 'f': set(['amiga', 'coleco', 'dendy']),
+ 'h': set(['amiga', 'atari', 'dendy']),
+ },
+ 'read_only': {
+ None: set(['coleco', 'dendy']),
+ False: set(['amiga']),
+ True: set(['atari']),
+ },
}
- dirs = {
- 'b': set([None]),
-
- 'd': set(['atari']),
- 'f': set(['!atari']),
+ oses = set(['amiga', 'atari', 'coleco', 'dendy'])
+ expected_values = {
+ 'command': {
+ ('echo', 'Hello World'): set(['atari']),
+ ('echo', 'You should get an Atari'): set(['!atari']),
+ },
+ KEY_TRACKED: {
+ 'a': set([None]),
+ 'c': set(['atari']),
+ 'e': set(['!atari']),
+ 'g': set(['amiga']),
+ 'x': set([None]),
+ },
+ KEY_UNTRACKED: {
+ 'b': set([None]),
+ 'd': set(['atari']),
+ 'f': set(['!atari']),
+ 'h': set(['!coleco']),
+ },
+ 'read_only': {
+ None: set(['coleco', 'dendy']),
+ False: set(['amiga']),
+ True: set(['atari']),
+ },
+ }
+ actual_values, actual_oses = merge_isolate.reduce_inputs(values, oses)
+ self.assertEquals(expected_values, actual_values)
+ self.assertEquals(oses, actual_oses)
- 'h': set(['!coleco']),
+ def test_convert_map_to_gyp(self):
+ values = {
+ 'command': {
+ ('echo', 'Hello World'): set(['atari']),
+ ('echo', 'You should get an Atari'): set(['!atari']),
+ },
+ KEY_TRACKED: {
+ 'a': set([None]),
+ 'c': set(['atari']),
+ 'e': set(['!atari']),
+ 'g': set(['amiga']),
+ 'x': set([None]),
+ },
+ KEY_UNTRACKED: {
+ 'b': set([None]),
+ 'd': set(['atari']),
+ 'f': set(['!atari']),
+ 'h': set(['!coleco']),
+ },
+ 'read_only': {
+ None: set(['coleco', 'dendy']),
+ False: set(['amiga']),
+ True: set(['atari']),
+ },
}
+ oses = set(['amiga', 'atari', 'coleco', 'dendy'])
expected = {
'variables': {
- 'isolate_dirs': ['b'],
- 'isolate_files': ['a', 'x'],
+ KEY_TRACKED: ['a', 'x'],
+ KEY_UNTRACKED: ['b'],
},
'conditions': [
['OS=="amiga"', {
'variables': {
- 'isolate_files': ['g'],
+ KEY_TRACKED: ['g'],
+ 'read_only': False,
},
}],
['OS=="atari"', {
'variables': {
- 'isolate_dirs': ['d'],
- 'isolate_files': ['c'],
+ 'command': ['echo', 'Hello World'],
+ KEY_TRACKED: ['c'],
+ KEY_UNTRACKED: ['d'],
+ 'read_only': True,
},
}, {
'variables': {
- 'isolate_dirs': ['f'],
- 'isolate_files': ['e'],
+ 'command': ['echo', 'You should get an Atari'],
+ KEY_TRACKED: ['e'],
+ KEY_UNTRACKED: ['f'],
},
}],
['OS=="coleco"', {
}, {
'variables': {
- 'isolate_dirs': ['h'],
+ KEY_UNTRACKED: ['h'],
},
}],
],
}
- self.assertEquals(expected, merge_isolate.convert_to_gyp(files, dirs))
+ self.assertEquals(expected, merge_isolate.convert_map_to_gyp(values, oses))
if __name__ == '__main__':
diff --git a/tools/isolate/trace_inputs.py b/tools/isolate/trace_inputs.py
index 7de92f7..0ee69b4 100755
--- a/tools/isolate/trace_inputs.py
+++ b/tools/isolate/trace_inputs.py
@@ -24,6 +24,9 @@ import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
+KEY_TRACKED = 'isolate_dependency_tracked'
+KEY_UNTRACKED = 'isolate_dependency_untracked'
+
if sys.platform == 'win32':
from ctypes.wintypes import create_unicode_buffer
@@ -119,6 +122,16 @@ def isEnabledFor(level):
return logging.getLogger().isEnabledFor(level)
+def fix_python_path(cmd):
+ """Returns the fixed command line to call the right python executable."""
+ out = cmd[:]
+ if out[0] == 'python':
+ out[0] = sys.executable
+ elif out[0].endswith('.py'):
+ out.insert(0, sys.executable)
+ return out
+
+
class Strace(object):
"""strace implies linux."""
IGNORED = (
@@ -1161,11 +1174,13 @@ def trace_inputs(logfile, cmd, root_dir, cwd_dir, product_dir, force_trace):
# It is important to have unambiguous path.
assert os.path.isabs(root_dir), root_dir
assert os.path.isabs(logfile), logfile
+ assert not cwd_dir or not os.path.isabs(cwd_dir), cwd_dir
+ assert not product_dir or not os.path.isabs(product_dir), product_dir
+
+ cmd = fix_python_path(cmd)
assert (
(os.path.isfile(logfile) and not force_trace) or os.path.isabs(cmd[0])
), cmd[0]
- assert not cwd_dir or not os.path.isabs(cwd_dir), cwd_dir
- assert not product_dir or not os.path.isabs(product_dir), product_dir
# Resolve any symlink
root_dir = os.path.realpath(root_dir)
@@ -1268,9 +1283,9 @@ def trace_inputs(logfile, cmd, root_dir, cwd_dir, product_dir, force_trace):
dirs = [f for f in corrected if f.endswith('/')]
variables = {}
if files:
- variables['isolate_files'] = files
+ variables[KEY_TRACKED] = files
if dirs:
- variables['isolate_dirs'] = dirs
+ variables[KEY_UNTRACKED] = dirs
value = {
'conditions': [
['OS=="%s"' % flavor, {
diff --git a/tools/isolate/trace_inputs_smoke_test.py b/tools/isolate/trace_inputs_smoke_test.py
index be55ad2..dec35e6 100755
--- a/tools/isolate/trace_inputs_smoke_test.py
+++ b/tools/isolate/trace_inputs_smoke_test.py
@@ -102,11 +102,11 @@ class TraceInputs(unittest.TestCase):
'conditions': [
['OS=="%s"' % trace_inputs.get_flavor(), {
'variables': {
- 'isolate_files': [
+ 'isolate_dependency_tracked': [
'<(DEPTH)/trace_inputs.py',
'<(DEPTH)/%s' % FILENAME,
],
- 'isolate_dirs': [
+ 'isolate_dependency_untracked': [
'trace_inputs/',
],
},