path: root/tools
authormaruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2012-05-09 20:18:48 +0000
committermaruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2012-05-09 20:18:48 +0000
commitd86fbbebdddc19e02311a27c8e44ec4119458cd2 (patch)
tree6d7cc4cff92b337adb60da3e2909f31bdd06c925 /tools
parent4fc20d17c45d997b19e75bc43ca4d0ae8edac82a (diff)
Complete rewrite of isolate.py to be more modular.
Use an OO approach to store the state. Make the code clearer.

1. Remove 'variables' from foo.result and create foo.state for less important
   state.
2. Store and manage the file paths in native os.path.sep. Simplify the whole
   code.
3. Remove the requirement of providing a .isolate file to isolate.py when
   providing an existing .result file. Make it reuse the result file's data as
   needed, simplifying the user's life.

R=nsylvain@chromium.org
BUG=98637
TEST=manually ran all the tests on linux and windows

Review URL: https://chromiumcodereview.appspot.com/10387037

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@136111 0039d316-1c4b-4281-b951-d872f2087c98
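For orientation (a sketch, not part of the commit): the change splits the single result file in two. foo.result now carries only what run_test_from_archive.py needs (command, files, read_only, relative_cwd), while the companion foo.state caches the .isolate path and the variables so repeated runs need less command-line state, and it can always be deleted. The two helpers below mirror result_to_state() and write_json() from the patch; the sample values are purely illustrative.

import json
import sys

def result_to_state(filename):
  # Mirrors the helper added in this CL: foo.result -> foo.state.
  return filename.rsplit('.', 1)[0] + '.state'

def write_json(stream, data):
  # Stable, sorted output, like the new write_json() helper.
  json.dump(data, stream, indent=2, sort_keys=True)
  stream.write('\n')

# Hypothetical contents. The .result file is all the test runner sees:
result = {
    'command': ['python', 'touch_root.py'],
    'files': {'isolate.py': {}, 'data/isolate/touch_root.py': {}},
    'read_only': None,
    'relative_cwd': 'data/isolate',
}
# The .state file only eases the developer's repeated runs and is safe to delete:
saved_state = {
    'isolate_file': '/src/tools/isolate/data/isolate/touch_root.isolate',
    'variables': {'OS': 'linux', 'EXECUTABLE_SUFFIX': ''},
}

write_json(sys.stdout, result)
write_json(sys.stdout, saved_state)
print(result_to_state('isolate_smoke_test.result'))  # -> isolate_smoke_test.state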
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/isolate/isolate.py             732
-rwxr-xr-x  tools/isolate/isolate_smoke_test.py  108
-rwxr-xr-x  tools/isolate/isolate_test.py        104
3 files changed, 562 insertions, 382 deletions
diff --git a/tools/isolate/isolate.py b/tools/isolate/isolate.py
index 2c102c3..57346eb 100755
--- a/tools/isolate/isolate.py
+++ b/tools/isolate/isolate.py
@@ -25,7 +25,6 @@ import json
import logging
import optparse
import os
-import posixpath
import re
import stat
import subprocess
@@ -36,43 +35,26 @@ import merge_isolate
import trace_inputs
import run_test_from_archive
-# Used by process_inputs().
+# Used by process_input().
NO_INFO, STATS_ONLY, WITH_HASH = range(56, 59)
def relpath(path, root):
- """os.path.relpath() that keeps trailing slash."""
+ """os.path.relpath() that keeps trailing os.path.sep."""
out = os.path.relpath(path, root)
if path.endswith(os.path.sep):
out += os.path.sep
- elif sys.platform == 'win32' and path.endswith('/'):
- # TODO(maruel): Temporary.
- out += os.path.sep
return out
def normpath(path):
- """os.path.normpath() that keeps trailing slash."""
+ """os.path.normpath() that keeps trailing os.path.sep."""
out = os.path.normpath(path)
- if path.endswith(('/', os.path.sep)):
+ if path.endswith(os.path.sep):
out += os.path.sep
return out
-def to_relative(path, root, relative):
- """Converts any absolute path to a relative path, only if under root."""
- if sys.platform == 'win32':
- path = path.lower()
- root = root.lower()
- relative = relative.lower()
- if path.startswith(root):
- logging.info('%s starts with %s' % (path, root))
- path = os.path.relpath(path, relative)
- else:
- logging.info('%s not under %s' % (path, root))
- return path
-
-
def expand_directories(indir, infiles, blacklist):
"""Expands the directories, applies the blacklist and verifies files exist."""
logging.debug('expand_directories(%s, %s, %s)' % (indir, infiles, blacklist))
@@ -126,9 +108,17 @@ def eval_variables(item, variables):
replace_variable(p, variables) for p in re.split(r'(<\([A-Z_]+\))', item))
+def indent(data, indent_length):
+ """Indents text."""
+ spacing = ' ' * indent_length
+ return ''.join(spacing + l for l in str(data).splitlines(True))
+
+
def load_isolate(content, error):
- """Loads the .isolate file. Returns the command, dependencies and read_only
- flag.
+ """Loads the .isolate file and returns the information unprocessed.
+
+ Returns the command, dependencies and read_only flag. The dependencies are
+ fixed to use os.path.sep.
"""
# Load the .isolate file, process its conditions, retrieve the command and
# dependencies.
@@ -139,58 +129,55 @@ def load_isolate(content, error):
error('Failed to load configuration for \'%s\'' % flavor)
# Merge tracked and untracked dependencies, isolate.py doesn't care about the
# trackability of the dependencies, only the build tool does.
- return config.command, config.tracked + config.untracked, config.read_only
-
+ dependencies = [
+ f.replace('/', os.path.sep) for f in config.tracked + config.untracked
+ ]
+ return config.command, dependencies, config.read_only
-def process_inputs(prevdict, indir, infiles, level, read_only):
- """Returns a dictionary of input files, populated with the files' mode and
- hash.
- |prevdict| is the previous dictionary. It is used to retrieve the cached sha-1
- to skip recalculating the hash.
+def process_input(filepath, prevdict, level, read_only):
+ """Processes an input file, a dependency, and return meta data about it.
- |level| determines the amount of information retrieved.
- 1 loads no information. 2 loads minimal stat() information. 3 calculates the
- sha-1 of the file's content.
-
- The file mode is manipulated if read_only is True. In practice, we only save
- one of 4 modes: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r). On windows, mode
- is not set since all files are 'executable' by default.
+ Arguments:
+ - filepath: File to act on.
+ - prevdict: the previous dictionary. It is used to retrieve the cached sha-1
+ to skip recalculating the hash.
+ - level: determines the amount of information retrieved.
+ - read_only: If True, the file mode is manipulated. In practice, only save
+ one of 4 modes: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r). On
+ windows, mode is not set since all files are 'executable' by
+ default.
"""
assert level in (NO_INFO, STATS_ONLY, WITH_HASH)
- outdict = {}
- for infile in infiles:
- filepath = os.path.join(indir, infile)
- outdict[infile] = {}
- if level >= STATS_ONLY:
- filestats = os.stat(filepath)
- if trace_inputs.get_flavor() != 'win':
- filemode = stat.S_IMODE(filestats.st_mode)
- # Remove write access for non-owner.
- filemode &= ~(stat.S_IWGRP | stat.S_IWOTH)
- if read_only:
- filemode &= ~stat.S_IWUSR
- if filemode & stat.S_IXUSR:
- filemode |= (stat.S_IXGRP | stat.S_IXOTH)
- else:
- filemode &= ~(stat.S_IXGRP | stat.S_IXOTH)
- outdict[infile]['mode'] = filemode
- outdict[infile]['size'] = filestats.st_size
- # Used to skip recalculating the hash. Use the most recent update time.
- outdict[infile]['timestamp'] = int(round(filestats.st_mtime))
- # If the timestamp wasn't updated, carry on the sha-1.
- if (prevdict.get(infile, {}).get('timestamp') ==
- outdict[infile]['timestamp'] and
- 'sha-1' in prevdict[infile]):
- # Reuse the previous hash.
- outdict[infile]['sha-1'] = prevdict[infile]['sha-1']
-
- if level >= WITH_HASH and not outdict[infile].get('sha-1'):
- h = hashlib.sha1()
- with open(filepath, 'rb') as f:
- h.update(f.read())
- outdict[infile]['sha-1'] = h.hexdigest()
- return outdict
+ out = {}
+ if level >= STATS_ONLY:
+ filestats = os.stat(filepath)
+ if trace_inputs.get_flavor() != 'win':
+ filemode = stat.S_IMODE(filestats.st_mode)
+ # Remove write access for non-owner.
+ filemode &= ~(stat.S_IWGRP | stat.S_IWOTH)
+ if read_only:
+ filemode &= ~stat.S_IWUSR
+ if filemode & stat.S_IXUSR:
+ filemode |= (stat.S_IXGRP | stat.S_IXOTH)
+ else:
+ filemode &= ~(stat.S_IXGRP | stat.S_IXOTH)
+ out['mode'] = filemode
+ out['size'] = filestats.st_size
+ # Used to skip recalculating the hash. Use the most recent update time.
+ out['timestamp'] = int(round(filestats.st_mtime))
+ # If the timestamp wasn't updated, carry on the sha-1.
+ if (prevdict.get('timestamp') == out['timestamp'] and
+ 'sha-1' in prevdict):
+ # Reuse the previous hash.
+ out['sha-1'] = prevdict['sha-1']
+
+ if level >= WITH_HASH and not out.get('sha-1'):
+ h = hashlib.sha1()
+ with open(filepath, 'rb') as f:
+ h.update(f.read())
+ out['sha-1'] = h.hexdigest()
+ return out
def recreate_tree(outdir, indir, infiles, action):
@@ -226,111 +213,304 @@ def recreate_tree(outdir, indir, infiles, action):
run_test_from_archive.link_file(outfile, infile, action)
-def load_results(resultfile):
- """Loads the previous results as an optimization."""
- data = {}
- if resultfile and os.path.isfile(resultfile):
- resultfile = os.path.abspath(resultfile)
- with open(resultfile, 'r') as f:
- data = json.load(f)
- logging.debug('Loaded %s' % resultfile)
- else:
- resultfile = os.path.abspath(resultfile)
- logging.debug('%s was not found' % resultfile)
+def result_to_state(filename):
+ """Replaces the file's extension."""
+ return filename.rsplit('.', 1)[0] + '.state'
- # Works with native os.path.sep but stores as '/'.
- if 'files' in data and os.path.sep != '/':
- data['files'] = dict(
- (k.replace('/', os.path.sep), v)
- for k, v in data['files'].iteritems())
- return data
+def write_json(stream, data):
+ """Writes data to a stream as json."""
+ json.dump(data, stream, indent=2, sort_keys=True)
+ stream.write('\n')
-def save_results(resultfile, data):
- data = data.copy()
- # Works with native os.path.sep but stores as '/'.
- if os.path.sep != '/':
- data['files'] = dict(
- (k.replace(os.path.sep, '/'), v) for k, v in data['files'].iteritems())
+def determine_root_dir(relative_root, infiles):
+ """For a list of infiles, determines the deepest root directory that is
+ referenced indirectly.
- f = None
- try:
- if resultfile:
- f = open(resultfile, 'wb')
- else:
- f = sys.stdout
- json.dump(data, f, indent=2, sort_keys=True)
- f.write('\n')
- finally:
- if resultfile and f:
- f.close()
+ All arguments must be using os.path.sep.
+ """
+ # The trick used to determine the root directory is to look at "how far" back
+ # up it is looking up.
+ deepest_root = relative_root
+ for i in infiles:
+ x = relative_root
+ while i.startswith('..' + os.path.sep):
+ i = i[3:]
+ assert not i.startswith(os.path.sep)
+ x = os.path.dirname(x)
+ if deepest_root.startswith(x):
+ deepest_root = x
+ logging.debug(
+ 'determine_root_dir(%s, %s) -> %s' % (
+ relative_root, infiles, deepest_root))
+ return deepest_root
- total_bytes = sum(i.get('size', 0) for i in data['files'].itervalues())
- if total_bytes:
- logging.debug('Total size: %d bytes' % total_bytes)
+def process_variables(variables, relative_base_dir, error):
+ """Processes path variables as a special case and returns a copy of the dict.
-def isolate(outdir, mode, indir, infiles, data):
- """Main function to isolate a target with its dependencies.
+ For each 'path' variable: first normalizes it, verifies it exists, converts it
+ to an absolute path, then sets it as relative to relative_base_dir.
+ """
+ variables = variables.copy()
+ for i in ('DEPTH', 'PRODUCT_DIR'):
+ if i not in variables:
+ continue
+ variable = os.path.normpath(variables[i])
+ if not os.path.isdir(variable):
+ error('%s=%s is not a directory' % (i, variable))
+ # Variables could contain / or \ on windows. Always normalize to
+ # os.path.sep.
+ variable = os.path.abspath(variable.replace('/', os.path.sep))
+ # All variables are relative to the .isolate file.
+ variables[i] = os.path.relpath(variable, relative_base_dir)
+ return variables
+
+
+class Flattenable(object):
+ """Represents data that can be represented as a json file."""
+ MEMBERS = ()
+
+ def flatten(self):
+ """Returns a json-serializable version of itself."""
+ return dict((member, getattr(self, member)) for member in self.MEMBERS)
+
+ @classmethod
+ def load(cls, data):
+ """Loads a flattened version."""
+ data = data.copy()
+ out = cls()
+ for member in out.MEMBERS:
+ if member in data:
+ value = data.pop(member)
+ setattr(out, member, value)
+ assert not data, data
+ return out
+
+ @classmethod
+ def load_file(cls, filename):
+ """Loads the data from a file or return an empty instance."""
+ out = cls()
+ try:
+ with open(filename, 'r') as f:
+ out = cls.load(json.load(f))
+ logging.debug('Loaded %s(%s)' % (cls.__name__, filename))
+ except IOError:
+ pass
+ return out
- Arguments:
- - outdir: Output directory where the result is stored. Depends on |mode|.
- - indir: Root directory to be used as the base directory for infiles.
- - infiles: List of files, with relative path, to process.
- - mode: Action to do. See file level docstring.
- - data: Contains all the command specific meta-data.
- Some arguments are optional, dependending on |mode|. See the corresponding
- MODE<mode> function for the exact behavior.
+class Result(Flattenable):
+ """Describes the content of a .result file.
+
+ This file is used by run_test_from_archive.py so its content is strictly only
+ what is necessary to run the test outside of a checkout.
+ """
+ MEMBERS = (
+ 'command',
+ 'files',
+ 'read_only',
+ 'relative_cwd',
+ )
+
+ def __init__(self):
+ super(Result, self).__init__()
+ self.command = []
+ self.files = {}
+ self.read_only = None
+ self.relative_cwd = None
+
+ def update(self, command, infiles, read_only, relative_cwd):
+ """Updates the result state with new information."""
+ self.command = command
+ # Add new files.
+ for f in infiles:
+ self.files.setdefault(f, {})
+ # Prune extraneous files that are not a dependency anymore.
+ for f in set(infiles).difference(self.files.keys()):
+ del self.files[f]
+ if read_only is not None:
+ self.read_only = read_only
+ self.relative_cwd = relative_cwd
+
+ def __str__(self):
+ out = '%s(\n' % self.__class__.__name__
+ out += ' command: %s\n' % self.command
+ out += ' files: %s\n' % ', '.join(sorted(self.files))
+ out += ' read_only: %s\n' % self.read_only
+ out += ' relative_cwd: %s)' % self.relative_cwd
+ return out
+
+
+class SavedState(Flattenable):
+ """Describes the content of a .state file.
+
+ The items in this file are simply to improve the developer's life and aren't
+ used by run_test_from_archive.py. This file can always be safely removed.
+
+ isolate_file permits to find back root_dir, variables are used for stateful
+ rerun.
"""
- modes = {
- 'check': MODEcheck,
- 'hashtable': MODEhashtable,
- 'remap': MODEremap,
- 'run': MODErun,
- 'trace': MODEtrace,
- }
- mode_fn = modes[mode]
-
- infiles = expand_directories(
- indir, infiles, lambda x: re.match(r'.*\.(git|svn|pyc)$', x))
-
- # Only hashtable mode really needs the sha-1.
- level = {
- 'check': NO_INFO,
- 'hashtable': WITH_HASH,
- 'remap': STATS_ONLY,
- 'run': STATS_ONLY,
- 'trace': STATS_ONLY,
- }
- # Regenerate data['files'] from infiles.
- data['files'] = process_inputs(
- data.get('files', {}), indir, infiles, level[mode], data.get('read_only'))
-
- result = mode_fn(outdir, indir, data)
- return result, data
-
-
-def MODEcheck(_outdir, _indir, _data):
+ MEMBERS = (
+ 'isolate_file',
+ 'variables',
+ )
+
+ def __init__(self):
+ super(SavedState, self).__init__()
+ self.isolate_file = None
+ self.variables = {}
+
+ def update(self, isolate_file, variables):
+ """Updates the saved state with new information."""
+ self.isolate_file = isolate_file
+ self.variables.update(variables)
+
+ def __str__(self):
+ out = '%s(\n' % self.__class__.__name__
+ out += ' isolate_file: %s\n' % self.isolate_file
+ out += ' variables: %s' % ''.join(
+ '\n %s=%s' % (k, self.variables[k]) for k in sorted(self.variables))
+ out += ')'
+ return out
+
+
+class CompleteState(object):
+ """Contains all the state to run the task at hand."""
+ def __init__(self, result_file, result, saved_state, out_dir):
+ super(CompleteState, self).__init__()
+ self.result_file = result_file
+ # Contains the data that will be used by run_test_from_archive.py
+ self.result = result
+ # Contains the data to ease developer's use-case but that is not strictly
+ # necessary.
+ self.saved_state = saved_state
+ self.out_dir = out_dir
+
+ @classmethod
+ def load_files(cls, result_file, out_dir):
+ """Loads state from disk."""
+ assert os.path.isabs(result_file), result_file
+ assert result_file.rsplit('.', 1)[1] == 'result', result_file
+ return cls(
+ result_file,
+ Result.load_file(result_file),
+ SavedState.load_file(result_to_state(result_file)),
+ out_dir)
+
+ def load_isolate(self, isolate_file, variables, error):
+ """Updates self.result and self.saved_state with information loaded from a
+ .isolate file.
+
+ Processes the loaded data, deduce root_dir, relative_cwd.
+ """
+ # Make sure to not depend on os.getcwd().
+ assert os.path.isabs(isolate_file), isolate_file
+ logging.info(
+ 'CompleteState.load_isolate(%s, %s)' % (isolate_file, variables))
+ relative_base_dir = os.path.dirname(isolate_file)
+
+ # Processes the variables and update the saved state.
+ variables = process_variables(variables, relative_base_dir, error)
+ self.saved_state.update(isolate_file, variables)
+
+ with open(isolate_file, 'r') as f:
+ # At that point, variables are not replaced yet in command and infiles.
+ # infiles may contain directory entries and is in posix style.
+ command, infiles, read_only = load_isolate(f.read(), error)
+ command = [eval_variables(i, variables) for i in command]
+ infiles = [eval_variables(f, variables) for f in infiles]
+ # root_dir is automatically determined by the deepest root accessed with the
+ # form '../../foo/bar'.
+ root_dir = determine_root_dir(relative_base_dir, infiles)
+ # The relative directory is automatically determined by the relative path
+ # between root_dir and the directory containing the .isolate file,
+ # isolate_base_dir.
+ relative_cwd = os.path.relpath(relative_base_dir, root_dir)
+ # Normalize the files based to root_dir. It is important to keep the
+ # trailing os.path.sep at that step.
+ infiles = [
+ relpath(normpath(os.path.join(relative_base_dir, f)), root_dir)
+ for f in infiles
+ ]
+ # Expand the directories by listing each file inside. Up to now, trailing
+ # os.path.sep must be kept.
+ infiles = expand_directories(
+ root_dir,
+ infiles,
+ lambda x: re.match(r'.*\.(git|svn|pyc)$', x))
+
+ # Finally, update the new stuff in the foo.result file, the file that is
+ # used by run_test_from_archive.py.
+ self.result.update(command, infiles, read_only, relative_cwd)
+ logging.debug(self)
+
+ def process_inputs(self, level):
+ """Updates self.result.files with the files' mode and hash.
+
+ See process_input() for more information.
+ """
+ for infile in sorted(self.result.files):
+ filepath = os.path.join(self.root_dir, infile)
+ self.result.files[infile] = process_input(
+ filepath, self.result.files[infile], level, self.result.read_only)
+
+ def save_files(self):
+ """Saves both self.result and self.saved_state."""
+ with open(self.result_file, 'wb') as f:
+ write_json(f, self.result.flatten())
+ total_bytes = sum(i.get('size', 0) for i in self.result.files.itervalues())
+ if total_bytes:
+ logging.debug('Total size: %d bytes' % total_bytes)
+ with open(result_to_state(self.result_file), 'wb') as f:
+ write_json(f, self.saved_state.flatten())
+
+ @property
+ def root_dir(self):
+ """isolate_file is always inside relative_cwd relative to root_dir."""
+ isolate_dir = os.path.dirname(self.saved_state.isolate_file)
+ # Special case '.'.
+ if self.result.relative_cwd == '.':
+ return isolate_dir
+ assert isolate_dir.endswith(self.result.relative_cwd), (
+ isolate_dir, self.result.relative_cwd)
+ return isolate_dir[:-len(self.result.relative_cwd)]
+
+ @property
+ def resultdir(self):
+ """Directory containing the results, usually equivalent to the variable
+ PRODUCT_DIR.
+ """
+ return os.path.dirname(self.result_file)
+
+ def __str__(self):
+ out = '%s(\n' % self.__class__.__name__
+ out += ' root_dir: %s\n' % self.root_dir
+ out += ' result: %s\n' % indent(self.result, 2)
+ out += ' saved_state: %s)' % indent(self.saved_state, 2)
+ return out
+
+
+def MODEcheck(_outdir, _state):
"""No-op."""
return 0
-def MODEhashtable(outdir, indir, data):
+def MODEhashtable(outdir, state):
outdir = (
- outdir or os.path.join(os.path.dirname(data['resultdir']), 'hashtable'))
+ outdir or os.path.join(os.path.dirname(state.resultdir), 'hashtable'))
if not os.path.isdir(outdir):
os.makedirs(outdir)
- for relfile, properties in data['files'].iteritems():
- infile = os.path.join(indir, relfile)
+ for relfile, properties in state.result.files.iteritems():
+ infile = os.path.join(state.root_dir, relfile)
outfile = os.path.join(outdir, properties['sha-1'])
if os.path.isfile(outfile):
# Just do a quick check that the file size matches. No need to stat()
# again the input file, grab the value from the dict.
out_size = os.stat(outfile).st_size
in_size = (
- data.get('files', {}).get(infile, {}).get('size') or
+ state.result.files[infile].get('size') or
os.stat(infile).st_size)
if in_size == out_size:
continue
@@ -340,7 +520,7 @@ def MODEhashtable(outdir, indir, data):
return 0
-def MODEremap(outdir, indir, data):
+def MODEremap(outdir, state):
if not outdir:
outdir = tempfile.mkdtemp(prefix='isolate')
else:
@@ -351,34 +531,40 @@ def MODEremap(outdir, indir, data):
print 'Can\'t remap in a non-empty directory'
return 1
recreate_tree(
- outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK)
- if data['read_only']:
+ outdir,
+ state.root_dir,
+ state.result.files.keys(),
+ run_test_from_archive.HARDLINK)
+ if state.result.read_only:
run_test_from_archive.make_writable(outdir, True)
return 0
-def MODErun(_outdir, indir, data):
+def MODErun(_outdir, state):
"""Always uses a temporary directory."""
try:
outdir = tempfile.mkdtemp(prefix='isolate')
recreate_tree(
- outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK)
- cwd = os.path.join(outdir, data['relative_cwd'])
+ outdir,
+ state.root_dir,
+ state.result.files.keys(),
+ run_test_from_archive.HARDLINK)
+ cwd = os.path.join(outdir, state.result.relative_cwd)
if not os.path.isdir(cwd):
os.makedirs(cwd)
- if data['read_only']:
+ if state.result.read_only:
run_test_from_archive.make_writable(outdir, True)
- if not data['command']:
+ if not state.result.command:
print 'No command to run'
return 1
- cmd = trace_inputs.fix_python_path(data['command'])
+ cmd = trace_inputs.fix_python_path(state.result.command)
logging.info('Running %s, cwd=%s' % (cmd, cwd))
return subprocess.call(cmd, cwd=cwd)
finally:
run_test_from_archive.rmtree(outdir)
-def MODEtrace(_outdir, indir, data):
+def MODEtrace(_outdir, state):
"""Shortcut to use trace_inputs.py properly.
It constructs the equivalent of dictfiles. It is hardcoded to base the
@@ -386,140 +572,107 @@ def MODEtrace(_outdir, indir, data):
"""
logging.info(
'Running %s, cwd=%s' % (
- data['command'], os.path.join(indir, data['relative_cwd'])))
+ state.result.command,
+ os.path.join(state.root_dir, state.result.relative_cwd)))
product_dir = None
- if data['resultdir'] and indir:
+ if state.resultdir and state.root_dir:
# Defaults to none if both are the same directory.
try:
- product_dir = os.path.relpath(data['resultdir'], indir) or None
+ product_dir = os.path.relpath(state.resultdir, state.root_dir) or None
except ValueError:
- # This happens on Windows if data['resultdir'] is one drive, let's say
- # 'C:\' and indir on another one like 'D:\'.
+ # This happens on Windows if state.resultdir is one drive, let's say
+ # 'C:\' and state.root_dir on another one like 'D:\'.
product_dir = None
- if not data['command']:
+ if not state.result.command:
print 'No command to run'
return 1
return trace_inputs.trace_inputs(
- data['resultfile'] + '.log',
- data['command'],
- indir,
- data['relative_cwd'],
+ state.result_file + '.log',
+ state.result.command,
+ state.root_dir,
+ state.result.relative_cwd,
product_dir,
False)
-def get_valid_modes():
- """Returns the modes that can be used."""
- return sorted(
- i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE'))
+# Must be declared after all the functions.
+VALID_MODES = {
+ 'check': MODEcheck,
+ 'hashtable': MODEhashtable,
+ 'remap': MODEremap,
+ 'run': MODErun,
+ 'trace': MODEtrace,
+}
-def determine_root_dir(relative_root, infiles):
- """For a list of infiles, determines the deepest root directory that is
- referenced indirectly.
+# Only hashtable mode really needs the sha-1.
+LEVELS = {
+ 'check': NO_INFO,
+ 'hashtable': WITH_HASH,
+ 'remap': STATS_ONLY,
+ 'run': STATS_ONLY,
+ 'trace': STATS_ONLY,
+}
- All the paths are processed as posix-style but are eventually returned as
- os.path.sep.
- """
- # The trick used to determine the root directory is to look at "how far" back
- # up it is looking up.
- relative_root = relative_root.replace(os.path.sep, '/')
- deepest_root = relative_root
- for i in infiles:
- x = relative_root
- i = i.replace(os.path.sep, '/')
- while i.startswith('../'):
- i = i[3:]
- assert not i.startswith('/')
- x = posixpath.dirname(x)
- if deepest_root.startswith(x):
- deepest_root = x
- deepest_root = deepest_root.replace('/', os.path.sep)
- logging.debug(
- 'determine_root_dir(%s, %s) -> %s' % (
- relative_root, infiles, deepest_root))
- return deepest_root.replace('/', os.path.sep)
-
-
-def process_options(variables, resultfile, input_file, error):
- """Processes the options and loads the input and result files.
-
- Returns a tuple of:
- - The deepest root directory used as a relative path, to be used to determine
- 'indir'.
- - The list of dependency files.
- - The 'data' dictionary. It contains all the processed data from the result
- file if it existed, augmented with current data. This permits keeping the
- state of data['variables'] across runs, simplifying the command line on
- repeated run, e.g. the variables are kept between runs.
- Warning: data['files'] is stale at that point and it only use as a cache for
- the previous hash if the file wasn't touched between two runs, to speed it
- up. 'infiles' must be used as the valid list of dependencies.
+
+assert (
+ sorted(i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) ==
+ sorted(VALID_MODES))
+
+
+def isolate(result_file, isolate_file, mode, variables, out_dir, error):
+ """Main function to isolate a target with its dependencies.
+
+ Arguments:
+ - result_file: File to load or save state from.
+ - isolate_file: File to load data from. Can be None if result_file contains
+ the necessary information.
+ - mode: Action to do. See file level docstring.
+ - variables: Variables to process, if necessary.
+ - out_dir: Output directory where the result is stored. Its use depends on
+ |mode|.
+
+ Some arguments are optional, depending on |mode|. See the corresponding
+ MODE<mode> function for the exact behavior.
"""
- # Constants
- input_file = os.path.abspath(input_file).replace('/', os.path.sep)
- relative_base_dir = os.path.dirname(input_file)
- resultfile = os.path.abspath(resultfile).replace('/', os.path.sep)
- logging.info(
- 'process_options(%s, %s, %s, ...)' % (variables, resultfile, input_file))
+ # First, load the previous stuff if it was present. Namely, "foo.result" and
+ # "foo.state".
+ complete_state = CompleteState.load_files(result_file, out_dir)
+ isolate_file = isolate_file or complete_state.saved_state.isolate_file
+ if not isolate_file:
+ error('A .isolate file is required.')
+ if (complete_state.saved_state.isolate_file and
+ isolate_file != complete_state.saved_state.isolate_file):
+ error(
+ '%s and %s do not match.' % (
+ isolate_file, complete_state.saved_state.isolate_file))
- # Process path variables as a special case. First normalize it, verifies it
- # exists, convert it to an absolute path, then set it as relative to
- # relative_base_dir.
- for i in ('DEPTH', 'PRODUCT_DIR'):
- if i not in variables:
- continue
- variable = os.path.normpath(variables[i])
- if not os.path.isdir(variable):
- error('%s=%s is not a directory' % (i, variable))
- variable = os.path.abspath(variable).replace('/', os.path.sep)
- # All variables are relative to the input file.
- variables[i] = os.path.relpath(variable, relative_base_dir)
+ try:
+ # Then process options and expands directories.
+ complete_state.load_isolate(isolate_file, variables, error)
- # At that point, variables are not replaced yet in command and infiles.
- command, infiles, read_only = load_isolate(
- open(input_file, 'r').read(), error)
-
- # Load the result file and set the values already known about.
- data = load_results(resultfile)
- data['read_only'] = read_only
- data['resultfile'] = resultfile
- data['resultdir'] = os.path.dirname(resultfile)
- # Keep the old variables but override them with the new ones.
- data.setdefault('variables', {}).update(variables)
-
- # Convert the variables.
- data['command'] = [eval_variables(i, data['variables']) for i in command]
- infiles = [eval_variables(f, data['variables']) for f in infiles]
- root_dir = determine_root_dir(relative_base_dir, infiles)
-
- # The relative directory is automatically determined by the relative path
- # between root_dir and the directory containing the .isolate file,
- # isolate_base_dir. Keep relative_cwd posix-style.
- data['relative_cwd'] = os.path.relpath(relative_base_dir, root_dir).replace(
- os.path.sep, '/')
-
- logging.debug('relative_cwd: %s' % data['relative_cwd'])
- logging.debug(
- 'variables: %s' % ', '.join(
- '%s=%s' % (k, data['variables'][k]) for k in sorted(data['variables'])))
- logging.debug('command: %s' % data['command'])
- logging.debug('read_only: %s' % data['read_only'])
+ # Regenerate complete_state.result.files.
+ complete_state.process_inputs(LEVELS[mode])
+
+ # Finally run the mode-specific code.
+ result = VALID_MODES[mode](out_dir, complete_state)
+ except run_test_from_archive.MappingError, e:
+ error(str(e))
- # Normalize the infiles paths in case some absolute paths got in.
- logging.debug('infiles before normalization: %s' % infiles)
- infiles = [normpath(os.path.join(data['relative_cwd'], f)) for f in infiles]
- logging.debug('processed infiles: %s' % infiles)
- return root_dir, infiles, data
+ # Then store the result and state.
+ complete_state.save_files()
+ return result
def main():
+ """Handles CLI and normalizes the input arguments to pass them to isolate().
+ """
default_variables = [('OS', trace_inputs.get_flavor())]
if sys.platform in ('win32', 'cygwin'):
default_variables.append(('EXECUTABLE_SUFFIX', '.exe'))
else:
default_variables.append(('EXECUTABLE_SUFFIX', ''))
- valid_modes = get_valid_modes() + ['noop']
+ valid_modes = sorted(VALID_MODES.keys() + ['noop'])
parser = optparse.OptionParser(
usage='%prog [options] [.isolate file]',
description=sys.modules[__name__].__doc__)
@@ -560,7 +713,10 @@ def main():
if not options.mode:
parser.error('--mode is required')
- if len(args) != 1:
+ if not options.result:
+ parser.error('--result is required.')
+
+ if len(args) > 1:
logging.debug('%s' % sys.argv)
parser.error('Use only one argument which should be a .isolate file')
@@ -569,21 +725,27 @@ def main():
# have all the test data files checked out. Exit silently.
return 0
- root_dir, infiles, data = process_options(
- dict(options.variables), options.result, args[0], parser.error)
-
- try:
- resultcode, data = isolate(
- options.outdir,
- options.mode,
- root_dir,
- infiles,
- data)
- except run_test_from_archive.MappingError, e:
- print >> sys.stderr, str(e)
- return 1
- save_results(options.result, data)
- return resultcode
+ # Make sure the paths make sense. On Windows, / and \ are often mixed together
+ # in a path.
+ result_file = os.path.abspath(options.result.replace('/', os.path.sep))
+ # input_file may be None.
+ input_file = (
+ os.path.abspath(args[0].replace('/', os.path.sep)) if args else None)
+ # out_dir may be None.
+ out_dir = (
+ os.path.abspath(options.outdir.replace('/', os.path.sep))
+ if options.outdir else None)
+ # Fix variables.
+ variables = dict(options.variables)
+
+ # After basic validation, pass this to isolate().
+ return isolate(
+ result_file,
+ input_file,
+ options.mode,
+ variables,
+ out_dir,
+ parser.error)
if __name__ == '__main__':
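Before the test diffs, a worked example of the root-directory detection that the new load_isolate() path relies on: determine_root_dir() (adapted from the patch above) strips each dependency's leading '..' components and keeps the deepest directory that every path still falls under. The paths in the usage example are hypothetical and use POSIX separators.

import logging
import os

def determine_root_dir(relative_root, infiles):
  """Adapted from the patch: deepest root directory referenced indirectly.

  All arguments must be using os.path.sep.
  """
  deepest_root = relative_root
  for i in infiles:
    x = relative_root
    while i.startswith('..' + os.path.sep):
      i = i[3:]
      assert not i.startswith(os.path.sep)
      x = os.path.dirname(x)
    if deepest_root.startswith(x):
      deepest_root = x
  logging.debug(
      'determine_root_dir(%s, %s) -> %s' % (relative_root, infiles, deepest_root))
  return deepest_root

# touch_root.isolate lives in <root>/data/isolate but depends on ../../isolate.py,
# so the detected root climbs two levels above the .isolate file's directory:
print(determine_root_dir(
    '/src/tools/isolate/data/isolate',
    [os.path.join('..', '..', 'isolate.py'), 'touch_root.py']))
# -> /src/tools/isolate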
diff --git a/tools/isolate/isolate_smoke_test.py b/tools/isolate/isolate_smoke_test.py
index f2e7f25..f16930a 100755
--- a/tools/isolate/isolate_smoke_test.py
+++ b/tools/isolate/isolate_smoke_test.py
@@ -29,19 +29,26 @@ RELATIVE_CWD = {
'missing_trailing_slash': '.',
'no_run': '.',
'non_existent': '.',
- 'touch_root': 'data/isolate',
+ 'touch_root': os.path.join('data', 'isolate'),
'with_flag': '.',
}
DEPENDENCIES = {
'fail': ['fail.py'],
'missing_trailing_slash': [],
'no_run': [
- 'no_run.isolate', 'files1/test_file1.txt', 'files1/test_file2.txt',
+ 'no_run.isolate',
+ os.path.join('files1', 'test_file1.txt'),
+ os.path.join('files1', 'test_file2.txt'),
],
'non_existent': [],
- 'touch_root': ['data/isolate/touch_root.py', 'isolate.py'],
+ 'touch_root': [
+ os.path.join('data', 'isolate', 'touch_root.py'),
+ 'isolate.py',
+ ],
'with_flag': [
- 'with_flag.py', 'files1/test_file1.txt', 'files1/test_file2.txt',
+ 'with_flag.py',
+ os.path.join('files1', 'test_file1.txt'),
+ os.path.join('files1', 'test_file2.txt'),
],
}
@@ -69,6 +76,7 @@ class IsolateBase(unittest.TestCase):
self.tempdir = tempfile.mkdtemp()
self.result = os.path.join(self.tempdir, 'isolate_smoke_test.result')
self.outdir = os.path.join(self.tempdir, 'isolated')
+ self.maxDiff = None
def tearDown(self):
shutil.rmtree(self.tempdir)
@@ -79,9 +87,7 @@ class IsolateBase(unittest.TestCase):
def _result_tree(self):
actual = []
for root, _dirs, files in os.walk(self.outdir):
- actual.extend(
- os.path.join(root, f)[len(self.outdir)+1:].replace(os.path.sep, '/')
- for f in files)
+ actual.extend(os.path.join(root, f)[len(self.outdir)+1:] for f in files)
return sorted(actual)
def _expected_tree(self):
@@ -121,28 +127,34 @@ class IsolateBase(unittest.TestCase):
files[filename][u'sha-1'] = unicode(h.hexdigest())
return files
- def _expected_result(self, args, read_only, extra_vars=None):
+ def _expected_result(self, args, read_only):
"""Verifies self.result contains the expected data."""
- flavor = isolate.trace_inputs.get_flavor()
expected = {
u'files': self._gen_files(read_only),
u'read_only': read_only,
u'relative_cwd': unicode(RELATIVE_CWD[self.case()]),
- u'resultdir': os.path.dirname(self.result),
- u'resultfile': self.result,
+ }
+ if args:
+ expected[u'command'] = [u'python'] + [unicode(x) for x in args]
+ else:
+ expected[u'command'] = []
+ self.assertEquals(expected, json.load(open(self.result, 'r')))
+
+ def _expected_saved_state(self, extra_vars):
+ flavor = isolate.trace_inputs.get_flavor()
+ expected = {
+ u'isolate_file': unicode(self.filename()),
u'variables': {
u'EXECUTABLE_SUFFIX': '.exe' if flavor == 'win' else '',
u'OS': unicode(flavor),
},
}
expected['variables'].update(extra_vars or {})
- if args:
- expected[u'command'] = [u'python'] + [unicode(x) for x in args]
- else:
- expected[u'command'] = []
+ self.assertEquals(expected, json.load(open(self.saved_state(), 'r')))
- self.assertEquals(expected, json.load(open(self.result, 'rb')))
- return expected
+ def _expect_results(self, args, read_only, extra_vars):
+ self._expected_result(args, read_only)
+ self._expected_saved_state(extra_vars)
def _expect_no_result(self):
self.assertFalse(os.path.exists(self.result))
@@ -208,6 +220,9 @@ class IsolateBase(unittest.TestCase):
self.assertTrue(os.path.isfile(filename), filename)
return filename
+ def saved_state(self):
+ return isolate.result_to_state(self.result)
+
class Isolate(unittest.TestCase):
def test_help_modes(self):
@@ -250,7 +265,7 @@ class Isolate_check(IsolateBase):
def test_fail(self):
self._execute('check', 'fail.isolate', [], False)
self._expect_no_tree()
- self._expected_result(['fail.py'], None)
+ self._expect_results(['fail.py'], None, None)
def test_missing_trailing_slash(self):
try:
@@ -273,17 +288,18 @@ class Isolate_check(IsolateBase):
def test_no_run(self):
self._execute('check', 'no_run.isolate', [], False)
self._expect_no_tree()
- self._expected_result([], None)
+ self._expect_results([], None, None)
def test_touch_root(self):
self._execute('check', 'touch_root.isolate', [], False)
self._expect_no_tree()
- self._expected_result(['touch_root.py'], None)
+ self._expect_results(['touch_root.py'], None, None)
def test_with_flag(self):
self._execute('check', 'with_flag.isolate', ['-V', 'FLAG', 'gyp'], False)
self._expect_no_tree()
- self._expected_result(['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'})
+ self._expect_results(
+ ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'})
class Isolate_hashtable(IsolateBase):
@@ -297,7 +313,7 @@ class Isolate_hashtable(IsolateBase):
def test_fail(self):
self._execute('hashtable', 'fail.isolate', [], False)
self._expected_hash_tree()
- self._expected_result(['fail.py'], None)
+ self._expect_results(['fail.py'], None, None)
def test_missing_trailing_slash(self):
try:
@@ -320,18 +336,19 @@ class Isolate_hashtable(IsolateBase):
def test_no_run(self):
self._execute('hashtable', 'no_run.isolate', [], False)
self._expected_hash_tree()
- self._expected_result([], None)
+ self._expect_results([], None, None)
def test_touch_root(self):
self._execute('hashtable', 'touch_root.isolate', [], False)
self._expected_hash_tree()
- self._expected_result(['touch_root.py'], None)
+ self._expect_results(['touch_root.py'], None, None)
def test_with_flag(self):
self._execute(
'hashtable', 'with_flag.isolate', ['-V', 'FLAG', 'gyp'], False)
self._expected_hash_tree()
- self._expected_result(['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'})
+ self._expect_results(
+ ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'})
class Isolate_remap(IsolateBase):
@@ -340,7 +357,7 @@ class Isolate_remap(IsolateBase):
def test_fail(self):
self._execute('remap', 'fail.isolate', [], False)
self._expected_tree()
- self._expected_result(['fail.py'], None)
+ self._expect_results(['fail.py'], None, None)
def test_missing_trailing_slash(self):
try:
@@ -363,17 +380,18 @@ class Isolate_remap(IsolateBase):
def test_no_run(self):
self._execute('remap', 'no_run.isolate', [], False)
self._expected_tree()
- self._expected_result([], None)
+ self._expect_results([], None, None)
def test_touch_root(self):
self._execute('remap', 'touch_root.isolate', [], False)
self._expected_tree()
- self._expected_result(['touch_root.py'], None)
+ self._expect_results(['touch_root.py'], None, None)
def test_with_flag(self):
self._execute('remap', 'with_flag.isolate', ['-V', 'FLAG', 'gyp'], False)
self._expected_tree()
- self._expected_result(['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'})
+ self._expect_results(
+ ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'})
class Isolate_run(IsolateBase):
@@ -389,7 +407,7 @@ class Isolate_run(IsolateBase):
except subprocess.CalledProcessError:
pass
self._expect_empty_tree()
- self._expected_result(['fail.py'], None)
+ self._expect_results(['fail.py'], None, None)
def test_missing_trailing_slash(self):
try:
@@ -416,18 +434,19 @@ class Isolate_run(IsolateBase):
except subprocess.CalledProcessError:
pass
self._expect_empty_tree()
- self._expected_result([], None)
+ self._expect_results([], None, None)
def test_touch_root(self):
self._execute('run', 'touch_root.isolate', [], False)
self._expect_empty_tree()
- self._expected_result(['touch_root.py'], None)
+ self._expect_results(['touch_root.py'], None, None)
def test_with_flag(self):
self._execute('run', 'with_flag.isolate', ['-V', 'FLAG', 'run'], False)
# Not sure about the empty tree, should be deleted.
self._expect_empty_tree()
- self._expected_result(['with_flag.py', 'run'], None, {u'FLAG': u'run'})
+ self._expect_results(
+ ['with_flag.py', 'run'], None, {u'FLAG': u'run'})
class Isolate_trace(IsolateBase):
@@ -446,7 +465,7 @@ class Isolate_trace(IsolateBase):
except subprocess.CalledProcessError, e:
out = e.output
self._expect_no_tree()
- self._expected_result(['fail.py'], None)
+ self._expect_results(['fail.py'], None, None)
# In theory, there should be 2 \n at the end of expected but for an
# unknown reason there's 3 \n on Windows so just rstrip() and compare the
# text, that's sufficient for this test.
@@ -461,8 +480,11 @@ class Isolate_trace(IsolateBase):
out = e.output
self._expect_no_tree()
self._expect_no_result()
- expected = 'Input directory %s must have a trailing slash\n' % os.path.join(
- ROOT_DIR, 'data', 'isolate', 'files1')
+ expected = (
+ 'Usage: isolate.py [options] [.isolate file]\n\n'
+ 'isolate.py: error: Input directory %s must have a trailing slash\n' %
+ os.path.join(ROOT_DIR, 'data', 'isolate', 'files1')
+ )
self.assertEquals(expected, out)
def test_non_existent(self):
@@ -473,8 +495,11 @@ class Isolate_trace(IsolateBase):
out = e.output
self._expect_no_tree()
self._expect_no_result()
- expected = 'Input file %s doesn\'t exist\n' % os.path.join(
- ROOT_DIR, 'data', 'isolate', 'A_file_that_do_not_exist')
+ expected = (
+ 'Usage: isolate.py [options] [.isolate file]\n\n'
+ 'isolate.py: error: Input file %s doesn\'t exist\n' %
+ os.path.join(ROOT_DIR, 'data', 'isolate', 'A_file_that_do_not_exist')
+ )
self.assertEquals(expected, out)
def test_no_run(self):
@@ -484,14 +509,14 @@ class Isolate_trace(IsolateBase):
except subprocess.CalledProcessError, e:
out = e.output
self._expect_no_tree()
- self._expected_result([], None)
+ self._expect_results([], None, None)
expected = 'No command to run\n'
self.assertEquals(expected, out)
def test_touch_root(self):
out = self._execute('trace', 'touch_root.isolate', [], True)
self._expect_no_tree()
- self._expected_result(['touch_root.py'], None)
+ self._expect_results(['touch_root.py'], None, None)
expected = {
'conditions': [
['OS=="%s"' % isolate.trace_inputs.get_flavor(), {
@@ -510,7 +535,7 @@ class Isolate_trace(IsolateBase):
out = self._execute(
'trace', 'with_flag.isolate', ['-V', 'FLAG', 'trace'], True)
self._expect_no_tree()
- self._expected_result(['with_flag.py', 'trace'], None, {u'FLAG': u'trace'})
+ self._expect_results(['with_flag.py', 'trace'], None, {u'FLAG': u'trace'})
expected = {
'conditions': [
['OS=="%s"' % isolate.trace_inputs.get_flavor(), {
@@ -519,6 +544,7 @@ class Isolate_trace(IsolateBase):
'with_flag.py',
],
isolate.trace_inputs.KEY_UNTRACKED: [
+ # Note that .isolate format mandates / and not os.path.sep.
'files1/',
],
},
diff --git a/tools/isolate/isolate_test.py b/tools/isolate/isolate_test.py
index e9cf486..2c50673 100755
--- a/tools/isolate/isolate_test.py
+++ b/tools/isolate/isolate_test.py
@@ -3,11 +3,9 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import json
import logging
import os
import sys
-import tempfile
import unittest
import isolate
@@ -20,39 +18,7 @@ class Isolate(unittest.TestCase):
# Everything should work even from another directory.
os.chdir(os.path.dirname(ROOT_DIR))
- def _run_process_options(self, values, variables, more_expected_data):
- """Runs isolate.process_options() and verify the results."""
- fd, temp_path = tempfile.mkstemp()
- try:
- # Reuse the file descriptor. On windows, it needs to be closed before
- # process_options() opens it, because mkstemp() opens it without file
- # sharing enabled.
- with os.fdopen(fd, 'w') as f:
- json.dump(values, f)
- root_dir, infiles, data = isolate.process_options(
- variables,
- temp_path,
- os.path.join('isolate', 'data', 'isolate', 'touch_root.isolate'),
- self.fail)
- finally:
- os.remove(temp_path)
-
- expected_data = {
- u'command': ['python', 'touch_root.py'],
- u'read_only': None,
- u'relative_cwd': 'data/isolate',
- u'resultfile': temp_path,
- u'resultdir': tempfile.gettempdir(),
- u'variables': {},
- }
- expected_data.update(more_expected_data)
- expected_files = sorted(
- ('isolate.py', os.path.join('data', 'isolate', 'touch_root.py')))
- self.assertEquals(ROOT_DIR, root_dir)
- self.assertEquals(expected_files, sorted(infiles))
- self.assertEquals(expected_data, data)
-
- def test_load_empty(self):
+ def test_load_isolate_empty(self):
content = "{}"
command, infiles, read_only = isolate.load_isolate(
content, self.fail)
@@ -60,35 +26,61 @@ class Isolate(unittest.TestCase):
self.assertEquals([], infiles)
self.assertEquals(None, read_only)
- def test_process_options_empty(self):
- # Passing nothing generates nothing unexpected.
- self._run_process_options({}, {}, {})
+ def test_result_load_empty(self):
+ values = {
+ }
+ expected = {
+ 'command': [],
+ 'files': {},
+ 'read_only': None,
+ 'relative_cwd': None,
+ }
+ self.assertEquals(expected, isolate.Result.load(values).flatten())
- def test_process_options(self):
- # The previous unexpected variables are kept, the 'variables' dictionary is
- # updated.
+ def test_result_load(self):
values = {
'command': 'maybe',
- 'foo': 'bar',
+ 'files': {'foo': 42},
+ 'read_only': 2,
+ 'relative_cwd': None,
+ }
+ expected = {
+ 'command': 'maybe',
+ 'files': {'foo': 42},
'read_only': 2,
'relative_cwd': None,
- 'resultdir': '2',
- 'resultfile': [],
- 'variables': {
- 'unexpected': 'seriously',
- # This value is updated.
- 'expected': 'stale',
- },
}
+ self.assertEquals(expected, isolate.Result.load(values).flatten())
+
+ def test_result_load_unexpected(self):
+ values = {
+ 'foo': 'bar',
+ }
+ try:
+ isolate.Result.load(values)
+ self.fail()
+ except AssertionError:
+ pass
+
+ def test_savedstate_load_empty(self):
+ values = {
+ }
+ expected = {
+ 'isolate_file': None,
+ 'variables': {},
+ }
+ self.assertEquals(expected, isolate.SavedState.load(values).flatten())
- expected_data = {
- u'foo': u'bar',
- u'variables': {
- 'expected': 'very',
- u'unexpected': u'seriously',
- },
+ def test_savedstate_load(self):
+ values = {
+ 'isolate_file': 'maybe',
+ 'variables': {'foo': 42},
+ }
+ expected = {
+ 'isolate_file': 'maybe',
+ 'variables': {'foo': 42},
}
- self._run_process_options(values, {'expected': 'very'}, expected_data)
+ self.assertEquals(expected, isolate.SavedState.load(values).flatten())
if __name__ == '__main__':