author    falken@chromium.org <falken@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2014-05-09 03:39:28 +0000
committer falken@chromium.org <falken@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2014-05-09 03:39:28 +0000
commit    3a71f652b80c6e794f8ef8879221db20dc9c255b (patch)
tree      25b1759dfffc605028bf62bd2feafc5e4434dbb0 /tools/isolate_driver.py
parent    268ad51ae61b13cd26086cc3016f57a0c3851cad (diff)
Revert of Enable the ninja parsing code all the time. (https://codereview.chromium.org/270333002/)

Reason for revert:
Speculative revert. Maybe the cause of various bot failures involving "A .isolate file is required"

Logs:
http://build.chromium.org/p/chromium.linux/builders/Linux%20Tests/builds/10820
http://build.chromium.org/p/chromium.win/builders/Vista%20Tests%20%283%29/builds/39788

Snippet from Linux Tests log:
/usr/bin/python /mnt/data/b/build/slave/Linux_Tests/build/src/tools/swarming_client/isolate.py run --isolated /mnt/data/b/build/slave/Linux_Tests/build/src/out/Release/interactive_ui_tests.isolated -v -- --no-cr --test-launcher-bot-mode --gtest_output=xml:/mnt/data/b/build/slave/Linux_Tests/build/gtest-results/interactive_ui_tests/interactive_ui_tests.xml --test-launcher-summary-output=/tmp/tmpldQYtj

[------ Swarming Error ------]
A .isolate file is required.
Traceback (most recent call last):
  File "/mnt/data/b/build/slave/Linux_Tests/build/src/tools/swarming_client/isolate.py", line 1516, in main
    return dispatcher.execute(OptionParserIsolate(version=__version__), argv)
  File "/mnt/data/b/build/slave/Linux_Tests/build/src/tools/swarming_client/third_party/depot_tools/subcommand.py", line 242, in execute
    return command(parser, args[1:])
  File "/mnt/data/b/build/slave/Linux_Tests/build/src/tools/swarming_client/isolate.py", line 1251, in CMDrun
    options, os.getcwd(), None, options.skip_refresh)
  File "/mnt/data/b/build/slave/Linux_Tests/build/src/tools/swarming_client/isolate.py", line 863, in load_complete_state
    raise ExecutionError('A .isolate file is required.')
ExecutionError: A .isolate file is required.

Original issue's description:
> isolate_driver: Enable ninja parsing code all the time.
>
> Split the ninja parsing code in its own file so that isolate_driver is focused
> on .isolate management.
>
> - This removes the need to specify the majority of binary runtime dependencies.
>   Sadly, this means a few unrelated binaries are now pulled in, for example d8,
>   libvpx_obj_int_extract and tls_edit. That's a maintenance/performance trade
>   off.
> - Removed some dependencies for unit_tests.isolate to ensure this works.
>
> R=vadimsh@chromium.org
> BUG=368543
>
> Committed: https://src.chromium.org/viewvc/chrome?view=rev&revision=269140

TBR=vadimsh@chromium.org,maruel@chromium.org
NOTREECHECKS=true
NOTRY=true
BUG=368543

Review URL: https://codereview.chromium.org/272113002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@269163 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'tools/isolate_driver.py')
-rwxr-xr-x  tools/isolate_driver.py  164
1 file changed, 158 insertions(+), 6 deletions(-)
diff --git a/tools/isolate_driver.py b/tools/isolate_driver.py
index bf1103a..4db823e 100755
--- a/tools/isolate_driver.py
+++ b/tools/isolate_driver.py
@@ -20,23 +20,172 @@ WARNING: The target to use for build.ninja analysis is the base name of the
 """
 
 import StringIO
+import glob
 import logging
 import os
 import posixpath
 import subprocess
 import sys
+import time
 
 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
 SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
 SRC_DIR = os.path.dirname(TOOLS_DIR)
 
-import ninja_parser
-
 sys.path.insert(0, SWARMING_CLIENT_DIR)
 
 import isolate_format
+def load_ninja_recursively(build_dir, ninja_path, build_steps):
+  """Crudely extracts all the subninja and build statements referenced in
+  ninja_path.
+
+  In particular, it ignores rule and variable declarations. The goal is to be
+  performant (well, as much as python can be performant), which is currently
+  in the <200ms range for a complete chromium tree. As such the code is laid
+  out for performance instead of readability.
+  """
+  logging.debug('Loading %s', ninja_path)
+  try:
+    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
+      line = None
+      merge_line = ''
+      subninja = []
+      for line in f:
+        line = line.rstrip()
+        if not line:
+          continue
+
+        if line[-1] == '$':
+          # The next line needs to be merged in.
+          merge_line += line[:-1]
+          continue
+
+        if merge_line:
+          line = merge_line + line
+          merge_line = ''
+
+        statement = line[:line.find(' ')]
+        if statement == 'build':
+          # Save the dependency list as a raw string. Only the lines needed
+          # will be processed with raw_build_to_deps(). This saves a good
+          # 70ms of processing time.
+          build_target, dependencies = line[6:].split(': ', 1)
+          # Interestingly, trying to be smart and only saving the build steps
+          # with the intended extensions ('', '.stamp', '.so') slows down
+          # parsing even if 90% of the build rules can be skipped.
+          # On Windows, a single step may generate two targets, so split
+          # items accordingly. It has only been seen for .exe/.exe.pdb
+          # combos.
+          for i in build_target.strip().split():
+            build_steps[i] = dependencies
+        elif statement == 'subninja':
+          subninja.append(line[9:])
+  except IOError:
+    print >> sys.stderr, 'Failed to open %s' % ninja_path
+    raise
+
+  total = 1
+  for rel_path in subninja:
+    try:
+      # Load each of the files referenced.
+      # TODO(maruel): Skip the files known to not be needed. It saves an
+      # awful lot of processing time.
+      total += load_ninja_recursively(build_dir, rel_path, build_steps)
+    except IOError:
+      print >> sys.stderr, '... as referenced by %s' % ninja_path
+      raise
+  return total
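
For illustration only (not part of this change), a minimal sketch of how the parser above can be exercised; the paths, rule names, and targets are hypothetical:

    import os
    import tempfile

    # Hypothetical two-file ninja tree exercising a '$' line continuation,
    # a build statement, and a subninja reference.
    tmp = tempfile.mkdtemp()
    with open(os.path.join(tmp, 'build.ninja'), 'wb') as f:
      f.write('build chrome: link obj/main.o $\n')
      f.write('    lib/libbase.so.TOC\n')
      f.write('subninja sub.ninja\n')
    with open(os.path.join(tmp, 'sub.ninja'), 'wb') as f:
      f.write('build lib/libbase.so.TOC: solink_toc obj/base.o\n')

    build_steps = {}
    total = load_ninja_recursively(tmp, 'build.ninja', build_steps)
    # total == 2 (both files were loaded); build_steps maps 'chrome' and
    # 'lib/libbase.so.TOC' to their raw dependency strings, rule name included.
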
+
+
+def load_ninja(build_dir):
+  """Loads the tree of .ninja files in build_dir."""
+  build_steps = {}
+  total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
+  logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
+  return build_steps
+
+
+def using_blacklist(item):
+  """Returns True if an item should be analyzed.
+
+  Ignores many rules that are assumed to not depend on a dynamic library. If
+  the assumption doesn't hold true anymore for a file format, remove it from
+  this list. This is simply an optimization.
+  """
+  IGNORED = (
+      '.a', '.cc', '.css', '.def', '.h', '.html', '.js', '.json', '.manifest',
+      '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.txt',
+  )
+  # ninja files use native path format.
+  ext = os.path.splitext(item)[1]
+  if ext in IGNORED:
+    return False
+  # Special case Windows, keep .dll.lib but discard .lib.
+  if item.endswith('.dll.lib'):
+    return True
+  if ext == '.lib':
+    return False
+  return item not in ('', '|', '||')
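
A few spot checks of the filter above, with hypothetical file names:

    using_blacklist('obj/foo/bar.o')      # False: object files are ignored
    using_blacklist('lib/libfoo.so.TOC')  # True: '.TOC' is not in IGNORED
    using_blacklist('chrome.dll.lib')     # True: import library for a .dll
    using_blacklist('base.lib')           # False: plain static library
    using_blacklist('||')                 # False: ninja order-only separator
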
+
+
+def raw_build_to_deps(item):
+  """Converts a raw ninja build statement into the list of interesting
+  dependencies.
+  """
+  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
+  # .dll.lib, .exe and empty.
+  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
+  return filter(using_blacklist, item.split(' ')[1:])
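
Taken together with using_blacklist(), a hypothetical raw build statement reduces as follows:

    # The leading rule name ('solink') is dropped by the [1:] slice; the
    # object file and the implicit-dependency marker '|' are then filtered
    # out by using_blacklist().
    raw = 'solink obj/foo.o lib/libfoo.so.TOC | obj/foo.stamp'
    raw_build_to_deps(raw)
    # -> ['lib/libfoo.so.TOC', 'obj/foo.stamp']
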
+
+
+def recurse(target, build_steps, rules_seen):
+  """Recursively returns all the interesting dependencies for target."""
+  out = []
+  if rules_seen is None:
+    rules_seen = set()
+  if target in rules_seen:
+    # TODO(maruel): Figure out how it happens.
+    logging.warning('Circular dependency for %s!', target)
+    return []
+  rules_seen.add(target)
+  try:
+    dependencies = raw_build_to_deps(build_steps[target])
+  except KeyError:
+    logging.info('Failed to find a build step to generate: %s', target)
+    return []
+  logging.debug('recurse(%s) -> %s', target, dependencies)
+  for dependency in dependencies:
+    out.append(dependency)
+    dependency_raw_dependencies = build_steps.get(dependency)
+    if dependency_raw_dependencies:
+      for i in raw_build_to_deps(dependency_raw_dependencies):
+        out.extend(recurse(i, build_steps, rules_seen))
+    else:
+      logging.info('Failed to find a build step to generate: %s', dependency)
+  return out
+
+
+def post_process_deps(build_dir, dependencies):
+  """Processes the dependency list with OS specific rules."""
+  def filter_item(i):
+    if i.endswith('.so.TOC'):
+      # Remove only the suffix .TOC, not the .so!
+      return i[:-4]
+    if i.endswith('.dylib.TOC'):
+      # Remove only the suffix .TOC, not the .dylib!
+      return i[:-4]
+    if i.endswith('.dll.lib'):
+      # Remove only the suffix .lib, not the .dll!
+      return i[:-4]
+    return i
+
+  # Check for execute access. This gets rid of all the phony rules.
+  return [
+      i for i in map(filter_item, dependencies)
+      if os.access(os.path.join(build_dir, i), os.X_OK)
+  ]
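
A sketch of the suffix mapping, with hypothetical paths; the final list depends on which files are actually executable in build_dir:

    deps = ['lib/libbase.so.TOC', 'chrome.dll.lib', 'obj/foo.stamp']
    post_process_deps('out/Release', deps)
    # filter_item() maps the first two entries to 'lib/libbase.so' and
    # 'chrome.dll'; 'obj/foo.stamp' passes through filter_item() unchanged
    # but is then dropped by the os.access() check, since stamp files are
    # not executable.
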
+
+
 def create_wrapper(args, isolate_index, isolated_index):
   """Creates a wrapper .isolate that adds dynamic libs.
@@ -69,9 +218,8 @@ def create_wrapper(args, isolate_index, isolated_index):
   # primary target '_run'. Fix accordingly if this doesn't hold true, e.g.
   # complain to maruel@.
   target = isolate[:-len('.isolate')] + '_run'
-  build_steps = ninja_parser.load_ninja(build_dir)
-  binary_deps = ninja_parser.post_process_deps(
-      build_dir, ninja_parser.recurse(build_dir, target, build_steps))
+  build_steps = load_ninja(build_dir)
+  binary_deps = post_process_deps(build_dir, recurse(target, build_steps, None))
 
   logging.debug(
       'Binary dependencies:%s', ''.join('\n  ' + i for i in binary_deps))
@@ -107,16 +255,20 @@ def main():
   args = sys.argv[1:]
   isolate = None
   isolated = None
+  is_component = False
   for i, arg in enumerate(args):
     if arg == '--isolate':
       isolate = i + 1
     if arg == '--isolated':
       isolated = i + 1
+    if arg == 'component=shared_library':
+      is_component = True
   if isolate is None or isolated is None:
     print >> sys.stderr, 'Internal failure'
     return 1
 
-  create_wrapper(args, isolate, isolated)
+  if is_component:
+    create_wrapper(args, isolate, isolated)
 
   swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
   sys.stdout.flush()
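
For context, a hypothetical invocation as the gyp rules would issue it (the subcommand and paths are illustrative): only the presence of the bare component=shared_library argument makes create_wrapper() run; all arguments are then forwarded to the swarming_client isolate.py script.

    python tools/isolate_driver.py check \
        --isolate out/Release/browser_tests.isolate \
        --isolated out/Release/browser_tests.isolated \
        component=shared_library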