author    maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2014-05-09 01:43:37 +0000
committer maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2014-05-09 01:43:37 +0000
commit    b951b91f533b632c1bbbce8b5389a7bca872277e (patch)
tree      3ac55739b5e8a2899c73330985b8e97b20687451 /tools/isolate_driver.py
parent    e681b3dc58a974f2810d0009acf9b201699e4f21 (diff)
isolate_driver: Enable ninja parsing code all the time.
Split the ninja parsing code into its own file so that isolate_driver is
focused on .isolate management.

- This removes the need to specify the majority of binary runtime
  dependencies. Sadly, this means a few unrelated binaries are now pulled in,
  for example d8, libvpx_obj_int_extract and tls_edit. That's a
  maintenance/performance trade-off.
- Removed some dependencies from unit_tests.isolate to ensure this works.

R=vadimsh@chromium.org
BUG=368543

Review URL: https://codereview.chromium.org/270333002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@269140 0039d316-1c4b-4281-b951-d872f2087c98
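The new ninja_parser module itself is not part of this view (the diffstat below is limited to tools/isolate_driver.py). As a rough sketch only, this is the interface the new call sites in create_wrapper() appear to rely on; the docstrings paraphrase the helpers this change deletes from isolate_driver.py, and everything else about the real module is an assumption:

# ninja_parser.py -- illustrative stub, inferred from the call sites in this
# diff; not the actual module added by the commit.

def load_ninja(build_dir):
  """Recursively loads build.ninja and every subninja file it references.

  Assumed to return a dict mapping each build target to its raw dependency
  string, like the load_ninja()/load_ninja_recursively() pair removed below.
  """

def recurse(build_dir, target, build_steps):
  """Assumed to walk build_steps starting at `target` and return the
  interesting transitive dependencies (shared libraries, stamps,
  executables)."""

def post_process_deps(build_dir, dependencies):
  """Assumed to apply OS-specific fix-ups (e.g. trimming the .TOC suffix from
  .so.TOC/.dylib.TOC entries) and keep only files with execute access."""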
Diffstat (limited to 'tools/isolate_driver.py')
-rwxr-xr-x  tools/isolate_driver.py  164
1 file changed, 6 insertions, 158 deletions
diff --git a/tools/isolate_driver.py b/tools/isolate_driver.py
index 4db823e..bf1103a 100755
--- a/tools/isolate_driver.py
+++ b/tools/isolate_driver.py
@@ -20,172 +20,23 @@ WARNING: The target to use for build.ninja analysis is the base name of the
"""
import StringIO
-import glob
import logging
import os
import posixpath
import subprocess
import sys
-import time
TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
SRC_DIR = os.path.dirname(TOOLS_DIR)
+import ninja_parser
+
sys.path.insert(0, SWARMING_CLIENT_DIR)
import isolate_format
-def load_ninja_recursively(build_dir, ninja_path, build_steps):
-  """Crudely extracts all the subninja and build referenced in ninja_path.
-
-  In particular, it ignores rule and variable declarations. The goal is to be
-  performant (well, as much as python can be performant) which is currently in
-  the <200ms range for a complete chromium tree. As such the code is laid out
-  for performance instead of readability.
-  """
-  logging.debug('Loading %s', ninja_path)
-  try:
-    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
-      line = None
-      merge_line = ''
-      subninja = []
-      for line in f:
-        line = line.rstrip()
-        if not line:
-          continue
-
-        if line[-1] == '$':
-          # The next line needs to be merged in.
-          merge_line += line[:-1]
-          continue
-
-        if merge_line:
-          line = merge_line + line
-          merge_line = ''
-
-        statement = line[:line.find(' ')]
-        if statement == 'build':
-          # Save the dependency list as a raw string. Only the lines needed will
-          # be processed with raw_build_to_deps(). This saves a good 70ms of
-          # processing time.
-          build_target, dependencies = line[6:].split(': ', 1)
-          # Interestingly, trying to be smart and only saving the build steps
-          # with the intended extensions ('', '.stamp', '.so') slows down
-          # parsing even if 90% of the build rules can be skipped.
-          # On Windows, a single step may generate two targets, so split items
-          # accordingly. It has only been seen for .exe/.exe.pdb combos.
-          for i in build_target.strip().split():
-            build_steps[i] = dependencies
-        elif statement == 'subninja':
-          subninja.append(line[9:])
-  except IOError:
-    print >> sys.stderr, 'Failed to open %s' % ninja_path
-    raise
-
-  total = 1
-  for rel_path in subninja:
-    try:
-      # Load each of the files referenced.
-      # TODO(maruel): Skip the files known to not be needed. It saves an awful
-      # lot of processing time.
-      total += load_ninja_recursively(build_dir, rel_path, build_steps)
-    except IOError:
-      print >> sys.stderr, '... as referenced by %s' % ninja_path
-      raise
-  return total
-
-
-def load_ninja(build_dir):
-  """Loads the tree of .ninja files in build_dir."""
-  build_steps = {}
-  total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
-  logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
-  return build_steps
-
-
-def using_blacklist(item):
-  """Returns True if an item should be analyzed.
-
-  Ignores many rules that are assumed to not depend on a dynamic library. If
-  the assumption doesn't hold true anymore for a file format, remove it from
-  this list. This is simply an optimization.
-  """
-  IGNORED = (
-      '.a', '.cc', '.css', '.def', '.h', '.html', '.js', '.json', '.manifest',
-      '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.txt',
-  )
-  # ninja files use native path format.
-  ext = os.path.splitext(item)[1]
-  if ext in IGNORED:
-    return False
-  # Special case Windows, keep .dll.lib but discard .lib.
-  if item.endswith('.dll.lib'):
-    return True
-  if ext == '.lib':
-    return False
-  return item not in ('', '|', '||')
-
-
-def raw_build_to_deps(item):
-  """Converts a raw ninja build statement into the list of interesting
-  dependencies.
-  """
-  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
-  # .dll.lib, .exe and empty.
-  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
-  return filter(using_blacklist, item.split(' ')[1:])
-
-
-def recurse(target, build_steps, rules_seen):
-  """Recursively returns all the interesting dependencies for target."""
-  out = []
-  if rules_seen is None:
-    rules_seen = set()
-  if target in rules_seen:
-    # TODO(maruel): Figure out how it happens.
-    logging.warning('Circular dependency for %s!', target)
-    return []
-  rules_seen.add(target)
-  try:
-    dependencies = raw_build_to_deps(build_steps[target])
-  except KeyError:
-    logging.info('Failed to find a build step to generate: %s', target)
-    return []
-  logging.debug('recurse(%s) -> %s', target, dependencies)
-  for dependency in dependencies:
-    out.append(dependency)
-    dependency_raw_dependencies = build_steps.get(dependency)
-    if dependency_raw_dependencies:
-      for i in raw_build_to_deps(dependency_raw_dependencies):
-        out.extend(recurse(i, build_steps, rules_seen))
-    else:
-      logging.info('Failed to find a build step to generate: %s', dependency)
-  return out
-
-
-def post_process_deps(build_dir, dependencies):
-  """Processes the dependency list with OS specific rules."""
-  def filter_item(i):
-    if i.endswith('.so.TOC'):
-      # Remove only the suffix .TOC, not the .so!
-      return i[:-4]
-    if i.endswith('.dylib.TOC'):
-      # Remove only the suffix .TOC, not the .dylib!
-      return i[:-4]
-    if i.endswith('.dll.lib'):
-      # Remove only the suffix .lib, not the .dll!
-      return i[:-4]
-    return i
-
-  # Check for execute access. This gets rid of all the phony rules.
-  return [
-      i for i in map(filter_item, dependencies)
-      if os.access(os.path.join(build_dir, i), os.X_OK)
-  ]
-
-
def create_wrapper(args, isolate_index, isolated_index):
  """Creates a wrapper .isolate that adds dynamic libs.
@@ -218,8 +69,9 @@ def create_wrapper(args, isolate_index, isolated_index):
  # primary target '_run'. Fix accordingly if this doesn't hold true, e.g.
  # complain to maruel@.
  target = isolate[:-len('.isolate')] + '_run'
-  build_steps = load_ninja(build_dir)
-  binary_deps = post_process_deps(build_dir, recurse(target, build_steps, None))
+  build_steps = ninja_parser.load_ninja(build_dir)
+  binary_deps = ninja_parser.post_process_deps(
+      build_dir, ninja_parser.recurse(build_dir, target, build_steps))
  logging.debug(
      'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps))
@@ -255,20 +107,16 @@ def main():
  args = sys.argv[1:]
  isolate = None
  isolated = None
-  is_component = False
  for i, arg in enumerate(args):
    if arg == '--isolate':
      isolate = i + 1
    if arg == '--isolated':
      isolated = i + 1
-    if arg == 'component=shared_library':
-      is_component = True
  if isolate is None or isolated is None:
    print >> sys.stderr, 'Internal failure'
    return 1
-  if is_component:
-    create_wrapper(args, isolate, isolated)
+  create_wrapper(args, isolate, isolated)
  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  sys.stdout.flush()
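With the component=shared_library check gone, create_wrapper() now runs for every target. The wrapper .isolate it writes is not shown in this diff; purely as a hypothetical illustration of the shape of that file (the key names and the <(PRODUCT_DIR) paths are assumptions, only the binary names come from the commit message above), it would look roughly like:

# Hypothetical wrapper generated next to unit_tests.isolate. .isolate files
# are Python-syntax dicts; keys and paths here are illustrative assumptions.
{
  'includes': [
    'unit_tests.isolate',
  ],
  'variables': {
    'files': [
      '<(PRODUCT_DIR)/d8',
      '<(PRODUCT_DIR)/libvpx_obj_int_extract',
      '<(PRODUCT_DIR)/tls_edit',
    ],
  },
}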