path: root/tools
author     maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2012-05-11 17:25:02 +0000
committer  maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2012-05-11 17:25:02 +0000
commit     91ee66f99c4be42b588b823526e2ab5e00ecd0c3 (patch)
tree       f1f08bd5c04bdbe45170fc9051b6cb6d01da79bc /tools
parent     24a09013ea269811a21f446d455c462bf6c54fcc (diff)
Add scripts to list or trace all test cases in a gtest executable.
list_test_cases.py quickly outputs the test cases in a gtest executable. For browser_tests it takes ~100ms. trace_test_cases.py traces each test individually to be able to generate pretty graphs. read_trace.py is a debugging tool to read a single trace.

Refactor trace_inputs.py to make this possible. I changed its semantics so that stdin is closed and stderr is redirected to stdout. The logger also no longer looks at the logging level to decide whether output should be shown.

R=rogerta@chromium.org
BUG=98636
TEST=

Review URL: https://chromiumcodereview.appspot.com/10377105

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@136601 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/isolate/isolate.py               4
-rwxr-xr-x  tools/isolate/isolate_smoke_test.py    4
-rwxr-xr-x  tools/isolate/list_test_cases.py     102
-rwxr-xr-x  tools/isolate/read_trace.py           74
-rwxr-xr-x  tools/isolate/trace_inputs.py        288
-rwxr-xr-x  tools/isolate/trace_test_cases.py    209
6 files changed, 545 insertions, 136 deletions
diff --git a/tools/isolate/isolate.py b/tools/isolate/isolate.py
index 57346eb..b9acc10 100755
--- a/tools/isolate/isolate.py
+++ b/tools/isolate/isolate.py
@@ -706,9 +706,9 @@ def main():
'For the other modes, defaults to the directory containing --result')
options, args = parser.parse_args()
- level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)]
+ levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
logging.basicConfig(
- level=level,
+ level=levels[min(len(levels)-1, options.verbose)],
format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
if not options.mode:
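For illustration only (not part of the change), a minimal sketch of what the reworked verbosity mapping in isolate.py does; the loop and printout are hypothetical:

import logging

# Same list as the patched isolate.py: repeated -v flags map to ERROR,
# WARNING, INFO, DEBUG and then clamp at DEBUG.
levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
for verbose in range(5):
    print verbose, logging.getLevelName(levels[min(len(levels) - 1, verbose)])
# 0 ERROR, 1 WARNING, 2 INFO, 3 DEBUG, 4 DEBUG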
diff --git a/tools/isolate/isolate_smoke_test.py b/tools/isolate/isolate_smoke_test.py
index f16930a..d0dd9a7 100755
--- a/tools/isolate/isolate_smoke_test.py
+++ b/tools/isolate/isolate_smoke_test.py
@@ -460,7 +460,7 @@ class Isolate_trace(IsolateBase):
def test_fail(self):
try:
- self._execute('trace', 'fail.isolate', [], True)
+ self._execute('trace', 'fail.isolate', ['-v'], True)
self.fail()
except subprocess.CalledProcessError, e:
out = e.output
@@ -469,7 +469,7 @@ class Isolate_trace(IsolateBase):
# In theory, there should be 2 \n at the end of expected but for an
# unknown reason there's 3 \n on Windows so just rstrip() and compare the
# text, that's sufficient for this test.
- expected = 'Failure: 1\nFailing'
+ expected = 'Failing'
self.assertEquals(expected, out.rstrip())
def test_missing_trailing_slash(self):
diff --git a/tools/isolate/list_test_cases.py b/tools/isolate/list_test_cases.py
new file mode 100755
index 0000000..6c1cf49
--- /dev/null
+++ b/tools/isolate/list_test_cases.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""List all the test cases for a google test.
+
+See more info at http://code.google.com/p/googletest/.
+"""
+
+import optparse
+import subprocess
+import sys
+
+
+class Failure(Exception):
+ pass
+
+
+def gtest_list_tests(executable):
+ cmd = [executable, '--gtest_list_tests']
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ if p.returncode:
+ raise Failure('Failed to run %s\n%s' % (executable, err), p.returncode)
+ # pylint: disable=E1103
+ if err and not err.startswith('Xlib: extension "RANDR" missing on display '):
+ raise Failure('Unexpected spew:\n%s' % err, 1)
+ return out
+
+
+def _starts_with(a, b, prefix):
+ return a.startswith(prefix) or b.startswith(prefix)
+
+
+def parse_gtest_cases(out, disabled=False, fails=False, flaky=False):
+ """Expected format is a concatenation of this:
+ TestFixture1
+ TestCase1
+ TestCase2
+ """
+ tests = []
+ fixture = None
+ lines = out.splitlines()
+ while lines:
+ line = lines.pop(0)
+ if not line:
+ break
+ if not line.startswith(' '):
+ fixture = line
+ else:
+ case = line[2:]
+ if case.startswith('YOU HAVE'):
+ # It's a 'YOU HAVE foo bar' line. We're done.
+ break
+ assert ' ' not in case
+
+ if not disabled and _starts_with(fixture, case, 'DISABLED_'):
+ continue
+ if not fails and _starts_with(fixture, case, 'FAILS_'):
+ continue
+ if not flaky and _starts_with(fixture, case, 'FLAKY_'):
+ continue
+
+ tests.append(fixture + case)
+ return tests
+
+
+def main():
+ """CLI frontend to validate arguments."""
+ parser = optparse.OptionParser(
+ usage='%prog <options> [gtest]')
+ parser.add_option(
+ '-d', '--disabled',
+ action='store_true',
+ help='Include DISABLED_ tests')
+ parser.add_option(
+ '-f', '--fails',
+ action='store_true',
+ help='Include FAILS_ tests')
+ parser.add_option(
+ '-F', '--flaky',
+ action='store_true',
+ help='Include FLAKY_ tests')
+ options, args = parser.parse_args()
+ if len(args) != 1:
+ parser.error('Please provide the executable to run')
+
+ try:
+ out = gtest_list_tests(args[0])
+ tests = parse_gtest_cases(
+ out, options.disabled, options.fails, options.flaky)
+ for test in tests:
+ print test
+ except Failure, e:
+ print e.args[0]
+ return e.args[1]
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
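For illustration, a minimal sketch of feeding parse_gtest_cases() hand-written --gtest_list_tests output; the sample listing and the expected results are assumptions based on gtest's usual format (fixture name ending with a dot, cases indented by two spaces), not output captured from a real binary:

from list_test_cases import parse_gtest_cases

# Hypothetical --gtest_list_tests output.
sample = (
    'FooTest.\n'
    '  Bar\n'
    '  DISABLED_Baz\n')

print parse_gtest_cases(sample)                 # ['FooTest.Bar']
print parse_gtest_cases(sample, disabled=True)  # adds 'FooTest.DISABLED_Baz'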
diff --git a/tools/isolate/read_trace.py b/tools/isolate/read_trace.py
new file mode 100755
index 0000000..fda4923
--- /dev/null
+++ b/tools/isolate/read_trace.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Reads a trace. Mostly for testing."""
+
+import logging
+import optparse
+import os
+import sys
+
+import trace_inputs
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
+
+
+def read_trace(logname, root_dir, cwd_dir, product_dir):
+ # Resolve any symlink
+ root_dir = os.path.realpath(root_dir)
+ api = trace_inputs.get_api()
+ _, _, _, _, simplified = trace_inputs.load_trace(logname, root_dir, api)
+ variables = trace_inputs.generate_dict(simplified, cwd_dir, product_dir)
+ trace_inputs.pretty_print(variables, sys.stdout)
+
+
+def main():
+ """CLI frontend to validate arguments."""
+ parser = optparse.OptionParser(
+ usage='%prog <options> [gtest]')
+ parser.add_option(
+ '-v', '--verbose',
+ action='count',
+ default=0,
+ help='Use up to 3 times to increase logging level')
+ parser.add_option(
+ '-c', '--cwd',
+ default='chrome',
+ help='Signal to start the process from this relative directory. When '
+ 'specified, outputs the inputs files in a way compatible for '
+ 'gyp processing. Should be set to the relative path containing the '
+ 'gyp file, e.g. \'chrome\' or \'net\'')
+ parser.add_option(
+ '-p', '--product-dir',
+ default='out/Release',
+ help='Directory for PRODUCT_DIR. Default: %default')
+ parser.add_option(
+ '--root-dir',
+ default=ROOT_DIR,
+ help='Root directory to base everything off. Default: %default')
+ options, args = parser.parse_args()
+
+ level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)]
+ logging.basicConfig(
+ level=level,
+ format='%(levelname)5s %(module)15s(%(lineno)3d):%(message)s')
+
+ if len(args) != 1:
+ parser.error('Please provide the log to read')
+ if not options.product_dir:
+ parser.error('--product-dir is required')
+ if not options.cwd:
+ parser.error('--cwd is required')
+
+ return read_trace(
+ args[0],
+ options.root_dir,
+ options.cwd,
+ options.product_dir)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
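A hedged usage sketch for the helper above; the log name and directories are made up for illustration and assume a trace log was produced earlier by trace_inputs.py:

import read_trace

# Prints the .isolate-style variables reconstructed from the trace log.
read_trace.read_trace(
    'browser_tests.log',      # trace log from a previous run
    '/path/to/chromium/src',  # root directory (--root-dir)
    'chrome',                 # relative cwd (--cwd)
    'out/Release')            # PRODUCT_DIR (--product-dir)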
diff --git a/tools/isolate/trace_inputs.py b/tools/isolate/trace_inputs.py
index d031270..78918a6 100755
--- a/tools/isolate/trace_inputs.py
+++ b/tools/isolate/trace_inputs.py
@@ -201,6 +201,17 @@ def posix_relpath(path, root):
return out
+def cleanup_path(x):
+ """Cleans up a relative path. Converts any os.path.sep to '/' on Windows."""
+ if x:
+ x = x.rstrip(os.path.sep).replace(os.path.sep, '/')
+ if x == '.':
+ x = ''
+ if x:
+ x += '/'
+ return x
+
+
class Strace(object):
"""strace implies linux."""
IGNORED = (
@@ -379,18 +390,22 @@ class Strace(object):
self.non_existent.add(filepath)
@classmethod
- def gen_trace(cls, cmd, cwd, logname):
+ def gen_trace(cls, cmd, cwd, logname, output):
"""Runs strace on an executable."""
- logging.info('gen_trace(%s, %s, %s)' % (cmd, cwd, logname))
- silent = not isEnabledFor(logging.INFO)
+ logging.info('gen_trace(%s, %s, %s, %s)' % (cmd, cwd, logname, output))
stdout = stderr = None
- if silent:
- stdout = stderr = subprocess.PIPE
+ if output:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
traces = ','.join(cls.Context.traces())
trace_cmd = ['strace', '-f', '-e', 'trace=%s' % traces, '-o', logname]
child = subprocess.Popen(
- trace_cmd + cmd, cwd=cwd, stdout=stdout, stderr=stderr)
- out, err = child.communicate()
+ trace_cmd + cmd,
+ cwd=cwd,
+ stdin=subprocess.PIPE,
+ stdout=stdout,
+ stderr=stderr)
+ out = child.communicate()[0]
# Once it's done, inject a chdir() call to cwd to be able to reconstruct
# the full paths.
# TODO(maruel): cwd should be saved at each process creation, so forks needs
@@ -402,15 +417,7 @@ class Strace(object):
pid = content.split(' ', 1)[0]
f.write('%s chdir("%s") = 0\n' % (pid, cwd))
f.write(content)
-
- if child.returncode != 0:
- print 'Failure: %d' % child.returncode
- # pylint: disable=E1103
- if out:
- print ''.join(out.splitlines(True)[-100:])
- if err:
- print ''.join(err.splitlines(True)[-100:])
- return child.returncode
+ return child.returncode, out
@classmethod
def parse_log(cls, filename, blacklist):
@@ -693,18 +700,18 @@ class Dtrace(object):
logging.debug('%d %s(%s) = %s' % (pid, function, args, result))
@classmethod
- def gen_trace(cls, cmd, cwd, logname):
+ def gen_trace(cls, cmd, cwd, logname, output):
"""Runs dtrace on an executable."""
- logging.info('gen_trace(%s, %s, %s)' % (cmd, cwd, logname))
- silent = not isEnabledFor(logging.INFO)
+ logging.info('gen_trace(%s, %s, %s, %s)' % (cmd, cwd, logname, output))
logging.info('Running: %s' % cmd)
signal = 'Go!'
logging.debug('Our pid: %d' % os.getpid())
# Part 1: start the child process.
stdout = stderr = None
- if silent:
- stdout = stderr = subprocess.PIPE
+ if output:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
child_cmd = [
sys.executable, os.path.join(BASE_DIR, 'trace_child_process.py'),
]
@@ -745,7 +752,7 @@ class Dtrace(object):
# Part 4: We can now tell our child to go.
# TODO(maruel): Another pipe than stdin could be used instead. This would
# be more consistent with the other tracing methods.
- out, err = child.communicate(signal)
+ out = child.communicate(signal)[0]
dtrace.wait()
if dtrace.returncode != 0:
@@ -758,19 +765,12 @@ class Dtrace(object):
# Short the log right away to simplify our life. There isn't much
# advantage in keeping it out of order.
cls._sort_log(logname)
- if child.returncode != 0:
- print 'Failure: %d' % child.returncode
- # pylint: disable=E1103
- if out:
- print ''.join(out.splitlines(True)[-100:])
- if err:
- print ''.join(err.splitlines(True)[-100:])
except KeyboardInterrupt:
# Still sort when testing.
cls._sort_log(logname)
raise
- return dtrace.returncode or child.returncode
+ return dtrace.returncode or child.returncode, out
@classmethod
def parse_log(cls, filename, blacklist):
@@ -1014,16 +1014,16 @@ class LogmanTrace(object):
self.IGNORED = tuple(sorted(self.IGNORED))
@classmethod
- def gen_trace(cls, cmd, cwd, logname):
- logging.info('gen_trace(%s, %s, %s)' % (cmd, cwd, logname))
+ def gen_trace(cls, cmd, cwd, logname, output):
+ logging.info('gen_trace(%s, %s, %s, %s)' % (cmd, cwd, logname, output))
# Use "logman -?" for help.
etl = logname + '.etl'
- silent = not isEnabledFor(logging.INFO)
stdout = stderr = None
- if silent:
- stdout = stderr = subprocess.PIPE
+ if output:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
# 1. Start the log collection. Requires administrative access. logman.exe is
# synchronous so no need for a "warmup" call.
@@ -1039,13 +1039,18 @@ class LogmanTrace(object):
'-ets', # Send directly to kernel
]
logging.debug('Running: %s' % cmd_start)
- subprocess.check_call(cmd_start, stdout=stdout, stderr=stderr)
+ subprocess.check_call(
+ cmd_start,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
# 2. Run the child process.
logging.debug('Running: %s' % cmd)
try:
- child = subprocess.Popen(cmd, cwd=cwd, stdout=stdout, stderr=stderr)
- out, err = child.communicate()
+ child = subprocess.Popen(
+ cmd, cwd=cwd, stdin=subprocess.PIPE, stdout=stdout, stderr=stderr)
+ out = child.communicate()[0]
finally:
# 3. Stop the log collection.
cmd_stop = [
@@ -1055,7 +1060,11 @@ class LogmanTrace(object):
'-ets', # Send directly to kernel
]
logging.debug('Running: %s' % cmd_stop)
- subprocess.check_call(cmd_stop, stdout=stdout, stderr=stderr)
+ subprocess.check_call(
+ cmd_stop,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
# 4. Convert the traces to text representation.
# Use "tracerpt -?" for help.
@@ -1086,16 +1095,10 @@ class LogmanTrace(object):
else:
assert False, logformat
logging.debug('Running: %s' % cmd_convert)
- subprocess.check_call(cmd_convert, stdout=stdout, stderr=stderr)
+ subprocess.check_call(
+ cmd_convert, stdin=subprocess.PIPE, stdout=stdout, stderr=stderr)
- if child.returncode != 0:
- print 'Failure: %d' % child.returncode
- # pylint: disable=E1103
- if out:
- print ''.join(out.splitlines(True)[-100:])
- if err:
- print ''.join(err.splitlines(True)[-100:])
- return child.returncode
+ return child.returncode, out
@classmethod
def parse_log(cls, filename, blacklist):
@@ -1265,6 +1268,99 @@ def pretty_print(variables, stdout):
stdout.write('}\n')
+def get_api():
+ flavor = get_flavor()
+ if flavor == 'linux':
+ return Strace()
+ elif flavor == 'mac':
+ return Dtrace()
+ elif sys.platform == 'win32':
+ return LogmanTrace()
+ else:
+ print >> sys.stderr, 'Unsupported platform %s' % sys.platform
+ sys.exit(1)
+
+
+def get_blacklist(api):
+ """Returns a function to filter unimportant files normally ignored."""
+ git_path = os.path.sep + '.git' + os.path.sep
+ svn_path = os.path.sep + '.svn' + os.path.sep
+ return lambda f: (
+ f.startswith(api.IGNORED) or
+ f.endswith('.pyc') or
+ git_path in f or
+ svn_path in f)
+
+
+def generate_dict(files, cwd_dir, product_dir):
+ """Converts the list of files into a .isolate dictionary.
+
+ Arguments:
+ - files: list of files to generate a dictionary out of.
+ - cwd_dir: directory to base all the files from, relative to root_dir.
+ - product_dir: directory to replace with <(PRODUCT_DIR), relative to root_dir.
+ """
+ cwd_dir = cleanup_path(cwd_dir)
+ product_dir = cleanup_path(product_dir)
+
+ def fix(f):
+ """Bases the file on the most restrictive variable."""
+ logging.debug('fix(%s)' % f)
+ # Important, GYP stores the files with / and not \.
+ f = f.replace(os.path.sep, '/')
+ if product_dir and f.startswith(product_dir):
+ return '<(PRODUCT_DIR)/%s' % f[len(product_dir):]
+ else:
+ # cwd_dir is usually the directory containing the gyp file. It may be
+ # empty if the whole directory containing the gyp file is needed.
+ return posix_relpath(f, cwd_dir) or './'
+
+ corrected = [fix(f) for f in files]
+ tracked = [f for f in corrected if not f.endswith('/') and ' ' not in f]
+ untracked = [f for f in corrected if f.endswith('/') or ' ' in f]
+ variables = {}
+ if tracked:
+ variables[KEY_TRACKED] = tracked
+ if untracked:
+ variables[KEY_UNTRACKED] = untracked
+ return variables
+
+
+def trace(logfile, cmd, cwd, api, output):
+ """Traces an executable. Returns (returncode, output) from api.
+
+ Arguments:
+ - logfile: file to write to.
+ - cmd: command to run.
+ - cwd: current directory to start the process in.
+ - api: a tracing api instance.
+ - output: if True, returns output, otherwise prints it at the console.
+ """
+ cmd = fix_python_path(cmd)
+ assert os.path.isabs(cmd[0]), cmd[0]
+ if os.path.isfile(logfile):
+ os.remove(logfile)
+ return api.gen_trace(cmd, cwd, logfile, output)
+
+
+def load_trace(logfile, root_dir, api):
+ """Loads a trace file and returns the processed file lists.
+
+ Arguments:
+ - logfile: file to load.
+ - root_dir: root directory to use to determine if a file is relevant to the
+ trace or not.
+ - api: a tracing api instance.
+ """
+ files, non_existent = api.parse_log(logfile, get_blacklist(api))
+ expected, unexpected = relevant_files(
+ files, root_dir.rstrip(os.path.sep) + os.path.sep)
+ # In case the file system is case insensitive.
+ expected = sorted(set(get_native_path_case(root_dir, f) for f in expected))
+ simplified = extract_directories(expected, root_dir)
+ return files, expected, unexpected, non_existent, simplified
+
+
def trace_inputs(logfile, cmd, root_dir, cwd_dir, product_dir, force_trace):
"""Tries to load the logs if available. If not, trace the test.
@@ -1286,121 +1382,49 @@ def trace_inputs(logfile, cmd, root_dir, cwd_dir, product_dir, force_trace):
'trace_inputs(%s, %s, %s, %s, %s, %s)' % (
logfile, cmd, root_dir, cwd_dir, product_dir, force_trace))
+ def print_if(txt):
+ if cwd_dir is None:
+ print txt
+
# It is important to have unambiguous path.
assert os.path.isabs(root_dir), root_dir
assert os.path.isabs(logfile), logfile
assert not cwd_dir or not os.path.isabs(cwd_dir), cwd_dir
assert not product_dir or not os.path.isabs(product_dir), product_dir
- cmd = fix_python_path(cmd)
- assert (
- (os.path.isfile(logfile) and not force_trace) or os.path.isabs(cmd[0])
- ), cmd[0]
-
+ api = get_api()
# Resolve any symlink
root_dir = os.path.realpath(root_dir)
-
- def print_if(txt):
- if cwd_dir is None:
- print(txt)
-
- flavor = get_flavor()
- if flavor == 'linux':
- api = Strace()
- elif flavor == 'mac':
- api = Dtrace()
- elif sys.platform == 'win32':
- api = LogmanTrace()
- else:
- print >> sys.stderr, 'Unsupported platform %s' % sys.platform
- return 1
-
if not os.path.isfile(logfile) or force_trace:
- if os.path.isfile(logfile):
- os.remove(logfile)
print_if('Tracing... %s' % cmd)
- cwd = root_dir
# Use the proper relative directory.
- if cwd_dir:
- cwd = os.path.join(cwd, cwd_dir)
- returncode = api.gen_trace(cmd, cwd, logfile)
+ cwd = root_dir if not cwd_dir else os.path.join(root_dir, cwd_dir)
+ silent = not isEnabledFor(logging.WARNING)
+ returncode, _ = trace(logfile, cmd, cwd, api, silent)
if returncode and not force_trace:
return returncode
- git_path = os.path.sep + '.git' + os.path.sep
- svn_path = os.path.sep + '.svn' + os.path.sep
- def blacklist(f):
- """Strips ignored paths."""
- return (
- f.startswith(api.IGNORED) or
- f.endswith('.pyc') or
- git_path in f or
- svn_path in f)
-
print_if('Loading traces... %s' % logfile)
- files, non_existent = api.parse_log(logfile, blacklist)
+ files, expected, unexpected, non_existent, simplified = load_trace(
+ logfile, root_dir, api)
print_if('Total: %d' % len(files))
print_if('Non existent: %d' % len(non_existent))
for f in non_existent:
print_if(' %s' % f)
-
- expected, unexpected = relevant_files(
- files, root_dir.rstrip(os.path.sep) + os.path.sep)
if unexpected:
print_if('Unexpected: %d' % len(unexpected))
for f in unexpected:
print_if(' %s' % f)
-
- # In case the file system is case insensitive.
- expected = sorted(set(get_native_path_case(root_dir, f) for f in expected))
-
- simplified = extract_directories(expected, root_dir)
print_if('Interesting: %d reduced to %d' % (len(expected), len(simplified)))
for f in simplified:
print_if(' %s' % f)
if cwd_dir is not None:
- def cleanuppath(x):
- """Cleans up a relative path. Converts any os.path.sep to '/' on Windows.
- """
- if x:
- x = x.rstrip(os.path.sep).replace(os.path.sep, '/')
- if x == '.':
- x = ''
- if x:
- x += '/'
- return x
-
- # Both are relative directories to root_dir.
- cwd_dir = cleanuppath(cwd_dir)
- product_dir = cleanuppath(product_dir)
-
- def fix(f):
- """Bases the file on the most restrictive variable."""
- logging.debug('fix(%s)' % f)
- # Important, GYP stores the files with / and not \.
- f = f.replace(os.path.sep, '/')
-
- if product_dir and f.startswith(product_dir):
- return '<(PRODUCT_DIR)/%s' % f[len(product_dir):]
- else:
- # cwd_dir is usually the directory containing the gyp file. It may be
- # empty if the whole directory containing the gyp file is needed.
- return posix_relpath(f, cwd_dir) or './'
-
- corrected = [fix(f) for f in simplified]
- tracked = [f for f in corrected if not f.endswith('/') and ' ' not in f]
- untracked = [f for f in corrected if f.endswith('/') or ' ' in f]
- variables = {}
- if tracked:
- variables[KEY_TRACKED] = tracked
- if untracked:
- variables[KEY_UNTRACKED] = untracked
value = {
'conditions': [
- ['OS=="%s"' % flavor, {
- 'variables': variables,
+ ['OS=="%s"' % get_flavor(), {
+ 'variables': generate_dict(simplified, cwd_dir, product_dir),
}],
],
}
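To make the refactoring concrete, a rough sketch (paths and command are hypothetical) of how the new trace(), load_trace() and generate_dict() helpers chain together; this is essentially what read_trace.py and the new trace_test_cases.py do:

import trace_inputs

api = trace_inputs.get_api()
# trace() now returns (returncode, output) instead of printing on failure.
returncode, output = trace_inputs.trace(
    '/tmp/unit_tests.log',            # logfile
    ['/src/out/Release/unit_tests'],  # cmd; the executable must be absolute
    '/src/chrome',                    # cwd
    api,
    True)                             # output=True: capture stdout/stderr
files, expected, unexpected, non_existent, simplified = (
    trace_inputs.load_trace('/tmp/unit_tests.log', '/src', api))
variables = trace_inputs.generate_dict(simplified, 'chrome', 'out/Release')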
diff --git a/tools/isolate/trace_test_cases.py b/tools/isolate/trace_test_cases.py
new file mode 100755
index 0000000..41abb9b
--- /dev/null
+++ b/tools/isolate/trace_test_cases.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A manual version of trace_inputs.py that is specialized in tracing each
+google-test test case individually.
+
+This is mainly written to work around bugs in strace w.r.t. browser_tests.
+"""
+
+import fnmatch
+import json
+import multiprocessing
+import optparse
+import os
+import sys
+import tempfile
+import time
+
+import list_test_cases
+import trace_inputs
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
+
+
+def trace_test_case(
+ test_case, executable, root_dir, cwd_dir, product_dir, leak):
+ """Traces a single test case and returns the .isolate compatible variable
+ dict.
+ """
+ # Resolve any symlink
+ root_dir = os.path.realpath(root_dir)
+
+ api = trace_inputs.get_api()
+ cmd = [executable, '--gtest_filter=%s' % test_case]
+
+ if not leak:
+ f, logname = tempfile.mkstemp(prefix='trace')
+ os.close(f)
+ else:
+ logname = '%s.%s.log' % (executable, test_case.replace('/', '-'))
+ f = None
+
+ try:
+ for i in range(3):
+ start = time.time()
+ returncode, output = trace_inputs.trace(
+ logname, cmd, os.path.join(root_dir, cwd_dir), api, True)
+ if returncode and i != 2:
+ print '\nFailed while running: %s' % ' '.join(cmd)
+ print 'Trying again!'
+ continue
+ duration = time.time() - start
+ try:
+ _, _, _, _, simplified = trace_inputs.load_trace(logname, root_dir, api)
+ break
+ except Exception:
+ print '\nFailed loading the trace for: %s' % ' '.join(cmd)
+ print 'Trying again!'
+
+ variables = trace_inputs.generate_dict(simplified, cwd_dir, product_dir)
+ return {
+ 'case': test_case,
+ 'variables': variables,
+ 'result': returncode,
+ 'duration': duration,
+ 'output': output,
+ }
+ finally:
+ if f:
+ os.remove(logname)
+
+
+def task(args):
+ """Adaptor for multiprocessing.Pool().imap_unordered().
+
+ It is executed asynchronously.
+ """
+ return trace_test_case(*args)
+
+
+def get_test_cases(executable, skip):
+ """Returns the filtered list of test cases.
+
+ This is done synchronously.
+ """
+ try:
+ out = list_test_cases.gtest_list_tests(executable)
+ except list_test_cases.Failure, e:
+ print e.args[0]
+ return None
+
+ tests = list_test_cases.parse_gtest_cases(out)
+ tests = [t for t in tests if not any(fnmatch.fnmatch(t, s) for s in skip)]
+ print 'Found %d test cases in %s' % (len(tests), os.path.basename(executable))
+ return tests
+
+
+def trace_test_cases(
+ executable, root_dir, cwd_dir, product_dir, leak, skip, jobs, timeout):
+ """Traces test cases one by one."""
+ tests = get_test_cases(executable, skip)
+ if not tests:
+ return
+
+ last_line = ''
+ out = {}
+ index = 0
+ pool = multiprocessing.Pool(processes=jobs)
+ start = time.time()
+ try:
+ g = ((t, executable, root_dir, cwd_dir, product_dir, leak) for t in tests)
+ it = pool.imap_unordered(task, g)
+ while True:
+ try:
+ result = it.next(timeout=timeout)
+ except StopIteration:
+ break
+ case = result.pop('case')
+ index += 1
+ line = '%d of %d (%.1f%%), %.1fs: %s' % (
+ index,
+ len(tests),
+ index * 100. / len(tests),
+ time.time() - start,
+ case)
+ sys.stdout.write(
+ '\r%s%s' % (line, ' ' * max(0, len(last_line) - len(line))))
+ sys.stdout.flush()
+ last_line = line
+ # TODO(maruel): Retry failed tests.
+ out[case] = result
+ return 0
+ except multiprocessing.TimeoutError, e:
+ print 'Got a timeout while processing a task item %s' % e
+ # Be sure to stop the pool on exception.
+ pool.terminate()
+ return 1
+ except Exception, e:
+ # Be sure to stop the pool on exception.
+ pool.terminate()
+ raise
+ finally:
+ with open('%s.test_cases' % executable, 'w') as f:
+ json.dump(out, f, indent=2, sort_keys=True)
+ pool.close()
+ pool.join()
+
+
+def main():
+ """CLI frontend to validate arguments."""
+ parser = optparse.OptionParser(
+ usage='%prog <options> [gtest]')
+ parser.allow_interspersed_args = False
+ parser.add_option(
+ '-c', '--cwd',
+ default='chrome',
+ help='Signal to start the process from this relative directory. When '
+ 'specified, outputs the inputs files in a way compatible for '
+ 'gyp processing. Should be set to the relative path containing the '
+ 'gyp file, e.g. \'chrome\' or \'net\'')
+ parser.add_option(
+ '-p', '--product-dir',
+ default='out/Release',
+ help='Directory for PRODUCT_DIR. Default: %default')
+ parser.add_option(
+ '--root-dir',
+ default=ROOT_DIR,
+ help='Root directory to base everything off. Default: %default')
+ parser.add_option(
+ '-l', '--leak',
+ action='store_true',
+ help='Leak trace files')
+ parser.add_option(
+ '-s', '--skip',
+ default=[],
+ action='append',
+ help='filter to apply to test cases to skip, wildcard-style')
+ parser.add_option(
+ '-j', '--jobs',
+ type='int',
+ help='number of parallel jobs')
+ parser.add_option(
+ '-t', '--timeout',
+ default=120,
+ type='int',
+ help='timeout for each test case, in seconds')
+ options, args = parser.parse_args()
+
+ if len(args) != 1:
+ parser.error(
+ 'Please provide the executable line to run, if you need fancy things '
+ 'like xvfb, start this script from inside xvfb, it\'ll be faster.')
+ executable = os.path.join(options.root_dir, options.product_dir, args[0])
+ return trace_test_cases(
+ executable,
+ options.root_dir,
+ options.cwd,
+ options.product_dir,
+ options.leak,
+ options.skip,
+ options.jobs,
+ options.timeout)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
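Finally, a hedged sketch of driving a single test case through the new module directly instead of via its command line; every path and the test name are illustrative:

import trace_test_cases

result = trace_test_cases.trace_test_case(
    'FooTest.Bar',                  # gtest test case to run in isolation
    '/src/out/Release/unit_tests',  # executable
    '/src',                         # root_dir
    'chrome',                       # cwd_dir
    'out/Release',                  # product_dir
    False)                          # leak=False: delete the temporary log
print result['result'], result['duration']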