author    maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2012-10-04 00:08:07 +0000
committer maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2012-10-04 00:08:07 +0000
commit    5dcce7f8260a1d065de8e85b5fb9b35faf94a684 (patch)
tree      e37f53ce6ad489cee0d151cb5124ba6b9c8ba4be
parent    e1f5c9beb4952df82815be228e71fa46c219b0b0 (diff)
download  chromium_src-5dcce7f8260a1d065de8e85b5fb9b35faf94a684.zip
          chromium_src-5dcce7f8260a1d065de8e85b5fb9b35faf94a684.tar.gz
          chromium_src-5dcce7f8260a1d065de8e85b5fb9b35faf94a684.tar.bz2
Move src/tools/isolate to src/tools/swarm_client@159961 as a DEPS.

src/tools/swarm_client is fetched from tools/swarm_client.

TBR=csharp@chromium.org
BUG=
Review URL: https://codereview.chromium.org/11045023

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@160016 0039d316-1c4b-4281-b951-d872f2087c98
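The DEPS hunk below is the mechanical half of the move: the directory stops being versioned in-tree and becomes a pinned external that gclient checks out. A minimal sketch of how such an entry reads (DEPS files are Python dict literals; everything here except the swarm_client line is generic scaffolding):

deps = {
  # A dependency URL starting with '/' is resolved against the parent
  # repository's base URL (here svn://svn.chromium.org/chrome), so this
  # pins src/tools/swarm_client at SVN r159961.
  "src/tools/swarm_client": "/trunk/tools/swarm_client@159961",
}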
-rw-r--r--  .gitignore | 1
-rw-r--r--  DEPS | 3
-rw-r--r--  base/base.gyp | 2
-rw-r--r--  build/common.gypi | 4
-rw-r--r--  chrome/chrome_tests.gypi | 4
-rw-r--r--  net/net.gyp | 2
-rw-r--r--  third_party/cacheinvalidation/cacheinvalidation.gyp | 2
-rw-r--r--  tools/isolate/PRESUBMIT.py | 28
-rwxr-xr-x  tools/isolate/README.py | 46
-rwxr-xr-x  tools/isolate/fix_test_cases.py | 224
-rwxr-xr-x  tools/isolate/isolate.py | 1923
-rwxr-xr-x  tools/isolate/isolate_merge.py | 68
-rwxr-xr-x  tools/isolate/list_test_cases.py | 54
-rwxr-xr-x  tools/isolate/run_test_cases.py | 904
-rwxr-xr-x  tools/isolate/run_test_from_archive.py | 965
-rwxr-xr-x  tools/isolate/shard_test_cases.py | 49
-rw-r--r--  tools/isolate/tests/gtest_fake/__init__.py | 0
-rw-r--r--  tools/isolate/tests/gtest_fake/gtest_fake_base.py | 36
-rwxr-xr-x  tools/isolate/tests/gtest_fake/gtest_fake_error.py | 32
-rwxr-xr-x  tools/isolate/tests/gtest_fake/gtest_fake_fail.py | 57
-rwxr-xr-x  tools/isolate/tests/gtest_fake/gtest_fake_pass.py | 55
-rw-r--r--  tools/isolate/tests/isolate/fail.isolate | 14
-rwxr-xr-x  tools/isolate/tests/isolate/fail.py | 15
-rw-r--r--  tools/isolate/tests/isolate/files1/subdir/42.txt | 1
-rw-r--r--  tools/isolate/tests/isolate/files1/test_file1.txt | 1
-rw-r--r--  tools/isolate/tests/isolate/files1/test_file2.txt | 1
l---------  tools/isolate/tests/isolate/files2 | 1
-rw-r--r--  tools/isolate/tests/isolate/missing_trailing_slash.isolate | 11
-rw-r--r--  tools/isolate/tests/isolate/no_run.isolate | 14
-rw-r--r--  tools/isolate/tests/isolate/non_existent.isolate | 10
-rw-r--r--  tools/isolate/tests/isolate/symlink_full.isolate | 17
-rwxr-xr-x  tools/isolate/tests/isolate/symlink_full.py | 39
-rw-r--r--  tools/isolate/tests/isolate/symlink_partial.isolate | 17
-rwxr-xr-x  tools/isolate/tests/isolate/symlink_partial.py | 21
-rw-r--r--  tools/isolate/tests/isolate/touch_only.isolate | 18
-rwxr-xr-x  tools/isolate/tests/isolate/touch_only.py | 31
-rw-r--r--  tools/isolate/tests/isolate/touch_root.isolate | 15
-rwxr-xr-x  tools/isolate/tests/isolate/touch_root.py | 24
-rw-r--r--  tools/isolate/tests/isolate/with_flag.isolate | 18
-rwxr-xr-x  tools/isolate/tests/isolate/with_flag.py | 54
-rwxr-xr-x  tools/isolate/tests/isolate_smoke_test.py | 1001
-rwxr-xr-x  tools/isolate/tests/isolate_test.py | 836
-rwxr-xr-x  tools/isolate/tests/run_test_cases/sleep.py | 22
-rwxr-xr-x  tools/isolate/tests/run_test_cases_smoke_test.py | 172
-rwxr-xr-x  tools/isolate/tests/run_test_cases_test.py | 193
-rwxr-xr-x  tools/isolate/tests/run_test_from_archive/check_files.py | 39
-rw-r--r--  tools/isolate/tests/run_test_from_archive/check_files.results | 15
-rw-r--r--  tools/isolate/tests/run_test_from_archive/file1.txt | 1
-rw-r--r--  tools/isolate/tests/run_test_from_archive/file1_copy.txt | 1
-rw-r--r--  tools/isolate/tests/run_test_from_archive/file2.txt | 1
-rw-r--r--  tools/isolate/tests/run_test_from_archive/file3.txt | 1
-rwxr-xr-x  tools/isolate/tests/run_test_from_archive/gtest_fake.py | 84
-rw-r--r--  tools/isolate/tests/run_test_from_archive/gtest_fake.results | 8
-rw-r--r--  tools/isolate/tests/run_test_from_archive/manifest1.results | 7
-rw-r--r--  tools/isolate/tests/run_test_from_archive/manifest2.results | 8
-rwxr-xr-x  tools/isolate/tests/run_test_from_archive/repeated_files.py | 32
-rw-r--r--  tools/isolate/tests/run_test_from_archive/repeated_files.results | 14
-rwxr-xr-x  tools/isolate/tests/run_test_from_archive_smoke_test.py | 230
-rwxr-xr-x  tools/isolate/tests/run_test_from_archive_test.py | 116
-rwxr-xr-x  tools/isolate/tests/trace_inputs/child1.py | 50
-rwxr-xr-x  tools/isolate/tests/trace_inputs/child2.py | 44
-rw-r--r--  tools/isolate/tests/trace_inputs/files1/bar | 1
-rw-r--r--  tools/isolate/tests/trace_inputs/files1/do_not_care.txt | 1
-rw-r--r--  tools/isolate/tests/trace_inputs/files1/foo | 1
l---------  tools/isolate/tests/trace_inputs/files2 | 1
-rw-r--r--  tools/isolate/tests/trace_inputs/ignored.txt | 1
-rwxr-xr-x  tools/isolate/tests/trace_inputs/symlink.py | 37
-rw-r--r--  tools/isolate/tests/trace_inputs/test_file.txt | 1
-rwxr-xr-x  tools/isolate/tests/trace_inputs/touch_only.py | 30
-rwxr-xr-x  tools/isolate/tests/trace_inputs_smoke_test.py | 614
-rwxr-xr-x  tools/isolate/tests/trace_inputs_test.py | 448
-rwxr-xr-x  tools/isolate/tests/trace_test_cases_smoke_test.py | 123
-rwxr-xr-x  tools/isolate/trace_inputs.py | 3258
-rwxr-xr-x  tools/isolate/trace_test_cases.py | 127
74 files changed, 11 insertions(+), 12288 deletions(-)
diff --git a/.gitignore b/.gitignore
index aa3b608..1a323fd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -245,6 +245,7 @@ v8.log
/tools/histograms
/tools/json_schema_compiler/test/json_schema_compiler_tests.xml
/tools/page_cycler/acid3
+/tools/swarm_client
/tools/tryserver
/tools/win/link_limiter/build
/ui/resources/aura/google_wallpapers
diff --git a/DEPS b/DEPS
index 96211f0..b7f1281 100644
--- a/DEPS
+++ b/DEPS
@@ -105,6 +105,9 @@ deps = {
"src/tools/gyp":
(Var("googlecode_url") % "gyp") + "/trunk@1508",
+ "src/tools/swarm_client":
+ "/trunk/tools/swarm_client@159961",
+
"src/v8":
(Var("googlecode_url") % "v8") + "/trunk@" + Var("v8_revision"),
diff --git a/base/base.gyp b/base/base.gyp
index 0904813..c501dfb 100644
--- a/base/base.gyp
+++ b/base/base.gyp
@@ -1046,7 +1046,7 @@
],
'action': [
'python',
- '../tools/isolate/isolate.py',
+ '../tools/swarm_client/isolate.py',
'<(test_isolation_mode)',
'--outdir', '<(test_isolation_outdir)',
'--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
diff --git a/build/common.gypi b/build/common.gypi
index 829ed4a..91a4d89 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -374,8 +374,8 @@
# Whether tests targets should be run, archived or just have the
# dependencies verified. All the tests targets have the '_run' suffix,
# e.g. base_unittests_run runs the target base_unittests. The test target
- # always calls tools/isolate/isolate.py. See the script's --help for more
- # information and the valid --mode values. Meant to be overridden with
+ # always calls tools/swarm_client/isolate.py. See the script's --help for
+ # more information and the valid --mode values. Meant to be overridden with
# GYP_DEFINES.
# TODO(maruel): Converted the default from 'check' to 'noop' so work can
# be done while the builders are being reconfigured to check out test data
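As a hedged sketch of what "Meant to be overridden with GYP_DEFINES" means in practice: test_isolation_mode is an ordinary gyp variable whose default (declared elsewhere in common.gypi, not shown in this hunk) uses the % suffix so the command line can override it:

{
  'variables': {
    # The trailing % marks an overridable default; e.g. running
    #   GYP_DEFINES="test_isolation_mode=check" gclient runhooks
    # switches the *_run targets from 'noop' to dependency checking.
    'test_isolation_mode%': 'noop',
  },
}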
diff --git a/chrome/chrome_tests.gypi b/chrome/chrome_tests.gypi
index debd48b..fc0451f 100644
--- a/chrome/chrome_tests.gypi
+++ b/chrome/chrome_tests.gypi
@@ -4825,7 +4825,7 @@
],
'action': [
'python',
- '../tools/isolate/isolate.py',
+ '../tools/swarm_client/isolate.py',
'<(test_isolation_mode)',
'--outdir', '<(test_isolation_outdir)',
'--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
@@ -4856,7 +4856,7 @@
],
'action': [
'python',
- '../tools/isolate/isolate.py',
+ '../tools/swarm_client/isolate.py',
'<(test_isolation_mode)',
'--outdir', '<(test_isolation_outdir)',
'--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
diff --git a/net/net.gyp b/net/net.gyp
index c1ad190..168babf 100644
--- a/net/net.gyp
+++ b/net/net.gyp
@@ -2277,7 +2277,7 @@
],
'action': [
'python',
- '../tools/isolate/isolate.py',
+ '../tools/swarm_client/isolate.py',
'<(test_isolation_mode)',
'--outdir', '<(test_isolation_outdir)',
'--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
diff --git a/third_party/cacheinvalidation/cacheinvalidation.gyp b/third_party/cacheinvalidation/cacheinvalidation.gyp
index 77e3c52..6a663ad 100644
--- a/third_party/cacheinvalidation/cacheinvalidation.gyp
+++ b/third_party/cacheinvalidation/cacheinvalidation.gyp
@@ -168,7 +168,7 @@
],
'action': [
'python',
- '../../tools/isolate/isolate.py',
+ '../../tools/swarm_client/isolate.py',
'<(test_isolation_mode)',
'--outdir', '<(test_isolation_outdir)',
'--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
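The five action hunks above all rewrite the same shape of gyp rule. A minimal sketch of such a *_run target, with hypothetical target and file names (only the 'action' list mirrors the diff):

{
  'target_name': 'base_unittests_run',  # hypothetical
  'type': 'none',
  'dependencies': ['base_unittests'],
  'actions': [{
    'action_name': 'isolate',
    'inputs': ['base_unittests.isolate'],  # hypothetical
    'outputs': ['<(PRODUCT_DIR)/base_unittests.results'],
    'action': [
      'python',
      '../tools/swarm_client/isolate.py',
      '<(test_isolation_mode)',
      '--outdir', '<(test_isolation_outdir)',
      '--variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
    ],
  }],
}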
diff --git a/tools/isolate/PRESUBMIT.py b/tools/isolate/PRESUBMIT.py
deleted file mode 100644
index fb95592..0000000
--- a/tools/isolate/PRESUBMIT.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Top-level presubmit script for isolate.
-
-See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
-details on the presubmit API built into gcl.
-"""
-
-
-def CommonChecks(input_api, output_api):
- output = []
- output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
- output.extend(
- input_api.canned_checks.RunUnitTestsInDirectory(
- input_api, output_api,
- input_api.os_path.join(input_api.PresubmitLocalPath(), 'tests'),
- whitelist=[r'.+_test\.py$']))
- return output
-
-
-def CheckChangeOnUpload(input_api, output_api):
- return CommonChecks(input_api, output_api)
-
-
-def CheckChangeOnCommit(input_api, output_api):
- return CommonChecks(input_api, output_api)
diff --git a/tools/isolate/README.py b/tools/isolate/README.py
deleted file mode 100755
index 2cd4cca..0000000
--- a/tools/isolate/README.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-###
-# Run me to generate the documentation!
-###
-
-"""Test tracing and isolation infrastructure.
-
-Scripts are compartmentalized by their name:
-- trace_*.py: Tracing infrastructure scripts.
-- isolate_*.py: Executable isolation scripts. (TODO)
-- *_test_cases.py: Scripts specifically managing GTest executables.
-
-A few scripts have strict dependency rules:
-- run_test_cases.py, run_test_from_archive.py, shard.py and trace_inputs.py
- depend on no other script so they can be run outside the checkout.
-- The pure tracing scripts (trace_inputs.py and trace_test_cases.py) do not know
- about isolate infrastructure.
-- Scripts without _test_cases suffix do not know about GTest.
-- Scripts without isolate_ prefix do not know about the isolation
- infrastructure. (TODO)
-
-See http://dev.chromium.org/developers/testing/isolated-testing for more info.
-"""
-
-import os
-import sys
-
-
-def main():
- for i in sorted(os.listdir(os.path.dirname(os.path.abspath(__file__)))):
- if not i.endswith('.py') or i == 'PRESUBMIT.py':
- continue
- module = __import__(i[:-3])
- if hasattr(module, '__doc__'):
- print module.__name__
- print ''.join(' %s\n' % i for i in module.__doc__.splitlines())
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/fix_test_cases.py b/tools/isolate/fix_test_cases.py
deleted file mode 100755
index 98fcdbc..0000000
--- a/tools/isolate/fix_test_cases.py
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs a test, grab the failures and trace them."""
-
-import json
-import os
-import subprocess
-import sys
-import tempfile
-
-import run_test_cases
-
-
-XVFB_PATH = os.path.join('..', '..', 'testing', 'xvfb.py')
-
-
-if sys.platform == 'win32':
- import msvcrt # pylint: disable=F0401
-
- def get_keyboard():
- """Returns a letter from the keyboard if any.
-
- This function returns immediately.
- """
- if msvcrt.kbhit():
- return ord(msvcrt.getch())
-
-else:
- import select
-
- def get_keyboard():
- """Returns a letter from the keyboard if any, as soon as he pressed enter.
-
- This function returns (almost) immediately.
-
- The library doesn't give a way to just get the initial letter.
- """
- if select.select([sys.stdin], [], [], 0.00001)[0]:
- return sys.stdin.read(1)
-
-
-def trace_and_merge(result, test):
- """Traces a single test case and merges the result back into .isolate."""
- env = os.environ.copy()
- env['RUN_TEST_CASES_RUN_ALL'] = '1'
-
- print 'Starting trace of %s' % test
- subprocess.call(
- [
- sys.executable, 'isolate.py', 'trace', '-r', result,
- '--', '--gtest_filter=' + test,
- ],
- env=env)
-
- print 'Starting merge of %s' % test
- return not subprocess.call(
- [sys.executable, 'isolate.py', 'merge', '-r', result])
-
-
-def run_all(result, shard_index, shard_count):
- """Runs all the tests. Returns the tests that failed or None on failure.
-
- Assumes run_test_cases.py is implicitly called.
- """
- handle, result_file = tempfile.mkstemp(prefix='run_test_cases')
- os.close(handle)
- env = os.environ.copy()
- env['RUN_TEST_CASES_RESULT_FILE'] = result_file
- env['RUN_TEST_CASES_RUN_ALL'] = '1'
- env['GTEST_SHARD_INDEX'] = str(shard_index)
- env['GTEST_TOTAL_SHARDS'] = str(shard_count)
- cmd = [sys.executable, 'isolate.py', 'run', '-r', result]
- subprocess.call(cmd, env=env)
- if not os.path.isfile(result_file):
- print >> sys.stderr, 'Failed to find %s' % result_file
- return None
- with open(result_file) as f:
- try:
- data = json.load(f)
- except ValueError as e:
- print >> sys.stderr, ('Unable to load json file, %s: %s' %
- (result_file, str(e)))
- return None
- os.remove(result_file)
- return [
- test for test, runs in data.iteritems()
- if not any(not run['returncode'] for run in runs)
- ]
-
-
-def run(result, test):
- """Runs a single test case in an isolated environment.
-
- Returns True if the test passed.
- """
- return not subprocess.call([
- sys.executable, 'isolate.py', 'run', '-r', result,
- '--', '--gtest_filter=' + test,
- ])
-
-
-def run_normally(executable, test):
- return not subprocess.call([
- sys.executable, XVFB_PATH, os.path.dirname(executable), executable,
- '--gtest_filter=' + test])
-
-
-def diff_and_commit(test):
- """Prints the diff and commit."""
- subprocess.call(['git', 'diff'])
- subprocess.call(['git', 'commit', '-a', '-m', test])
-
-
-def trace_and_verify(result, test):
- """Traces a test case, updates .isolate and makes sure it passes afterward.
-
- Return None if the test was already passing, True on success.
- """
- trace_and_merge(result, test)
- diff_and_commit(test)
- print 'Verifying trace...'
- return run(result, test)
-
-
-def fix_all(result, shard_index, shard_count, executable):
- """Runs all the test cases in a gtest executable and trace the failing tests.
-
- Returns True on success.
-
- Makes sure the test passes afterward.
- """
- # These could have adverse side-effects.
- # TODO(maruel): Be more intelligent about it, for now be safe.
- run_test_cases_env = ['RUN_TEST_CASES_RESULT_FILE', 'RUN_TEST_CASES_RUN_ALL']
- for i in run_test_cases.KNOWN_GTEST_ENV_VARS + run_test_cases_env:
- if i in os.environ:
- print >> sys.stderr, 'Please unset %s' % i
- return False
-
- test_cases = run_all(result, shard_index, shard_count)
- if test_cases is None:
- return False
-
- print '\nFound %d broken test cases.' % len(test_cases)
- if not test_cases:
- return True
-
- failed_alone = []
- failures = []
- fixed_tests = []
- try:
- for index, test_case in enumerate(test_cases):
- if get_keyboard():
- # Return early.
- return True
-
- try:
- # Check if the test passes normally, because otherwise there is no
- # reason to trace its failure.
- if not run_normally(executable, test_case):
- print '%s is broken when run alone, please fix the test.' % test_case
- failed_alone.append(test_case)
- continue
-
- if not trace_and_verify(result, test_case):
- failures.append(test_case)
- print 'Failed to fix %s' % test_case
- else:
- fixed_tests.append(test_case)
- except: # pylint: disable=W0702
- failures.append(test_case)
- print 'Failed to fix %s' % test_case
- print '%d/%d' % (index+1, len(test_cases))
- finally:
- print 'Test cases fixed (%d):' % len(fixed_tests)
- for fixed_test in fixed_tests:
- print ' %s' % fixed_test
- print ''
-
- print 'Test cases still failing (%d):' % len(failures)
- for failure in failures:
- print ' %s' % failure
-
- if failed_alone:
- print ('Test cases that failed normally when run alone (%d):' %
- len(failed_alone))
- for failed in failed_alone:
- print failed
- return not failures
-
-
-def main():
- parser = run_test_cases.OptionParserWithTestSharding(
- usage='%prog <option> [test]')
- parser.add_option('-d', '--dir', default='../../out/Release',
- help='The directory containing the test executable and '
- 'result file. Defaults to %default')
- options, args = parser.parse_args()
-
- if len(args) != 1:
- parser.error('Use with the name of the test only, e.g. unit_tests')
-
- basename = args[0]
- executable = os.path.join(options.dir, basename)
- result = '%s.results' % executable
- if sys.platform in ('win32', 'cygwin'):
- executable += '.exe'
- if not os.path.isfile(executable):
- print >> sys.stderr, (
- '%s doesn\'t exist, please build %s_run' % (executable, basename))
- return 1
- if not os.path.isfile(result):
- print >> sys.stderr, (
- '%s doesn\'t exist, please build %s_run' % (result, basename))
- return 1
-
- return not fix_all(result, options.index, options.shards, executable)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/isolate.py b/tools/isolate/isolate.py
deleted file mode 100755
index 57c8aee..0000000
--- a/tools/isolate/isolate.py
+++ /dev/null
@@ -1,1923 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Front end tool to manage .isolate files and corresponding tests.
-
-Run ./isolate.py --help for more detailed information.
-
-See more information at
-http://dev.chromium.org/developers/testing/isolated-testing
-"""
-
-import binascii
-import copy
-import hashlib
-import logging
-import optparse
-import os
-import posixpath
-import re
-import stat
-import subprocess
-import sys
-import time
-import urllib
-import urllib2
-
-import trace_inputs
-import run_test_from_archive
-from run_test_from_archive import get_flavor
-
-# Used by process_input().
-NO_INFO, STATS_ONLY, WITH_HASH = range(56, 59)
-SHA_1_NULL = hashlib.sha1().hexdigest()
-
-PATH_VARIABLES = ('DEPTH', 'PRODUCT_DIR')
-DEFAULT_OSES = ('linux', 'mac', 'win')
-
-# Files that should be 0-length when mapped.
-KEY_TOUCHED = 'isolate_dependency_touched'
-# Files that should be tracked by the build tool.
-KEY_TRACKED = 'isolate_dependency_tracked'
-# Files that should not be tracked by the build tool.
-KEY_UNTRACKED = 'isolate_dependency_untracked'
-
-_GIT_PATH = os.path.sep + '.git'
-_SVN_PATH = os.path.sep + '.svn'
-
-# The maximum number of upload attempts to try when uploading a single file.
-MAX_UPLOAD_ATTEMPTS = 5
-
-# The minimum size of files to upload directly to the blobstore.
-MIN_SIZE_FOR_DIRECT_BLOBSTORE = 20 * 8
-
-
-class ExecutionError(Exception):
- """A generic error occurred."""
- def __str__(self):
- return self.args[0]
-
-
-### Path handling code.
-
-
-def relpath(path, root):
- """os.path.relpath() that keeps trailing os.path.sep."""
- out = os.path.relpath(path, root)
- if path.endswith(os.path.sep):
- out += os.path.sep
- return out
-
-
-def normpath(path):
- """os.path.normpath() that keeps trailing os.path.sep."""
- out = os.path.normpath(path)
- if path.endswith(os.path.sep):
- out += os.path.sep
- return out
-
-
-def posix_relpath(path, root):
- """posix.relpath() that keeps trailing slash."""
- out = posixpath.relpath(path, root)
- if path.endswith('/'):
- out += '/'
- return out
-
-
-def cleanup_path(x):
- """Cleans up a relative path. Converts any os.path.sep to '/' on Windows."""
- if x:
- x = x.rstrip(os.path.sep).replace(os.path.sep, '/')
- if x == '.':
- x = ''
- if x:
- x += '/'
- return x
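
A quick sketch of what these path helpers preserve, POSIX-style (assuming isolate.py is importable):

import isolate

assert isolate.relpath('/a/b/c/', '/a') == 'b/c/'     # trailing sep kept
assert isolate.posix_relpath('a/b/', 'a') == 'b/'
assert isolate.cleanup_path('foo/bar') == 'foo/bar/'  # always '/'-terminated
assert isolate.cleanup_path('.') == ''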
-
-
-def default_blacklist(f):
- """Filters unimportant files normally ignored."""
- return (
- f.endswith(('.pyc', '.run_test_cases', 'testserver.log')) or
- _GIT_PATH in f or
- _SVN_PATH in f or
- f in ('.git', '.svn'))
-
-
-def expand_directory_and_symlink(indir, relfile, blacklist):
- """Expands a single input. It can result in multiple outputs.
-
- This function is recursive when relfile is a directory or a symlink.
-
- Note: this code doesn't properly handle recursive symlink like one created
- with:
- ln -s .. foo
- """
- if os.path.isabs(relfile):
- raise run_test_from_archive.MappingError(
- 'Can\'t map absolute path %s' % relfile)
-
- infile = normpath(os.path.join(indir, relfile))
- if not infile.startswith(indir):
- raise run_test_from_archive.MappingError(
- 'Can\'t map file %s outside %s' % (infile, indir))
-
- if sys.platform != 'win32':
- # Look if any item in relfile is a symlink.
- base, symlink, rest = trace_inputs.split_at_symlink(indir, relfile)
- if symlink:
- # Append everything pointed by the symlink. If the symlink is recursive,
- # this code blows up.
- symlink_relfile = os.path.join(base, symlink)
- symlink_path = os.path.join(indir, symlink_relfile)
- pointed = os.readlink(symlink_path)
- dest_infile = normpath(
- os.path.join(os.path.dirname(symlink_path), pointed))
- if rest:
- dest_infile = trace_inputs.safe_join(dest_infile, rest)
- if not dest_infile.startswith(indir):
- raise run_test_from_archive.MappingError(
- 'Can\'t map symlink reference %s (from %s) ->%s outside of %s' %
- (symlink_relfile, relfile, dest_infile, indir))
- if infile.startswith(dest_infile):
- raise run_test_from_archive.MappingError(
- 'Can\'t map recursive symlink reference %s->%s' %
- (symlink_relfile, dest_infile))
- dest_relfile = dest_infile[len(indir)+1:]
- logging.info('Found symlink: %s -> %s' % (symlink_relfile, dest_relfile))
- out = expand_directory_and_symlink(indir, dest_relfile, blacklist)
- # Add the symlink itself.
- out.append(symlink_relfile)
- return out
-
- if relfile.endswith(os.path.sep):
- if not os.path.isdir(infile):
- raise run_test_from_archive.MappingError(
- '%s is not a directory but ends with "%s"' % (infile, os.path.sep))
-
- outfiles = []
- for filename in os.listdir(infile):
- inner_relfile = os.path.join(relfile, filename)
- if blacklist(inner_relfile):
- continue
- if os.path.isdir(os.path.join(indir, inner_relfile)):
- inner_relfile += os.path.sep
- outfiles.extend(
- expand_directory_and_symlink(indir, inner_relfile, blacklist))
- return outfiles
- else:
- # Always add individual files even if they were blacklisted.
- if os.path.isdir(infile):
- raise run_test_from_archive.MappingError(
- 'Input directory %s must have a trailing slash' % infile)
-
- if not os.path.isfile(infile):
- raise run_test_from_archive.MappingError(
- 'Input file %s doesn\'t exist' % infile)
-
- return [relfile]
-
-
-def expand_directories_and_symlinks(indir, infiles, blacklist):
- """Expands the directories and the symlinks, applies the blacklist and
- verifies files exist.
-
- Files are specified in os native path separator.
- """
- outfiles = []
- for relfile in infiles:
- outfiles.extend(expand_directory_and_symlink(indir, relfile, blacklist))
- return outfiles
-
-
-def recreate_tree(outdir, indir, infiles, action, as_sha1):
- """Creates a new tree with only the input files in it.
-
- Arguments:
- outdir: Output directory to create the files in.
- indir: Root directory the infiles are based in.
- infiles: dict of files to map from |indir| to |outdir|.
- action: See assert below.
- as_sha1: Output filename is the sha1 instead of relfile.
- """
- logging.info(
- 'recreate_tree(outdir=%s, indir=%s, files=%d, action=%s, as_sha1=%s)' %
- (outdir, indir, len(infiles), action, as_sha1))
-
- assert action in (
- run_test_from_archive.HARDLINK,
- run_test_from_archive.SYMLINK,
- run_test_from_archive.COPY)
- outdir = os.path.normpath(outdir)
- if not os.path.isdir(outdir):
- logging.info('Creating %s' % outdir)
- os.makedirs(outdir)
- # Do not call abspath until the directory exists.
- outdir = os.path.abspath(outdir)
-
- for relfile, metadata in infiles.iteritems():
- infile = os.path.join(indir, relfile)
- if as_sha1:
- # Do the hashtable specific checks.
- if 'link' in metadata:
- # Skip links when storing a hashtable.
- continue
- outfile = os.path.join(outdir, metadata['sha-1'])
- if os.path.isfile(outfile):
- # Just do a quick check that the file size matches. No need to stat()
- # again the input file, grab the value from the dict.
- if metadata['size'] == os.stat(outfile).st_size:
- continue
- else:
- logging.warn('Overwriting %s' % metadata['sha-1'])
- os.remove(outfile)
- else:
- outfile = os.path.join(outdir, relfile)
- outsubdir = os.path.dirname(outfile)
- if not os.path.isdir(outsubdir):
- os.makedirs(outsubdir)
-
- # TODO(csharp): Fix crbug.com/150823 and enable the touched logic again.
- # if metadata.get('touched_only') == True:
- # open(outfile, 'ab').close()
- if 'link' in metadata:
- pointed = metadata['link']
- logging.debug('Symlink: %s -> %s' % (outfile, pointed))
- os.symlink(pointed, outfile)
- else:
- run_test_from_archive.link_file(outfile, infile, action)
-
-
-def encode_multipart_formdata(fields, files,
- mime_mapper=lambda _: 'application/octet-stream'):
- """Encodes a Multipart form data object.
-
- Args:
- fields: a sequence (name, value) elements for
- regular form fields.
- files: a sequence of (name, filename, value) elements for data to be
- uploaded as files.
- mime_mapper: function to return the mime type from the filename.
- Returns:
- content_type: for httplib.HTTP instance
- body: for httplib.HTTP instance
- """
- boundary = hashlib.md5(str(time.time())).hexdigest()
- body_list = []
- for (key, value) in fields:
- body_list.append('--' + boundary)
- body_list.append('Content-Disposition: form-data; name="%s"' % key)
- body_list.append('')
- body_list.append(value)
- body_list.append('--' + boundary)
- body_list.append('')
- for (key, filename, value) in files:
- body_list.append('--' + boundary)
- body_list.append('Content-Disposition: form-data; name="%s"; '
- 'filename="%s"' % (key, filename))
- body_list.append('Content-Type: %s' % mime_mapper(filename))
- body_list.append('')
- body_list.append(value)
- body_list.append('--' + boundary)
- body_list.append('')
- if body_list:
- body_list[-2] += '--'
- body = '\r\n'.join(body_list)
- content_type = 'multipart/form-data; boundary=%s' % boundary
- return content_type, body
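
A short usage sketch (field and file names hypothetical; assumes the function above is in scope):

content_type, body = encode_multipart_formdata(
    [('hash_key', 'deadbeef')],                     # regular form fields
    [('hash_contents', 'blob.bin', 'raw bytes')])   # (name, filename, value)
# content_type is 'multipart/form-data; boundary=...' and body can be sent
# as the payload of an urllib2.Request.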
-
-
-def upload_hash_content(url, params=None, payload=None,
- content_type='application/octet-stream'):
- """Uploads the given hash contents.
-
- Arguments:
- url: The url to upload the hash contents to.
- params: The params to include with the upload.
- payload: The data to upload.
- content_type: The content_type of the data being uploaded.
- """
- if params:
- url = url + '?' + urllib.urlencode(params)
- request = urllib2.Request(url, data=payload)
- request.add_header('Content-Type', content_type)
- request.add_header('Content-Length', len(payload or ''))
-
- return urllib2.urlopen(request)
-
-
-def upload_hash_content_to_blobstore(generate_upload_url, params,
- hash_data):
- """Uploads the given hash contents directly to the blobsotre via a generated
- url.
-
- Arguments:
- generate_upload_url: The url to get the new upload url from.
- params: The params to include with the upload.
- hash_data: The contents to upload.
- """
- content_type, body = encode_multipart_formdata(
- params.items(), [('hash_contents', 'hash_contest', hash_data)])
-
- logging.debug('Generating url to directly upload file to blobstore')
- response = urllib2.urlopen(generate_upload_url)
- upload_url = response.read()
-
- if not upload_url:
- logging.error('Unable to generate upload url')
- return
-
- return upload_hash_content(upload_url, payload=body,
- content_type=content_type)
-
-
-class UploadRemote(run_test_from_archive.Remote):
- @staticmethod
- def get_file_handler(base_url):
- def upload_file(hash_data, hash_key):
- params = {'hash_key': hash_key}
- if len(hash_data) > MIN_SIZE_FOR_DIRECT_BLOBSTORE:
- upload_hash_content_to_blobstore(
- base_url.rstrip('/') + '/content/generate_blobstore_url',
- params, hash_data)
- else:
- upload_hash_content(
- base_url.rstrip('/') + '/content/store', params, hash_data)
- return upload_file
-
-
-def url_open(url, data=None, max_retries=MAX_UPLOAD_ATTEMPTS):
- """Opens the given url with the given data, repeating up to max_retries
- times if it encounters an error.
-
- Arguments:
- url: The url to open.
- data: The data to send to the url.
- max_retries: The maximum number of times to try connecting to the url.
-
- Returns:
- The response from the url, or it raises an exception if it failed to get
- a response.
- """
- response = None
- for _ in range(max_retries):
- try:
- response = urllib2.urlopen(url, data=data)
- except urllib2.URLError as e:
- logging.warning('Unable to connect to %s, error msg: %s', url, e)
- time.sleep(1)
- else:
- break
-
- # If we get no response from the server after max_retries, assume it
- # is down and raise an exception
- if response is None:
- raise run_test_from_archive.MappingError('Unable to connect to server, %s, '
- 'to see which files are present' %
- url)
-
- return response
-
-
-def update_files_to_upload(query_url, queries, files_to_upload):
- """Queries the server to see which files from this batch already exist there.
-
- Arguments:
- query_url: The url to query for already-uploaded files.
- queries: The hash files to potentially upload to the server.
- files_to_upload: Any new files that need to be uploaded are added to
- this list.
- """
- body = ''.join(
- (binascii.unhexlify(meta_data['sha-1']) for (_, meta_data) in queries))
- response = url_open(query_url, data=body).read()
- if len(queries) != len(response):
- raise run_test_from_archive.MappingError(
- 'Got an incorrect number of responses from the server. Expected %d, '
- 'but got %d' % (len(queries), len(response)))
-
- for i in range(len(response)):
- if response[i] == chr(0):
- files_to_upload.append(queries[i])
- else:
- logging.debug('Hash for %s already exists on the server, no need '
- 'to upload again', queries[i][0])
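
In other words, the /content/contains exchange packs each query as its raw 20-byte SHA-1 digest, back to back, and the server answers with one byte per digest, where chr(0) means "not present, upload it". A worked sketch (digests arbitrary):

import binascii

queries = [
    ('a.txt', {'sha-1': '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'}),
    ('b.txt', {'sha-1': 'da39a3ee5e6b4b0d3255bfef95601890afd80709'}),
]
body = ''.join(binascii.unhexlify(m['sha-1']) for _, m in queries)
assert len(body) == 40  # 20 raw bytes per query
# A response of '\x00\x01' would mean: upload a.txt, skip b.txt.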
-
-
-def upload_sha1_tree(base_url, indir, infiles):
- """Uploads the given tree to the given url.
-
- Arguments:
- base_url: The base url, it is assumed that |base_url|/has/ can be used to
- query if an element was already uploaded, and |base_url|/store/
- can be used to upload a new element.
- indir: Root directory the infiles are based in.
- infiles: dict of files to map from |indir| to |outdir|.
- """
- logging.info('upload tree(base_url=%s, indir=%s, files=%d)' %
- (base_url, indir, len(infiles)))
-
- # Generate the list of files that need to be uploaded (since some may already
- # be on the server).
- base_url = base_url.rstrip('/')
- contains_hash_url = base_url + '/content/contains'
- to_upload = []
- next_queries = []
- for relfile, metadata in infiles.iteritems():
- if 'link' in metadata:
- # Skip links when uploading.
- continue
-
- next_queries.append((relfile, metadata))
- if len(next_queries) == 1000:
- update_files_to_upload(contains_hash_url, next_queries, to_upload)
- next_queries = []
-
- if next_queries:
- update_files_to_upload(contains_hash_url, next_queries, to_upload)
-
-
- # Upload the required files.
- remote_uploader = UploadRemote(base_url)
- for relfile, metadata in to_upload:
- # TODO(csharp): Fix crbug.com/150823 and enable the touched logic again.
- # if metadata.get('touched_only') == True:
- # hash_data = ''
- infile = os.path.join(indir, relfile)
- with open(infile, 'rb') as f:
- hash_data = f.read()
- remote_uploader.add_item(run_test_from_archive.Remote.MED,
- hash_data,
- metadata['sha-1'])
- remote_uploader.join()
-
- exception = remote_uploader.next_exception()
- if exception:
- while exception:
- logging.error('Error uploading file to server:\n%s', exception[1])
- exception = remote_uploader.next_exception()
- raise run_test_from_archive.MappingError(
- 'Encountered errors uploading hash contents to server. See logs for '
- 'exact failures')
-
-
-def process_input(filepath, prevdict, level, read_only):
- """Processes an input file, a dependency, and return meta data about it.
-
- Arguments:
- - filepath: File to act on.
- - prevdict: the previous dictionary. It is used to retrieve the cached sha-1
- to skip recalculating the hash.
- - level: determines the amount of information retrieved.
- - read_only: If True, the file mode is manipulated. In practice, only one
- of 4 modes is saved: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r). On
- windows, mode is not set since all files are 'executable' by
- default.
-
- Behaviors:
- - NO_INFO retrieves no information.
- - STATS_ONLY retrieves the file mode, file size, file timestamp, file link
- destination if it is a file link.
- - WITH_HASH retrieves all of STATS_ONLY plus the sha-1 of the content of the
- file.
- """
- assert level in (NO_INFO, STATS_ONLY, WITH_HASH)
- out = {}
- # TODO(csharp): Fix crbug.com/150823 and enable the touched logic again.
- # if prevdict.get('touched_only') == True:
- # # The file's content is ignored. Skip the time and hard code mode.
- # if get_flavor() != 'win':
- # out['mode'] = stat.S_IRUSR | stat.S_IRGRP
- # out['size'] = 0
- # out['sha-1'] = SHA_1_NULL
- # out['touched_only'] = True
- # return out
-
- if level >= STATS_ONLY:
- try:
- filestats = os.lstat(filepath)
- except OSError:
- # The file is not present.
- raise run_test_from_archive.MappingError('%s is missing' % filepath)
- is_link = stat.S_ISLNK(filestats.st_mode)
- if get_flavor() != 'win':
- # Ignore file mode on Windows since it's not really useful there.
- filemode = stat.S_IMODE(filestats.st_mode)
- # Remove write access for group and all access to 'others'.
- filemode &= ~(stat.S_IWGRP | stat.S_IRWXO)
- if read_only:
- filemode &= ~stat.S_IWUSR
- if filemode & stat.S_IXUSR:
- filemode |= stat.S_IXGRP
- else:
- filemode &= ~stat.S_IXGRP
- out['mode'] = filemode
- if not is_link:
- out['size'] = filestats.st_size
- # Used to skip recalculating the hash. Use the most recent update time.
- out['timestamp'] = int(round(filestats.st_mtime))
- # If the timestamp wasn't updated, carry on the sha-1.
- if prevdict.get('timestamp') == out['timestamp']:
- if 'sha-1' in prevdict:
- # Reuse the previous hash.
- out['sha-1'] = prevdict['sha-1']
- if 'link' in prevdict:
- # Reuse the previous link destination.
- out['link'] = prevdict['link']
- if is_link and not 'link' in out:
- # A symlink, store the link destination.
- out['link'] = os.readlink(filepath)
-
- if level >= WITH_HASH and not out.get('sha-1') and not out.get('link'):
- if not is_link:
- with open(filepath, 'rb') as f:
- out['sha-1'] = hashlib.sha1(f.read()).hexdigest()
- return out
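
A sketch of the metadata returned for a regular 0644 file on POSIX at WITH_HASH level (timestamp and hash illustrative, assuming isolate.py is importable):

import isolate

meta = isolate.process_input('foo.txt', {}, isolate.WITH_HASH, False)
# e.g. {'mode': 0640, 'size': 12, 'timestamp': 1349308800, 'sha-1': '...'}
# (group write and all 'other' bits are masked off, hence 0640)
# A symlink would instead carry a 'link' entry and no 'size' or 'sha-1'.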
-
-
-### Variable stuff.
-
-
-def result_to_state(filename):
- """Replaces the file's extension."""
- return filename.rsplit('.', 1)[0] + '.state'
-
-
-def determine_root_dir(relative_root, infiles):
- """For a list of infiles, determines the deepest root directory that is
- referenced indirectly.
-
- All arguments must be using os.path.sep.
- """
- # The trick used to determine the root directory is to look at "how far"
- # up each input file reaches.
- deepest_root = relative_root
- for i in infiles:
- x = relative_root
- while i.startswith('..' + os.path.sep):
- i = i[3:]
- assert not i.startswith(os.path.sep)
- x = os.path.dirname(x)
- if deepest_root.startswith(x):
- deepest_root = x
- logging.debug(
- 'determine_root_dir(%s, %d files) -> %s' % (
- relative_root, len(infiles), deepest_root))
- return deepest_root
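
A worked example of the '..'-counting trick (POSIX-style paths, assuming the function above is in scope):

# An input reaching two levels above out/Release pulls the root up to /src:
determine_root_dir('/src/out/Release', ['../../base/base.isolate'])
# -> '/src'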
-
-
-def replace_variable(part, variables):
- m = re.match(r'<\(([A-Z_]+)\)', part)
- if m:
- if m.group(1) not in variables:
- raise ExecutionError(
- 'Variable "%s" was not found in %s.\nDid you forget to specify '
- '--variable?' % (m.group(1), variables))
- return variables[m.group(1)]
- return part
-
-
-def process_variables(variables, relative_base_dir):
- """Processes path variables as a special case and returns a copy of the dict.
-
- For each 'path' variable: first normalizes it, verifies it exists, converts it
- to an absolute path, then sets it as relative to relative_base_dir.
- """
- variables = variables.copy()
- for i in PATH_VARIABLES:
- if i not in variables:
- continue
- variable = os.path.normpath(variables[i])
- if not os.path.isdir(variable):
- raise ExecutionError('%s=%s is not a directory' % (i, variable))
- # Variables could contain / or \ on windows. Always normalize to
- # os.path.sep.
- variable = os.path.abspath(variable.replace('/', os.path.sep))
- # All variables are relative to the .isolate file.
- variables[i] = os.path.relpath(variable, relative_base_dir)
- return variables
-
-
-def eval_variables(item, variables):
- """Replaces the .isolate variables in a string item.
-
- Note that the .isolate format is a subset of the .gyp dialect.
- """
- return ''.join(
- replace_variable(p, variables) for p in re.split(r'(<\([A-Z_]+\))', item))
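
A small substitution sketch (assuming the two functions above are in scope):

eval_variables('<(PRODUCT_DIR)/unit_tests<(EXECUTABLE_SUFFIX)',
               {'PRODUCT_DIR': 'out/Release', 'EXECUTABLE_SUFFIX': ''})
# -> 'out/Release/unit_tests'
# A variable missing from the dict raises ExecutionError, pointing the
# caller at --variable.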
-
-
-def classify_files(root_dir, tracked, untracked):
- """Converts the list of files into a .isolate 'variables' dictionary.
-
- Arguments:
- - tracked: list of file names to generate a dictionary out of that should
- probably be tracked.
- - untracked: list of file names that must not be tracked.
- """
- # These directories are not guaranteed to be always present on every builder.
- OPTIONAL_DIRECTORIES = (
- 'test/data/plugin',
- 'third_party/WebKit/LayoutTests',
- )
-
- new_tracked = []
- new_untracked = list(untracked)
-
- def should_be_tracked(filepath):
- """Returns True if it is a file without whitespace in a non-optional
- directory that has no symlink in its path.
- """
- if filepath.endswith('/'):
- return False
- if ' ' in filepath:
- return False
- if any(i in filepath for i in OPTIONAL_DIRECTORIES):
- return False
- # Look if any element in the path is a symlink.
- split = filepath.split('/')
- for i in range(len(split)):
- if os.path.islink(os.path.join(root_dir, '/'.join(split[:i+1]))):
- return False
- return True
-
- for filepath in sorted(tracked):
- if should_be_tracked(filepath):
- new_tracked.append(filepath)
- else:
- # Anything else.
- new_untracked.append(filepath)
-
- variables = {}
- if new_tracked:
- variables[KEY_TRACKED] = sorted(new_tracked)
- if new_untracked:
- variables[KEY_UNTRACKED] = sorted(new_untracked)
- return variables
-
-
-def generate_simplified(
- tracked, untracked, touched, root_dir, variables, relative_cwd):
- """Generates a clean and complete .isolate 'variables' dictionary.
-
- Cleans up and extracts only files from within root_dir then processes
- variables and relative_cwd.
- """
- logging.info(
- 'generate_simplified(%d files, %s, %s, %s)' %
- (len(tracked) + len(untracked) + len(touched),
- root_dir, variables, relative_cwd))
- # Constants.
- # Skip log in PRODUCT_DIR. Note that these are applied on '/' style path
- # separator.
- LOG_FILE = re.compile(r'^\<\(PRODUCT_DIR\)\/[^\/]+\.log$')
- EXECUTABLE = re.compile(
- r'^(\<\(PRODUCT_DIR\)\/[^\/\.]+)' +
- re.escape(variables.get('EXECUTABLE_SUFFIX', '')) +
- r'$')
-
- # Preparation work.
- relative_cwd = cleanup_path(relative_cwd)
- # Creates the right set of variables here. We only care about PATH_VARIABLES.
- variables = dict(
- ('<(%s)' % k, variables[k].replace(os.path.sep, '/'))
- for k in PATH_VARIABLES if k in variables)
-
- # Actual work: Process the files.
- # TODO(maruel): if all the files in a directory are in part tracked and in
- # part untracked, the directory will not be extracted. Tracked files should be
- # 'promoted' to be untracked as needed.
- tracked = trace_inputs.extract_directories(
- root_dir, tracked, default_blacklist)
- untracked = trace_inputs.extract_directories(
- root_dir, untracked, default_blacklist)
- # touched is not compressed, otherwise it would result in files being
- # archived that we don't need.
-
- def fix(f):
- """Bases the file on the most restrictive variable."""
- logging.debug('fix(%s)' % f)
- # Important, GYP stores the files with / and not \.
- f = f.replace(os.path.sep, '/')
- # If it's not already a variable.
- if not f.startswith('<'):
- # relative_cwd is usually the directory containing the gyp file. It may be
- # empty if the whole directory containing the gyp file is needed.
- f = posix_relpath(f, relative_cwd) or './'
-
- for variable, root_path in variables.iteritems():
- if f.startswith(root_path):
- f = variable + f[len(root_path):]
- break
-
- # Now strip off known files we want to ignore and apply any specific
- # mangling as necessary. It's easier to do it here than generate a blacklist.
- match = EXECUTABLE.match(f)
- if match:
- return match.group(1) + '<(EXECUTABLE_SUFFIX)'
-
- # Blacklist logs and 'First Run' in the PRODUCT_DIR. First Run is not
- # created by the compile, but by the test itself.
- if LOG_FILE.match(f) or f == '<(PRODUCT_DIR)/First Run':
- return None
-
- if sys.platform == 'darwin':
- # On OSX, the name of the output is dependent on gyp define, it can be
- # 'Google Chrome.app' or 'Chromium.app', same for 'XXX
- # Framework.framework'. Furthermore, they are versioned with a gyp
- # variable. To lower the complexity of the .isolate file, remove all the
- # individual entries that show up under any of the 4 entries and replace
- # them with the directory itself. Overall, this results in a bit more
- # files than strictly necessary.
- OSX_BUNDLES = (
- '<(PRODUCT_DIR)/Chromium Framework.framework/',
- '<(PRODUCT_DIR)/Chromium.app/',
- '<(PRODUCT_DIR)/Google Chrome Framework.framework/',
- '<(PRODUCT_DIR)/Google Chrome.app/',
- )
- for prefix in OSX_BUNDLES:
- if f.startswith(prefix):
- # Note this results in duplicate values, so a set() must be used to
- # remove duplicates.
- return prefix
-
- return f
-
- tracked = set(filter(None, (fix(f.path) for f in tracked)))
- untracked = set(filter(None, (fix(f.path) for f in untracked)))
- touched = set(filter(None, (fix(f.path) for f in touched)))
- out = classify_files(root_dir, tracked, untracked)
- if touched:
- out[KEY_TOUCHED] = sorted(touched)
- return out
-
-
-def generate_isolate(
- tracked, untracked, touched, root_dir, variables, relative_cwd):
- """Generates a clean and complete .isolate file."""
- result = generate_simplified(
- tracked, untracked, touched, root_dir, variables, relative_cwd)
- return {
- 'conditions': [
- ['OS=="%s"' % get_flavor(), {
- 'variables': result,
- }],
- ],
- }
-
-
-def split_touched(files):
- """Splits files that are touched vs files that are read."""
- tracked = []
- touched = []
- for f in files:
- if f.size:
- tracked.append(f)
- else:
- touched.append(f)
- return tracked, touched
-
-
-def pretty_print(variables, stdout):
- """Outputs a gyp compatible list from the decoded variables.
-
- Similar to pprint.pprint() but with NIH syndrome.
- """
- # Order the dictionary keys by these keys in priority.
- ORDER = (
- 'variables', 'condition', 'command', 'relative_cwd', 'read_only',
- KEY_TRACKED, KEY_UNTRACKED)
-
- def sorting_key(x):
- """Gives priority to 'most important' keys before the others."""
- if x in ORDER:
- return str(ORDER.index(x))
- return x
-
- def loop_list(indent, items):
- for item in items:
- if isinstance(item, basestring):
- stdout.write('%s\'%s\',\n' % (indent, item))
- elif isinstance(item, dict):
- stdout.write('%s{\n' % indent)
- loop_dict(indent + ' ', item)
- stdout.write('%s},\n' % indent)
- elif isinstance(item, list):
- # A list inside a list will write the first item embedded.
- stdout.write('%s[' % indent)
- for index, i in enumerate(item):
- if isinstance(i, basestring):
- stdout.write(
- '\'%s\', ' % i.replace('\\', '\\\\').replace('\'', '\\\''))
- elif isinstance(i, dict):
- stdout.write('{\n')
- loop_dict(indent + ' ', i)
- if index != len(item) - 1:
- x = ', '
- else:
- x = ''
- stdout.write('%s}%s' % (indent, x))
- else:
- assert False
- stdout.write('],\n')
- else:
- assert False
-
- def loop_dict(indent, items):
- for key in sorted(items, key=sorting_key):
- item = items[key]
- stdout.write("%s'%s': " % (indent, key))
- if isinstance(item, dict):
- stdout.write('{\n')
- loop_dict(indent + ' ', item)
- stdout.write(indent + '},\n')
- elif isinstance(item, list):
- stdout.write('[\n')
- loop_list(indent + ' ', item)
- stdout.write(indent + '],\n')
- elif isinstance(item, basestring):
- stdout.write(
- '\'%s\',\n' % item.replace('\\', '\\\\').replace('\'', '\\\''))
- elif item in (True, False, None):
- stdout.write('%s\n' % item)
- else:
- assert False, item
-
- stdout.write('{\n')
- loop_dict(' ', variables)
- stdout.write('}\n')
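
A usage sketch:

import sys
pretty_print(
    {'variables': {'isolate_dependency_tracked': ['a.txt']}}, sys.stdout)
# Prints:
# {
#   'variables': {
#     'isolate_dependency_tracked': [
#       'a.txt',
#     ],
#   },
# }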
-
-
-def union(lhs, rhs):
- """Merges two compatible datastructures composed of dict/list/set."""
- assert lhs is not None or rhs is not None
- if lhs is None:
- return copy.deepcopy(rhs)
- if rhs is None:
- return copy.deepcopy(lhs)
- assert type(lhs) == type(rhs), (lhs, rhs)
- if hasattr(lhs, 'union'):
- # Includes set, OSSettings and Configs.
- return lhs.union(rhs)
- if isinstance(lhs, dict):
- return dict((k, union(lhs.get(k), rhs.get(k))) for k in set(lhs).union(rhs))
- elif isinstance(lhs, list):
- # Do not go inside the list.
- return lhs + rhs
- assert False, type(lhs)
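
A sketch of the merge semantics: dicts merge recursively, lists concatenate, None defers to the other side:

union({'a': [1], 'b': {'c': [2]}}, {'a': [3], 'b': {'d': [4]}})
# -> {'a': [1, 3], 'b': {'c': [2], 'd': [4]}}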
-
-
-def extract_comment(content):
- """Extracts file level comment."""
- out = []
- for line in content.splitlines(True):
- if line.startswith('#'):
- out.append(line)
- else:
- break
- return ''.join(out)
-
-
-def eval_content(content):
- """Evaluates a python file and return the value defined in it.
-
- Used in practice for .isolate files.
- """
- globs = {'__builtins__': None}
- locs = {}
- value = eval(content, globs, locs)
- assert locs == {}, locs
- assert globs == {'__builtins__': None}, globs
- return value
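
Since .isolate files are Python literals, loading one is just a restricted eval; a sketch:

eval_content("{'variables': {'command': ['./foo']}}")
# -> {'variables': {'command': ['./foo']}}
# Builtins are disabled, so content that calls e.g. open() fails to evaluate.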
-
-
-def verify_variables(variables):
- """Verifies the |variables| dictionary is in the expected format."""
- VALID_VARIABLES = [
- KEY_TOUCHED,
- KEY_TRACKED,
- KEY_UNTRACKED,
- 'command',
- 'read_only',
- ]
- assert isinstance(variables, dict), variables
- assert set(VALID_VARIABLES).issuperset(set(variables)), variables.keys()
- for name, value in variables.iteritems():
- if name == 'read_only':
- assert value in (True, False, None), value
- else:
- assert isinstance(value, list), value
- assert all(isinstance(i, basestring) for i in value), value
-
-
-def verify_condition(condition):
- """Verifies the |condition| dictionary is in the expected format."""
- VALID_INSIDE_CONDITION = ['variables']
- assert isinstance(condition, list), condition
- assert 2 <= len(condition) <= 3, condition
- assert re.match(r'OS==\"([a-z]+)\"', condition[0]), condition[0]
- for c in condition[1:]:
- assert isinstance(c, dict), c
- assert set(VALID_INSIDE_CONDITION).issuperset(set(c)), c.keys()
- verify_variables(c.get('variables', {}))
-
-
-def verify_root(value):
- VALID_ROOTS = ['variables', 'conditions']
- assert isinstance(value, dict), value
- assert set(VALID_ROOTS).issuperset(set(value)), value.keys()
- verify_variables(value.get('variables', {}))
-
- conditions = value.get('conditions', [])
- assert isinstance(conditions, list), conditions
- for condition in conditions:
- verify_condition(condition)
-
-
-def remove_weak_dependencies(values, key, item, item_oses):
- """Remove any oses from this key if the item is already under a strong key."""
- if key == KEY_TOUCHED:
- for stronger_key in (KEY_TRACKED, KEY_UNTRACKED):
- oses = values.get(stronger_key, {}).get(item, None)
- if oses:
- item_oses -= oses
-
- return item_oses
-
-
-def invert_map(variables):
- """Converts a dict(OS, dict(deptype, list(dependencies)) to a flattened view.
-
- Returns a tuple of:
- 1. dict(deptype, dict(dependency, set(OSes)) for easier processing.
- 2. All the OSes found as a set.
- """
- KEYS = (
- KEY_TOUCHED,
- KEY_TRACKED,
- KEY_UNTRACKED,
- 'command',
- 'read_only',
- )
- out = dict((key, {}) for key in KEYS)
- for os_name, values in variables.iteritems():
- for key in (KEY_TOUCHED, KEY_TRACKED, KEY_UNTRACKED):
- for item in values.get(key, []):
- out[key].setdefault(item, set()).add(os_name)
-
- # command needs special handling.
- command = tuple(values.get('command', []))
- out['command'].setdefault(command, set()).add(os_name)
-
- # read_only needs special handling.
- out['read_only'].setdefault(values.get('read_only'), set()).add(os_name)
- return out, set(variables)
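
A worked sketch of the flattening, with two OSes sharing one tracked file (using the KEY_* constants defined above):

invert_map({
    'linux': {KEY_TRACKED: ['a'], 'command': ['./a']},
    'mac': {KEY_TRACKED: ['a']},
})
# -> ({KEY_TOUCHED: {},
#      KEY_TRACKED: {'a': set(['linux', 'mac'])},
#      KEY_UNTRACKED: {},
#      'command': {('./a',): set(['linux']), (): set(['mac'])},
#      'read_only': {None: set(['linux', 'mac'])}},
#     set(['linux', 'mac']))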
-
-
-def reduce_inputs(values, oses):
- """Reduces the invert_map() output to the strictest minimum list.
-
- 1. Construct the inverse map first.
- 2. Look at each individual file and directory, map where they are used and
- reconstruct the inverse dictionary.
- 3. Do not convert back to negative if only 2 OSes were merged.
-
- Returns a tuple of:
- 1. the minimized dictionary
- 2. oses passed through as-is.
- """
- KEYS = (
- KEY_TOUCHED,
- KEY_TRACKED,
- KEY_UNTRACKED,
- 'command',
- 'read_only',
- )
- out = dict((key, {}) for key in KEYS)
- assert all(oses), oses
- if len(oses) > 2:
- for key in KEYS:
- for item, item_oses in values.get(key, {}).iteritems():
- item_oses = remove_weak_dependencies(values, key, item, item_oses)
- if not item_oses:
- continue
-
- # Converts all oses.difference('foo') to '!foo'.
- assert all(item_oses), item_oses
- missing = oses.difference(item_oses)
- if len(missing) == 1:
- # Replace it with a negative.
- out[key][item] = set(['!' + tuple(missing)[0]])
- elif not missing:
- out[key][item] = set([None])
- else:
- out[key][item] = set(item_oses)
- else:
- for key in KEYS:
- for item, item_oses in values.get(key, {}).iteritems():
- item_oses = remove_weak_dependencies(values, key, item, item_oses)
- if not item_oses:
- continue
-
- # Converts all oses.difference('foo') to '!foo'.
- assert None not in item_oses, item_oses
- out[key][item] = set(item_oses)
- return out, oses
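
A sketch of the negative-OS conversion: with three known OSes, a file present on all but one is rewritten as a negation (the len(oses) > 2 branch above):

out, _ = reduce_inputs(
    {KEY_TRACKED: {'a': set(['linux', 'mac'])}},
    set(['linux', 'mac', 'win']))
# out[KEY_TRACKED] == {'a': set(['!win'])}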
-
-
-def convert_map_to_isolate_dict(values, oses):
- """Regenerates back a .isolate configuration dict from files and dirs
- mappings generated from reduce_inputs().
- """
- # First, invert the mapping back into a dict.
- config = {}
- for key in values:
- for item, oses in values[key].iteritems():
- if item is None:
- # For read_only default.
- continue
- for cond_os in oses:
- cond_key = None if cond_os is None else cond_os.lstrip('!')
- # Insert the if/else dicts.
- condition_values = config.setdefault(cond_key, [{}, {}])
- # If condition is negative, use index 1, else use index 0.
- cond_value = condition_values[int((cond_os or '').startswith('!'))]
- variables = cond_value.setdefault('variables', {})
-
- if item in (True, False):
- # One-off for read_only.
- variables[key] = item
- else:
- if isinstance(item, tuple) and item:
- # One-off for command.
- # Do not merge lists and do not sort!
- # Note that item is a tuple.
- assert key not in variables
- variables[key] = list(item)
- elif item:
- # The list of items (files or dirs). Append the new item and keep
- # the list sorted.
- l = variables.setdefault(key, [])
- l.append(item)
- l.sort()
-
- out = {}
- for o in sorted(config):
- d = config[o]
- if o is None:
- assert not d[1]
- out = union(out, d[0])
- else:
- c = out.setdefault('conditions', [])
- if d[1]:
- c.append(['OS=="%s"' % o] + d)
- else:
- c.append(['OS=="%s"' % o] + d[0:1])
- return out
-
-
-### Internal state files.
-
-
-class OSSettings(object):
- """Represents the dependencies for an OS. The structure is immutable.
-
- It's the .isolate settings for a specific file.
- """
- def __init__(self, name, values):
- self.name = name
- verify_variables(values)
- self.touched = sorted(values.get(KEY_TOUCHED, []))
- self.tracked = sorted(values.get(KEY_TRACKED, []))
- self.untracked = sorted(values.get(KEY_UNTRACKED, []))
- self.command = values.get('command', [])[:]
- self.read_only = values.get('read_only')
-
- def union(self, rhs):
- assert self.name == rhs.name
- assert not (self.command and rhs.command)
- var = {
- KEY_TOUCHED: sorted(self.touched + rhs.touched),
- KEY_TRACKED: sorted(self.tracked + rhs.tracked),
- KEY_UNTRACKED: sorted(self.untracked + rhs.untracked),
- 'command': self.command or rhs.command,
- 'read_only': rhs.read_only if self.read_only is None else self.read_only,
- }
- return OSSettings(self.name, var)
-
- def flatten(self):
- out = {}
- if self.command:
- out['command'] = self.command
- if self.touched:
- out[KEY_TOUCHED] = self.touched
- if self.tracked:
- out[KEY_TRACKED] = self.tracked
- if self.untracked:
- out[KEY_UNTRACKED] = self.untracked
- if self.read_only is not None:
- out['read_only'] = self.read_only
- return out
-
-
-class Configs(object):
- """Represents a processed .isolate file.
-
- Stores the file in a processed way, split by each OS-specific
- configuration.
-
- The self.per_os[None] member contains all the 'else' clauses plus the default
- values. It is not included in the flatten() result.
- """
- def __init__(self, oses, file_comment):
- self.file_comment = file_comment
- self.per_os = {
- None: OSSettings(None, {}),
- }
- self.per_os.update(dict((name, OSSettings(name, {})) for name in oses))
-
- def union(self, rhs):
- items = list(set(self.per_os.keys() + rhs.per_os.keys()))
- # Takes the first file comment, preferring lhs.
- out = Configs(items, self.file_comment or rhs.file_comment)
- for key in items:
- out.per_os[key] = union(self.per_os.get(key), rhs.per_os.get(key))
- return out
-
- def add_globals(self, values):
- for key in self.per_os:
- self.per_os[key] = self.per_os[key].union(OSSettings(key, values))
-
- def add_values(self, for_os, values):
- self.per_os[for_os] = self.per_os[for_os].union(OSSettings(for_os, values))
-
- def add_negative_values(self, for_os, values):
- """Includes the variables to all OSes except |for_os|.
-
- This includes 'None' so unknown OSes gets it too.
- """
- for key in self.per_os:
- if key != for_os:
- self.per_os[key] = self.per_os[key].union(OSSettings(key, values))
-
- def flatten(self):
- """Returns a flat dictionary representation of the configuration.
-
- Skips None pseudo-OS.
- """
- return dict(
- (k, v.flatten()) for k, v in self.per_os.iteritems() if k is not None)
-
-
-def load_isolate_as_config(value, file_comment, default_oses):
- """Parses one .isolate file and returns a Configs() instance.
-
- |value| is the loaded dictionary that was defined in the gyp file.
-
- The expected format is strict; anything deviating from the format below will
- throw an assert:
- {
- 'variables': {
- 'command': [
- ...
- ],
- 'isolate_dependency_tracked': [
- ...
- ],
- 'isolate_dependency_untracked': [
- ...
- ],
- 'read_only': False,
- },
- 'conditions': [
- ['OS=="<os>"', {
- 'variables': {
- ...
- },
- }, { # else
- 'variables': {
- ...
- },
- }],
- ...
- ],
- }
- """
- verify_root(value)
-
- # Scan to get the list of OSes.
- conditions = value.get('conditions', [])
- oses = set(re.match(r'OS==\"([a-z]+)\"', c[0]).group(1) for c in conditions)
- oses = oses.union(default_oses)
- configs = Configs(oses, file_comment)
-
- # Global level variables.
- configs.add_globals(value.get('variables', {}))
-
- # OS specific variables.
- for condition in conditions:
- condition_os = re.match(r'OS==\"([a-z]+)\"', condition[0]).group(1)
- configs.add_values(condition_os, condition[1].get('variables', {}))
- if len(condition) > 2:
- configs.add_negative_values(
- condition_os, condition[2].get('variables', {}))
- return configs
-
-
-def load_isolate_for_flavor(content, flavor):
- """Loads the .isolate file and returns the information unprocessed.
-
- Returns the command, dependencies and read_only flag. The dependencies are
- fixed to use os.path.sep.
- """
- # Load the .isolate file, process its conditions, retrieve the command and
- # dependencies.
- configs = load_isolate_as_config(eval_content(content), None, DEFAULT_OSES)
- config = configs.per_os.get(flavor) or configs.per_os.get(None)
- if not config:
- raise ExecutionError('Failed to load configuration for \'%s\'' % flavor)
- # Merge tracked and untracked dependencies, isolate.py doesn't care about the
- # trackability of the dependencies, only the build tool does.
- dependencies = [
- f.replace('/', os.path.sep) for f in config.tracked + config.untracked
- ]
- touched = [f.replace('/', os.path.sep) for f in config.touched]
- return config.command, dependencies, touched, config.read_only
-
-
-class Flattenable(object):
- """Represents data that can be represented as a json file."""
- MEMBERS = ()
-
- def flatten(self):
- """Returns a json-serializable version of itself.
-
- Skips None entries.
- """
- items = ((member, getattr(self, member)) for member in self.MEMBERS)
- return dict((member, value) for member, value in items if value is not None)
-
- @classmethod
- def load(cls, data):
- """Loads a flattened version."""
- data = data.copy()
- out = cls()
- for member in out.MEMBERS:
- if member in data:
- # Access to a protected member XXX of a client class
- # pylint: disable=W0212
- out._load_member(member, data.pop(member))
- if data:
- raise ValueError(
- 'Found unexpected entry %s while constructing an object %s' %
- (data, cls.__name__))
- return out
-
- def _load_member(self, member, value):
- """Loads a member into self."""
- setattr(self, member, value)
-
- @classmethod
- def load_file(cls, filename):
- """Loads the data from a file or return an empty instance."""
- out = cls()
- try:
- out = cls.load(trace_inputs.read_json(filename))
- logging.debug('Loaded %s(%s)' % (cls.__name__, filename))
- except (IOError, ValueError):
- logging.warn('Failed to load %s' % filename)
- return out
-
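A minimal sketch of the MEMBERS round-trip with a hypothetical subclass (not part of the original file): load() pops the known members, and flatten() skips the ones left at None:

    class Point(Flattenable):
      MEMBERS = ('x', 'y')

      def __init__(self):
        super(Point, self).__init__()
        self.x = None
        self.y = None

    point = Point.load({'x': 1})
    assert point.flatten() == {'x': 1}  # 'y' is None, so it is skipped.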
-
-class Result(Flattenable):
- """Describes the content of a .result file.
-
- This file is used by run_test_from_archive.py so its content is strictly only
- what is necessary to run the test outside of a checkout.
-
- It is important to note that the 'files' dict keys use the native OS path
- separator instead of the '/' used in .isolate files.
- """
- MEMBERS = (
- 'command',
- 'files',
- 'os',
- 'read_only',
- 'relative_cwd',
- )
-
- os = get_flavor()
-
- def __init__(self):
- super(Result, self).__init__()
- self.command = []
- self.files = {}
- self.read_only = None
- self.relative_cwd = None
-
- def update(self, command, infiles, touched, read_only, relative_cwd):
- """Updates the result state with new information."""
- self.command = command
- # Add new files.
- for f in infiles:
- self.files.setdefault(f, {})
- for f in touched:
- self.files.setdefault(f, {})['touched_only'] = True
- # Prune extraneous files that are not a dependency anymore.
- for f in set(self.files).difference(set(infiles).union(touched)):
- del self.files[f]
- if read_only is not None:
- self.read_only = read_only
- self.relative_cwd = relative_cwd
-
- def _load_member(self, member, value):
- if member == 'os':
- if value != self.os:
- raise run_test_from_archive.ConfigError(
- 'The .results file was created on another platform')
- else:
- super(Result, self)._load_member(member, value)
-
- def __str__(self):
- out = '%s(\n' % self.__class__.__name__
- out += ' command: %s\n' % self.command
- out += ' files: %d\n' % len(self.files)
- out += ' read_only: %s\n' % self.read_only
- out += ' relative_cwd: %s)' % self.relative_cwd
- return out
-
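A short sketch of the update() bookkeeping above (hypothetical file names, assuming the module scope where Result is defined): new dependencies get empty metadata, touched files are flagged, and entries no longer referenced are pruned:

    result = Result()
    result.update(['./foo_test'], ['a.txt'], ['b.txt'], None, '.')
    assert result.files == {'a.txt': {}, 'b.txt': {'touched_only': True}}
    # A second update that drops b.txt prunes it from the file dict.
    result.update(['./foo_test'], ['a.txt'], [], None, '.')
    assert result.files == {'a.txt': {}}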
-
-class SavedState(Flattenable):
- """Describes the content of a .state file.
-
- The items in this file are simply to improve the developer's life and aren't
- used by run_test_from_archive.py. This file can always be safely removed.
-
- isolate_file makes it possible to find root_dir again; variables are used for
- stateful reruns.
- """
- MEMBERS = (
- 'isolate_file',
- 'variables',
- )
-
- def __init__(self):
- super(SavedState, self).__init__()
- self.isolate_file = None
- self.variables = {}
-
- def update(self, isolate_file, variables):
- """Updates the saved state with new information."""
- self.isolate_file = isolate_file
- self.variables.update(variables)
-
- @classmethod
- def load(cls, data):
- out = super(SavedState, cls).load(data)
- if out.isolate_file:
- out.isolate_file = trace_inputs.get_native_path_case(out.isolate_file)
- return out
-
- def __str__(self):
- out = '%s(\n' % self.__class__.__name__
- out += ' isolate_file: %s\n' % self.isolate_file
- out += ' variables: %s' % ''.join(
- '\n %s=%s' % (k, self.variables[k]) for k in sorted(self.variables))
- out += ')'
- return out
-
-
-class CompleteState(object):
- """Contains all the state to run the task at hand."""
- def __init__(self, result_file, result, saved_state):
- super(CompleteState, self).__init__()
- self.result_file = result_file
- # Contains the data that will be used by run_test_from_archive.py
- self.result = result
- # Contains the data to ease developer's use-case but that is not strictly
- # necessary.
- self.saved_state = saved_state
-
- @classmethod
- def load_files(cls, result_file):
- """Loads state from disk."""
- assert os.path.isabs(result_file), result_file
- return cls(
- result_file,
- Result.load_file(result_file),
- SavedState.load_file(result_to_state(result_file)))
-
- def load_isolate(self, isolate_file, variables):
- """Updates self.result and self.saved_state with information loaded from a
- .isolate file.
-
- Processes the loaded data and deduces root_dir and relative_cwd.
- """
- # Make sure to not depend on os.getcwd().
- assert os.path.isabs(isolate_file), isolate_file
- logging.info(
- 'CompleteState.load_isolate(%s, %s)' % (isolate_file, variables))
- relative_base_dir = os.path.dirname(isolate_file)
-
- # Processes the variables and update the saved state.
- variables = process_variables(variables, relative_base_dir)
- self.saved_state.update(isolate_file, variables)
-
- with open(isolate_file, 'r') as f:
- # At that point, variables are not replaced yet in command and infiles.
- # infiles may contain directory entries and are in posix style.
- command, infiles, touched, read_only = load_isolate_for_flavor(
- f.read(), get_flavor())
- command = [eval_variables(i, self.saved_state.variables) for i in command]
- infiles = [eval_variables(f, self.saved_state.variables) for f in infiles]
- touched = [eval_variables(f, self.saved_state.variables) for f in touched]
- # root_dir is automatically determined by the deepest root accessed with the
- # form '../../foo/bar'.
- root_dir = determine_root_dir(relative_base_dir, infiles + touched)
- # The relative directory is automatically determined by the relative path
- # between root_dir and the directory containing the .isolate file,
- # isolate_base_dir.
- relative_cwd = os.path.relpath(relative_base_dir, root_dir)
- # Normalize the files relative to root_dir. It is important to keep the
- # trailing os.path.sep at that step.
- infiles = [
- relpath(normpath(os.path.join(relative_base_dir, f)), root_dir)
- for f in infiles
- ]
- touched = [
- relpath(normpath(os.path.join(relative_base_dir, f)), root_dir)
- for f in touched
- ]
- # Expand the directories by listing each file inside. Up to now, trailing
- # os.path.sep must be kept. Do not expand 'touched'.
- infiles = expand_directories_and_symlinks(
- root_dir,
- infiles,
- lambda x: re.match(r'.*\.(git|svn|pyc)$', x))
-
- # Finally, update the new stuff in the foo.result file, the file that is
- # used by run_test_from_archive.py.
- self.result.update(command, infiles, touched, read_only, relative_cwd)
- logging.debug(self)
-
- def process_inputs(self, level):
- """Updates self.result.files with the files' mode and hash.
-
- See process_input() for more information.
- """
- for infile in sorted(self.result.files):
- filepath = os.path.join(self.root_dir, infile)
- self.result.files[infile] = process_input(
- filepath, self.result.files[infile], level, self.result.read_only)
-
- def save_files(self):
- """Saves both self.result and self.saved_state."""
- logging.debug('Dumping to %s' % self.result_file)
- trace_inputs.write_json(self.result_file, self.result.flatten(), True)
- total_bytes = sum(i.get('size', 0) for i in self.result.files.itervalues())
- if total_bytes:
- logging.debug('Total size: %d bytes' % total_bytes)
- saved_state_file = result_to_state(self.result_file)
- logging.debug('Dumping to %s' % saved_state_file)
- trace_inputs.write_json(saved_state_file, self.saved_state.flatten(), True)
-
- @property
- def root_dir(self):
- """isolate_file is always inside relative_cwd relative to root_dir."""
- isolate_dir = os.path.dirname(self.saved_state.isolate_file)
- # Special case '.'.
- if self.result.relative_cwd == '.':
- return isolate_dir
- assert isolate_dir.endswith(self.result.relative_cwd), (
- isolate_dir, self.result.relative_cwd)
- return isolate_dir[:-(len(self.result.relative_cwd) + 1)]
-
- @property
- def resultdir(self):
- """Directory containing the results, usually equivalent to the variable
- PRODUCT_DIR.
- """
- return os.path.dirname(self.result_file)
-
- def __str__(self):
- def indent(data, indent_length):
- """Indents text."""
- spacing = ' ' * indent_length
- return ''.join(spacing + l for l in str(data).splitlines(True))
-
- out = '%s(\n' % self.__class__.__name__
- out += ' root_dir: %s\n' % self.root_dir
- out += ' result: %s\n' % indent(self.result, 2)
- out += ' saved_state: %s)' % indent(self.saved_state, 2)
- return out
-
-
-def load_complete_state(options, level):
- """Loads a CompleteState.
-
- This includes data from .isolate, .result and .state files.
-
- Arguments:
- options: Options instance generated with OptionParserIsolate.
- level: Amount of data to fetch.
- """
- if options.result:
- # Load the previous state if it was present. Namely, "foo.result" and
- # "foo.state".
- complete_state = CompleteState.load_files(options.result)
- else:
- # Constructs a dummy object that cannot be saved. Useful for temporary
- # commands like 'run'.
- complete_state = CompleteState(None, Result(), SavedState())
- options.isolate = options.isolate or complete_state.saved_state.isolate_file
- if not options.isolate:
- raise ExecutionError('A .isolate file is required.')
- if (complete_state.saved_state.isolate_file and
- options.isolate != complete_state.saved_state.isolate_file):
- raise ExecutionError(
- '%s and %s do not match.' % (
- options.isolate, complete_state.saved_state.isolate_file))
-
- # Then load the .isolate file and expand the directories.
- complete_state.load_isolate(options.isolate, options.variables)
-
- # Regenerate complete_state.result.files.
- complete_state.process_inputs(level)
- return complete_state
-
-
-def read_trace_as_isolate_dict(complete_state):
- """Reads a trace and returns the .isolate dictionary."""
- api = trace_inputs.get_api()
- logfile = complete_state.result_file + '.log'
- if not os.path.isfile(logfile):
- raise ExecutionError(
- 'No log file \'%s\' to read, did you forget to \'trace\'?' % logfile)
- try:
- results = trace_inputs.load_trace(
- logfile, complete_state.root_dir, api, default_blacklist)
- tracked, touched = split_touched(results.existent)
- value = generate_isolate(
- tracked,
- [],
- touched,
- complete_state.root_dir,
- complete_state.saved_state.variables,
- complete_state.result.relative_cwd)
- return value
- except trace_inputs.TracingFailure, e:
- raise ExecutionError(
- 'Reading traces failed for: %s\n%s' %
- (' '.join(complete_state.result.command), str(e)))
-
-
-def print_all(comment, data, stream):
- """Prints a complete .isolate file and its top-level file comment into a
- stream.
- """
- if comment:
- stream.write(comment)
- pretty_print(data, stream)
-
-
-def merge(complete_state):
- """Reads a trace and merges it back into the source .isolate file."""
- value = read_trace_as_isolate_dict(complete_state)
-
- # Now take that data and union it into the original .isolate file.
- with open(complete_state.saved_state.isolate_file, 'r') as f:
- prev_content = f.read()
- prev_config = load_isolate_as_config(
- eval_content(prev_content),
- extract_comment(prev_content),
- DEFAULT_OSES)
- new_config = load_isolate_as_config(value, '', DEFAULT_OSES)
- config = union(prev_config, new_config)
- # pylint: disable=E1103
- data = convert_map_to_isolate_dict(
- *reduce_inputs(*invert_map(config.flatten())))
- print 'Updating %s' % complete_state.saved_state.isolate_file
- with open(complete_state.saved_state.isolate_file, 'wb') as f:
- print_all(config.file_comment, data, f)
-
-
-def CMDcheck(args):
- """Checks that all the inputs are present and update .result."""
- parser = OptionParserIsolate(command='check')
- options, _ = parser.parse_args(args)
- complete_state = load_complete_state(options, NO_INFO)
-
- # Nothing is done specifically. Just store the result and state.
- complete_state.save_files()
- return 0
-
-
-def CMDhashtable(args):
- """Creates a hash table content addressed object store.
-
- All the files listed in the .result file are put in the output directory with
- the file name being the sha-1 of the file's content.
- """
- parser = OptionParserIsolate(command='hashtable')
- options, _ = parser.parse_args(args)
-
- with run_test_from_archive.Profiler('GenerateHashtable'):
- success = False
- try:
- complete_state = load_complete_state(options, WITH_HASH)
- options.outdir = (
- options.outdir or os.path.join(complete_state.resultdir, 'hashtable'))
- # Make sure that complete_state isn't modified until save_files() is
- # called, because any changes made to it here will propagate to the files
- # created (which is probably not intended).
- complete_state.save_files()
-
- logging.info('Creating content addressed object store with %d items',
- len(complete_state.result.files))
-
- with open(complete_state.result_file, 'rb') as f:
- manifest_hash = hashlib.sha1(f.read()).hexdigest()
- manifest_metadata = {'sha-1': manifest_hash}
-
- infiles = complete_state.result.files
- infiles[complete_state.result_file] = manifest_metadata
-
- if re.match(r'^https?://.+$', options.outdir):
- upload_sha1_tree(
- base_url=options.outdir,
- indir=complete_state.root_dir,
- infiles=infiles)
- else:
- recreate_tree(
- outdir=options.outdir,
- indir=complete_state.root_dir,
- infiles=infiles,
- action=run_test_from_archive.HARDLINK,
- as_sha1=True)
- success = True
- finally:
- # If the command failed, delete the .results file if it exists. This is
- # important so no stale swarm job is executed.
- if not success and os.path.isfile(options.result):
- os.remove(options.result)
-
-
-def CMDnoop(args):
- """Touches --result but does nothing else.
-
- This mode is to help the transition, since some builders do not have all the
- test data files checked out. Touches result_file and exits silently.
- """
- parser = OptionParserIsolate(command='noop')
- options, _ = parser.parse_args(args)
- # In particular, do not call load_complete_state().
- open(options.result, 'a').close()
- return 0
-
-
-def CMDmerge(args):
- """Reads and merges the data from the trace back into the original .isolate.
-
- Ignores --outdir.
- """
- parser = OptionParserIsolate(command='merge', require_result=False)
- options, _ = parser.parse_args(args)
- complete_state = load_complete_state(options, NO_INFO)
- merge(complete_state)
- return 0
-
-
-def CMDread(args):
- """Reads the trace file generated with command 'trace'.
-
- Ignores --outdir.
- """
- parser = OptionParserIsolate(command='read', require_result=False)
- options, _ = parser.parse_args(args)
- complete_state = load_complete_state(options, NO_INFO)
- value = read_trace_as_isolate_dict(complete_state)
- pretty_print(value, sys.stdout)
- return 0
-
-
-def CMDremap(args):
- """Creates a directory with all the dependencies mapped into it.
-
- Useful to test manually why a test is failing. The target executable is not
- run.
- """
- parser = OptionParserIsolate(command='remap', require_result=False)
- options, _ = parser.parse_args(args)
- complete_state = load_complete_state(options, STATS_ONLY)
-
- if not options.outdir:
- options.outdir = run_test_from_archive.make_temp_dir(
- 'isolate', complete_state.root_dir)
- else:
- if not os.path.isdir(options.outdir):
- os.makedirs(options.outdir)
- print 'Remapping into %s' % options.outdir
- if len(os.listdir(options.outdir)):
- raise ExecutionError('Can\'t remap in a non-empty directory')
- recreate_tree(
- outdir=options.outdir,
- indir=complete_state.root_dir,
- infiles=complete_state.result.files,
- action=run_test_from_archive.HARDLINK,
- as_sha1=False)
- if complete_state.result.read_only:
- run_test_from_archive.make_writable(options.outdir, True)
-
- if complete_state.result_file:
- complete_state.save_files()
- return 0
-
-
-def CMDrun(args):
- """Runs the test executable in an isolated (temporary) directory.
-
- All the dependencies are mapped into the temporary directory and the
- directory is cleaned up after the target exits. Warning: if --outdir is
- specified, it is deleted upon exit.
-
- Argument processing stops at the first unrecognized argument and these
- arguments are appended to the command line of the target to run. For example,
- use: isolate.py -r foo.results -- --gtest_filter=Foo.Bar
- """
- parser = OptionParserIsolate(command='run', require_result=False)
- parser.enable_interspersed_args()
- options, args = parser.parse_args(args)
- complete_state = load_complete_state(options, STATS_ONLY)
- cmd = complete_state.result.command + args
- if not cmd:
- raise ExecutionError('No command to run')
- cmd = trace_inputs.fix_python_path(cmd)
- try:
- if not options.outdir:
- options.outdir = run_test_from_archive.make_temp_dir(
- 'isolate', complete_state.root_dir)
- else:
- if not os.path.isdir(options.outdir):
- os.makedirs(options.outdir)
- recreate_tree(
- outdir=options.outdir,
- indir=complete_state.root_dir,
- infiles=complete_state.result.files,
- action=run_test_from_archive.HARDLINK,
- as_sha1=False)
- cwd = os.path.normpath(
- os.path.join(options.outdir, complete_state.result.relative_cwd))
- if not os.path.isdir(cwd):
- # It can happen when no files are mapped from the directory containing the
- # .isolate file. But the directory must exist to be the current working
- # directory.
- os.makedirs(cwd)
- if complete_state.result.read_only:
- run_test_from_archive.make_writable(options.outdir, True)
- logging.info('Running %s, cwd=%s' % (cmd, cwd))
- result = subprocess.call(cmd, cwd=cwd)
- finally:
- if options.outdir:
- run_test_from_archive.rmtree(options.outdir)
-
- if complete_state.result_file:
- complete_state.save_files()
- return result
-
-
-def CMDtrace(args):
- """Traces the target using trace_inputs.py.
-
- It runs the executable without remapping it, and traces all the files it and
- its child processes access. Then the 'read' command can be used to generate an
- updated .isolate file out of it.
-
- Argument processing stops at the first unrecognized argument and these
- arguments are appended to the command line of the target to run. For example,
- use: isolate.py -r foo.results -- --gtest_filter=Foo.Bar
- """
- parser = OptionParserIsolate(command='trace')
- parser.enable_interspersed_args()
- parser.add_option(
- '-m', '--merge', action='store_true',
- help='After tracing, merge the results back in the .isolate file')
- options, args = parser.parse_args(args)
- complete_state = load_complete_state(options, STATS_ONLY)
- cmd = complete_state.result.command + args
- if not cmd:
- raise ExecutionError('No command to run')
- cmd = trace_inputs.fix_python_path(cmd)
- cwd = os.path.normpath(os.path.join(
- complete_state.root_dir, complete_state.result.relative_cwd))
- logging.info('Running %s, cwd=%s' % (cmd, cwd))
- api = trace_inputs.get_api()
- logfile = complete_state.result_file + '.log'
- api.clean_trace(logfile)
- try:
- with api.get_tracer(logfile) as tracer:
- result, _ = tracer.trace(
- cmd,
- cwd,
- 'default',
- True)
- except trace_inputs.TracingFailure, e:
- raise ExecutionError('Tracing failed for: %s\n%s' % (' '.join(cmd), str(e)))
-
- complete_state.save_files()
-
- if options.merge:
- merge(complete_state)
-
- return result
-
-
-class OptionParserIsolate(trace_inputs.OptionParserWithNiceDescription):
- """Adds automatic --isolate, --result, --out and --variables handling."""
- def __init__(self, require_result=True, **kwargs):
- trace_inputs.OptionParserWithNiceDescription.__init__(self, **kwargs)
- default_variables = [('OS', get_flavor())]
- if sys.platform in ('win32', 'cygwin'):
- default_variables.append(('EXECUTABLE_SUFFIX', '.exe'))
- else:
- default_variables.append(('EXECUTABLE_SUFFIX', ''))
- group = optparse.OptionGroup(self, "Common options")
- group.add_option(
- '-r', '--result',
- metavar='FILE',
- help='.result file to store the json manifest')
- group.add_option(
- '-i', '--isolate',
- metavar='FILE',
- help='.isolate file to load the dependency data from')
- group.add_option(
- '-V', '--variable',
- nargs=2,
- action='append',
- default=default_variables,
- dest='variables',
- metavar='FOO BAR',
- help='Variables to process in the .isolate file, default: %default. '
- 'Variables are persistent across calls; they are saved inside '
- '<results>.state')
- group.add_option(
- '-o', '--outdir', metavar='DIR',
- help='Directory used to recreate the tree or store the hash table. '
- 'If the environment variable ISOLATE_HASH_TABLE_DIR exists, it '
- 'will be used. Otherwise, for run and remap, uses a /tmp '
- 'subdirectory. For the other modes, defaults to the directory '
- 'containing --result')
- self.add_option_group(group)
- self.require_result = require_result
-
- def parse_args(self, *args, **kwargs):
- """Makes sure the paths make sense.
-
- On Windows, / and \ are often mixed together in a path.
- """
- options, args = trace_inputs.OptionParserWithNiceDescription.parse_args(
- self, *args, **kwargs)
- if not self.allow_interspersed_args and args:
- self.error('Unsupported argument: %s' % args)
-
- options.variables = dict(options.variables)
-
- if self.require_result and not options.result:
- self.error('--result is required.')
- if options.result and not options.result.endswith('.results'):
- self.error('--result value must end with \'.results\'')
-
- if options.result:
- options.result = os.path.abspath(options.result.replace('/', os.path.sep))
-
- if options.isolate:
- options.isolate = trace_inputs.get_native_path_case(
- os.path.abspath(
- options.isolate.replace('/', os.path.sep)))
-
- if options.outdir and not re.match(r'^https?://.+$', options.outdir):
- options.outdir = os.path.abspath(
- options.outdir.replace('/', os.path.sep))
-
- return options, args
-
-
-### Glue code to make all the commands work magically.
-
-
-CMDhelp = trace_inputs.CMDhelp
-
-
-def main(argv):
- try:
- return trace_inputs.main_impl(argv)
- except (
- ExecutionError,
- run_test_from_archive.MappingError,
- run_test_from_archive.ConfigError) as e:
- sys.stderr.write('\nError: ')
- sys.stderr.write(str(e))
- sys.stderr.write('\n')
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/tools/isolate/isolate_merge.py b/tools/isolate/isolate_merge.py
deleted file mode 100755
index cab86d6..0000000
--- a/tools/isolate/isolate_merge.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Merges multiple OS-specific gyp dependency lists into one that works on all
-of them.
-
-The logic is relatively simple: take the current conditions, add more
-conditions, find the strict subset. Done.
-"""
-
-import logging
-import sys
-
-from isolate import Configs, DEFAULT_OSES, eval_content, extract_comment
-from isolate import load_isolate_as_config, union, convert_map_to_isolate_dict
-from isolate import reduce_inputs, invert_map, print_all
-import run_test_cases
-
-
-def load_isolates(items, default_oses):
- """Parses each .isolate file and returns the merged results.
-
- It only loads what load_isolate_as_config() can process.
-
- Returns:
- configs: a Configs() instance holding the union of all the parsed .isolate
- files.
- """
- configs = Configs(default_oses, None)
- for item in items:
- logging.debug('loading %s' % item)
- with open(item, 'r') as f:
- content = f.read()
- new_config = load_isolate_as_config(
- eval_content(content), extract_comment(content), default_oses)
- logging.debug('has OSes: %s' % ','.join(k for k in new_config.per_os if k))
- configs = union(configs, new_config)
- logging.debug('Total OSes: %s' % ','.join(k for k in configs.per_os if k))
- return configs
-
-
-def main(args=None):
- parser = run_test_cases.OptionParserWithLogging(
- usage='%prog <options> [file1] [file2] ...')
- parser.add_option(
- '-o', '--output', help='Output to file instead of stdout')
- parser.add_option(
- '--os', default=','.join(DEFAULT_OSES),
- help='Inject the list of OSes, default: %default')
-
- options, args = parser.parse_args(args)
-
- configs = load_isolates(args, options.os.split(','))
- data = convert_map_to_isolate_dict(
- *reduce_inputs(*invert_map(configs.flatten())))
- if options.output:
- with open(options.output, 'wb') as f:
- print_all(configs.file_comment, data, f)
- else:
- print_all(configs.file_comment, data, sys.stdout)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/list_test_cases.py b/tools/isolate/list_test_cases.py
deleted file mode 100755
index da741e2..0000000
--- a/tools/isolate/list_test_cases.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""List all the test cases for a google test.
-
-See more info at http://code.google.com/p/googletest/.
-"""
-
-import sys
-
-import run_test_cases
-
-
-def main():
- """CLI frontend to validate arguments."""
- parser = run_test_cases.OptionParserWithTestSharding(
- usage='%prog <options> [gtest]')
- parser.add_option(
- '-d', '--disabled',
- action='store_true',
- help='Include DISABLED_ tests')
- parser.add_option(
- '-f', '--fails',
- action='store_true',
- help='Include FAILS_ tests')
- parser.add_option(
- '-F', '--flaky',
- action='store_true',
- help='Include FLAKY_ tests')
- options, args = parser.parse_args()
- if not args:
- parser.error('Please provide the executable to run')
-
- cmd = run_test_cases.fix_python_path(args)
- try:
- tests = run_test_cases.list_test_cases(
- cmd,
- options.index,
- options.shards,
- options.disabled,
- options.fails,
- options.flaky)
- for test in tests:
- print test
- except run_test_cases.Failure, e:
- print e.args[0]
- return e.args[1]
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/run_test_cases.py b/tools/isolate/run_test_cases.py
deleted file mode 100755
index 2520cc0..0000000
--- a/tools/isolate/run_test_cases.py
+++ /dev/null
@@ -1,904 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs each test cases as a single shard, single process execution.
-
-Similar to sharding_supervisor.py but finer grained. Runs multiple instances in
-parallel.
-"""
-
-import fnmatch
-import json
-import logging
-import optparse
-import os
-import Queue
-import subprocess
-import sys
-import threading
-import time
-
-
-# These are known to influence the way the output is generated.
-KNOWN_GTEST_ENV_VARS = [
- 'GTEST_ALSO_RUN_DISABLED_TESTS',
- 'GTEST_BREAK_ON_FAILURE',
- 'GTEST_CATCH_EXCEPTIONS',
- 'GTEST_COLOR',
- 'GTEST_FILTER',
- 'GTEST_OUTPUT',
- 'GTEST_PRINT_TIME',
- 'GTEST_RANDOM_SEED',
- 'GTEST_REPEAT',
- 'GTEST_SHARD_INDEX',
- 'GTEST_SHARD_STATUS_FILE',
- 'GTEST_SHUFFLE',
- 'GTEST_THROW_ON_FAILURE',
- 'GTEST_TOTAL_SHARDS',
-]
-
-# These need to be popped out before running a test.
-GTEST_ENV_VARS_TO_REMOVE = [
- # TODO(maruel): Handle.
- 'GTEST_ALSO_RUN_DISABLED_TESTS',
- 'GTEST_FILTER',
- # TODO(maruel): Handle.
- 'GTEST_OUTPUT',
- # TODO(maruel): Handle.
- 'GTEST_RANDOM_SEED',
- # TODO(maruel): Handle.
- 'GTEST_REPEAT',
- 'GTEST_SHARD_INDEX',
- # TODO(maruel): Handle.
- 'GTEST_SHUFFLE',
- 'GTEST_TOTAL_SHARDS',
-]
-
-
-def num_processors():
- """Returns the number of processors.
-
- Python on OSX 10.6 raises a NotImplementedError exception.
- """
- try:
- # Multiprocessing
- import multiprocessing
- return multiprocessing.cpu_count()
- except: # pylint: disable=W0702
- # Mac OS 10.6
- return int(os.sysconf('SC_NPROCESSORS_ONLN'))
-
-
-if subprocess.mswindows:
- import msvcrt # pylint: disable=F0401
- from ctypes import wintypes
- from ctypes import windll
-
- def ReadFile(handle, desired_bytes):
- """Calls kernel32.ReadFile()."""
- c_read = wintypes.DWORD()
- buff = wintypes.create_string_buffer(desired_bytes+1)
- windll.kernel32.ReadFile(
- handle, buff, desired_bytes, wintypes.byref(c_read), None)
- # NULL terminate it.
- buff[c_read.value] = '\x00'
- return wintypes.GetLastError(), buff.value
-
- def PeekNamedPipe(handle):
- """Calls kernel32.PeekNamedPipe(). Simplified version."""
- c_avail = wintypes.DWORD()
- c_message = wintypes.DWORD()
- success = windll.kernel32.PeekNamedPipe(
- handle, None, 0, None, wintypes.byref(c_avail),
- wintypes.byref(c_message))
- if not success:
- raise OSError(wintypes.GetLastError())
- return c_avail.value
-
- def recv_impl(conn, maxsize, timeout):
- """Reads from a pipe without blocking."""
- if timeout:
- start = time.time()
- x = msvcrt.get_osfhandle(conn.fileno())
- try:
- while True:
- avail = min(PeekNamedPipe(x), maxsize)
- if avail:
- return ReadFile(x, avail)[1]
- if not timeout or (time.time() - start) >= timeout:
- return
- # Polling rocks.
- time.sleep(0.001)
- except OSError:
- # Not classy but fits our needs.
- return None
-
-else:
- import fcntl
- import select
-
- def recv_impl(conn, maxsize, timeout):
- """Reads from a pipe without blocking."""
- if not select.select([conn], [], [], timeout)[0]:
- return None
-
- # Temporarily make it non-blocking.
- flags = fcntl.fcntl(conn, fcntl.F_GETFL)
- if not conn.closed:
- fcntl.fcntl(conn, fcntl.F_SETFL, flags | os.O_NONBLOCK)
- try:
- return conn.read(maxsize)
- finally:
- if not conn.closed:
- fcntl.fcntl(conn, fcntl.F_SETFL, flags)
-
-
-class Failure(Exception):
- pass
-
-
-class Popen(subprocess.Popen):
- """Adds timeout support on stdout and stderr.
-
- Inspired by
- http://code.activestate.com/recipes/440554-module-to-allow-asynchronous-subprocess-use-on-win/
- """
- def recv(self, maxsize=None, timeout=None):
- """Reads from stdout asynchronously."""
- return self._recv('stdout', maxsize, timeout)
-
- def recv_err(self, maxsize=None, timeout=None):
- """Reads from stderr asynchronously."""
- return self._recv('stderr', maxsize, timeout)
-
- def _close(self, which):
- getattr(self, which).close()
- setattr(self, which, None)
-
- def _recv(self, which, maxsize, timeout):
- conn = getattr(self, which)
- if conn is None:
- return None
- data = recv_impl(conn, max(maxsize or 1024, 1), timeout or 0)
- if not data:
- return self._close(which)
- if self.universal_newlines:
- data = self._translate_newlines(data)
- return data
-
-
-def call_with_timeout(cmd, timeout, **kwargs):
- """Runs an executable with an optional timeout."""
- proc = Popen(
- cmd,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- **kwargs)
- if timeout:
- start = time.time()
- output = ''
- while proc.poll() is None:
- remaining = max(timeout - (time.time() - start), 0.001)
- data = proc.recv(timeout=remaining)
- if data:
- output += data
- if (time.time() - start) >= timeout:
- break
- if (time.time() - start) >= timeout and proc.poll() is None:
- logging.debug('Kill %s %s' % (time.time() - start, timeout))
- proc.kill()
- proc.wait()
- # Try reading a last time.
- while True:
- data = proc.recv()
- if not data:
- break
- output += data
- else:
- # This code path is much faster.
- output = proc.communicate()[0]
- return output, proc.returncode
-
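Example usage, as a sketch assuming a POSIX 'sleep' binary is on PATH: when the timeout fires, the child is killed and whatever output was collected is returned along with a non-zero return code:

    output, returncode = call_with_timeout(['sleep', '10'], timeout=0.5)
    assert returncode != 0  # Killed by the timeout, so non-zero.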
-
-class QueueWithTimeout(Queue.Queue):
- """Implements timeout support in join()."""
-
- # QueueWithTimeout.join: Arguments number differs from overridden method
- # pylint: disable=W0221
- def join(self, timeout=None):
- """Returns True if all tasks are finished."""
- if not timeout:
- return Queue.Queue.join(self)
- start = time.time()
- self.all_tasks_done.acquire()
- try:
- while self.unfinished_tasks:
- # Time left before the timeout elapses.
- remaining = timeout - (time.time() - start)
- if remaining <= 0:
- break
- self.all_tasks_done.wait(remaining)
- return not self.unfinished_tasks
- finally:
- self.all_tasks_done.release()
-
-
-class WorkerThread(threading.Thread):
- """Keeps the results of each task in a thread-local outputs variable."""
- def __init__(self, tasks, *args, **kwargs):
- super(WorkerThread, self).__init__(*args, **kwargs)
- self._tasks = tasks
- self.outputs = []
- self.exceptions = []
-
- self.daemon = True
- self.start()
-
- def run(self):
- """Runs until a None task is queued."""
- while True:
- task = self._tasks.get()
- if task is None:
- # We're done.
- return
- try:
- func, args, kwargs = task
- self.outputs.append(func(*args, **kwargs))
- except Exception, e:
- logging.error('Caught exception! %s' % e)
- self.exceptions.append(sys.exc_info())
- finally:
- self._tasks.task_done()
-
-
-class ThreadPool(object):
- """Implements a multithreaded worker pool oriented for mapping jobs with
- thread-local result storage.
- """
- def __init__(self, num_threads):
- self._tasks = QueueWithTimeout()
- self._workers = [
- WorkerThread(self._tasks, name='worker-%d' % i)
- for i in range(num_threads)
- ]
-
- def add_task(self, func, *args, **kwargs):
- """Adds a task, a function to be executed by a worker.
-
- The function's return value will be stored in the worker's thread-local
- outputs list.
- """
- self._tasks.put((func, args, kwargs))
-
- def join(self, progress=None, timeout=None):
- """Extracts all the results from each threads unordered."""
- if progress and timeout:
- while not self._tasks.join(timeout):
- progress.print_update()
- progress.print_update()
- else:
- self._tasks.join()
- out = []
- for w in self._workers:
- # Re-raise the first exception caught by a worker thread, if any.
- if w.exceptions:
- raise w.exceptions[0][0], w.exceptions[0][1], w.exceptions[0][2]
- out.extend(w.outputs)
- w.outputs = []
- return out
-
- def close(self):
- """Closes all the threads."""
- for _ in range(len(self._workers)):
- # Enqueueing None causes the worker to stop.
- self._tasks.put(None)
- for t in self._workers:
- t.join()
-
- def __enter__(self):
- """Enables 'with' statement."""
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- """Enables 'with' statement."""
- self.close()
-
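Typical usage of the pool, as a sketch; the results are collected from the workers' thread-local lists, so they come back unordered:

    with ThreadPool(4) as pool:
      for i in range(10):
        pool.add_task(lambda x: x * x, i)
      squares = pool.join()
    assert sorted(squares) == [x * x for x in range(10)]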
-
-class Progress(object):
- """Prints progress and accepts updates thread-safely."""
- def __init__(self, size):
- # To be used in the primary thread
- self.last_printed_line = ''
- self.index = 0
- self.start = time.time()
- self.size = size
-
- # To be used in all threads.
- self.queued_lines = Queue.Queue()
-
- def update_item(self, name, index=True, size=False):
- self.queued_lines.put((name, index, size))
-
- def print_update(self):
- """Prints the current status."""
- while True:
- try:
- name, index, size = self.queued_lines.get_nowait()
- except Queue.Empty:
- break
-
- if size:
- self.size += 1
- if index:
- self.index += 1
- alignment = str(len(str(self.size)))
- next_line = ('[%' + alignment + 'd/%d] %6.2fs %s') % (
- self.index,
- self.size,
- time.time() - self.start,
- name)
- # Fill it with whitespace.
- # TODO(maruel): Read the console width when possible and trim
- # next_line.
- # TODO(maruel): When no console is used, do not fill with whitespace
- # but use \n instead.
- prefix = '\r' if self.last_printed_line else ''
- line = '%s%s%s' % (
- prefix,
- next_line,
- ' ' * max(0, len(self.last_printed_line) - len(next_line)))
- self.last_printed_line = next_line
- else:
- line = '\n%s\n' % name.strip('\n')
- self.last_printed_line = ''
-
- sys.stdout.write(line)
-
-
-def fix_python_path(cmd):
- """Returns the fixed command line to call the right python executable."""
- out = cmd[:]
- if out[0] == 'python':
- out[0] = sys.executable
- elif out[0].endswith('.py'):
- out.insert(0, sys.executable)
- return out
-
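Example behavior: the bare 'python' token is replaced with sys.executable, and a command starting with a .py script gets the interpreter prepended:

    import sys

    assert fix_python_path(['python', 'foo.py']) == [sys.executable, 'foo.py']
    assert fix_python_path(['foo.py', '--bar']) == [sys.executable, 'foo.py', '--bar']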
-
-def setup_gtest_env():
- """Copy the enviroment variables and setup for running a gtest."""
- env = os.environ.copy()
- for name in GTEST_ENV_VARS_TO_REMOVE:
- env.pop(name, None)
-
- # Forcibly enable color by default, if not already disabled.
- env.setdefault('GTEST_COLOR', 'on')
-
- return env
-
-
-def gtest_list_tests(cmd):
- """List all the test cases for a google test.
-
- See more info at http://code.google.com/p/googletest/.
- """
- cmd = cmd[:]
- cmd.append('--gtest_list_tests')
- env = setup_gtest_env()
- try:
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- env=env)
- except OSError, e:
- raise Failure('Failed to run %s\n%s' % (' '.join(cmd), str(e)))
- out, err = p.communicate()
- if p.returncode:
- raise Failure(
- 'Failed to run %s\nstdout:\n%s\nstderr:\n%s' %
- (' '.join(cmd), out, err), p.returncode)
- # pylint: disable=E1103
- if err and not err.startswith('Xlib: extension "RANDR" missing on display '):
- logging.error('Unexpected spew in gtest_list_tests:\n%s\n%s', err, cmd)
- return out
-
-
-def filter_shards(tests, index, shards):
- """Filters the shards.
-
- Watch out for integer-based arithmetic.
- """
- # The following code could be made more terse but I liked the extra clarity.
- assert 0 <= index < shards
- total = len(tests)
- quotient, remainder = divmod(total, shards)
- # One extra item of the remainder goes to each of the first |remainder| shards.
- # For example, with total == 5, index == 1, shards == 3
- # min_bound == 2, max_bound == 4.
- min_bound = quotient * index + min(index, remainder)
- max_bound = quotient * (index + 1) + min(index + 1, remainder)
- return tests[min_bound:max_bound]
-
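A worked example of the shard math above: with 5 tests over 3 shards, quotient is 1 and remainder is 2, so the two extra items go to the first two shards:

    tests = ['t0', 't1', 't2', 't3', 't4']
    assert filter_shards(tests, 0, 3) == ['t0', 't1']
    assert filter_shards(tests, 1, 3) == ['t2', 't3']  # min_bound 2, max_bound 4.
    assert filter_shards(tests, 2, 3) == ['t4']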
-
-def filter_bad_tests(tests, disabled=False, fails=False, flaky=False):
- """Filters out DISABLED_, FAILS_ or FLAKY_ tests."""
- def starts_with(a, b, prefix):
- return a.startswith(prefix) or b.startswith(prefix)
-
- def valid(test):
- fixture, case = test.split('.', 1)
- if not disabled and starts_with(fixture, case, 'DISABLED_'):
- return False
- if not fails and starts_with(fixture, case, 'FAILS_'):
- return False
- if not flaky and starts_with(fixture, case, 'FLAKY_'):
- return False
- return True
-
- return [test for test in tests if valid(test)]
-
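Example of the prefix filtering with hypothetical test names; the prefix may sit on either the fixture or the case name:

    tests = ['Foo.Bar', 'Foo.DISABLED_Baz', 'FLAKY_Foo.Qux', 'Foo.FAILS_Quux']
    assert filter_bad_tests(tests) == ['Foo.Bar']
    assert filter_bad_tests(tests, disabled=True) == ['Foo.Bar', 'Foo.DISABLED_Baz']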
-
-def parse_gtest_cases(out):
- """Returns the flattened list of test cases in the executable.
-
- The returned list is sorted so it is not dependent on the order of the linked
- objects.
-
- Expected format is a concatenation of this:
- TestFixture1
- TestCase1
- TestCase2
- """
- tests = []
- fixture = None
- lines = out.splitlines()
- while lines:
- line = lines.pop(0)
- if not line:
- break
- if not line.startswith(' '):
- fixture = line
- else:
- case = line[2:]
- if case.startswith('YOU HAVE'):
- # It's a 'YOU HAVE foo bar' line. We're done.
- break
- assert ' ' not in case
- tests.append(fixture + case)
- return sorted(tests)
-
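A small sketch of the expected --gtest_list_tests output and its parsing; note that the fixture line keeps its trailing dot, so fixture + case forms the full test name:

    sample = (
        'Foo.\n'
        '  Bar\n'
        '  Baz\n'
        'DISABLED_Fixture.\n'
        '  Case\n')
    assert parse_gtest_cases(sample) == [
        'DISABLED_Fixture.Case', 'Foo.Bar', 'Foo.Baz']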
-
-def list_test_cases(cmd, index, shards, disabled, fails, flaky):
- """Returns the list of test cases according to the specified criterias."""
- tests = parse_gtest_cases(gtest_list_tests(cmd))
- if shards:
- tests = filter_shards(tests, index, shards)
- return filter_bad_tests(tests, disabled, fails, flaky)
-
-
-class RunSome(object):
- """Thread-safe object deciding if testing should continue."""
- def __init__(self, expected_count, retries, min_failures, max_failure_ratio):
- """Determines if it is better to give up testing after an amount of failures
- and successes.
-
- Arguments:
- - expected_count is the expected number of elements to run.
- - retries is how many times a failing element can be retried. retries should
- be set to the maximum number of retries per failure. This permits
- dampening the curve to determine threshold where to stop.
- - min_failures is the minimum number of failures to tolerate, to put a lower
- limit when expected_count is small. This value is multiplied by the number
- of retries.
- - max_failure_ratio is the ratio of permitted failures, e.g. 0.1 to stop
- after 10% of failed test cases.
-
- For large values of expected_count, the number of tolerated failures will be
- at maximum "(expected_count * retries) * max_failure_ratio".
-
- For small values of expected_count, the number of tolerated failures will be
- at least "min_failures * retries".
- """
- assert 0 < expected_count
- assert 0 <= retries < 100
- assert 0 <= min_failures
- assert 0. < max_failure_ratio < 1.
- # Constants.
- self._expected_count = expected_count
- self._retries = retries
- self._min_failures = min_failures
- self._max_failure_ratio = max_failure_ratio
-
- self._min_failures_tolerated = self._min_failures * self._retries
- # Pre-calculate the maximum number of allowable failures. Note that
- # _max_failures can be lower than _min_failures.
- self._max_failures_tolerated = round(
- (expected_count * retries) * max_failure_ratio)
-
- # Variables.
- self._lock = threading.Lock()
- self._passed = 0
- self._failures = 0
-
- def should_stop(self):
- """Stops once a threshold was reached. This includes retries."""
- with self._lock:
- # Accept at least the minimum number of failures.
- if self._failures <= self._min_failures_tolerated:
- return False
- return self._failures >= self._max_failures_tolerated
-
- def got_result(self, passed):
- with self._lock:
- if passed:
- self._passed += 1
- else:
- self._failures += 1
-
- def __str__(self):
- return '%s(%d, %d, %d, %.3f)' % (
- self.__class__.__name__,
- self._expected_count,
- self._retries,
- self._min_failures,
- self._max_failure_ratio)
-
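A worked example of the tolerance math: with 1000 expected test cases, 3 retries, min_failures=2 and max_failure_ratio=0.1, at least 2 * 3 == 6 failures are always tolerated, and testing stops at round(1000 * 3 * 0.1) == 300 failures:

    decider = RunSome(1000, 3, 2, 0.1)
    for _ in range(6):
      decider.got_result(False)
    assert not decider.should_stop()  # Still within min_failures * retries.
    for _ in range(294):
      decider.got_result(False)
    assert decider.should_stop()  # 300 failures reach the 10% ceiling.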
-
-class RunAll(object):
- """Never fails."""
- @staticmethod
- def should_stop():
- return False
- @staticmethod
- def got_result(_):
- pass
-
-
-class Runner(object):
- def __init__(self, cmd, cwd_dir, timeout, progress, retry_count, decider):
- # Constants
- self.cmd = cmd[:]
- self.cwd_dir = cwd_dir
- self.timeout = timeout
- self.progress = progress
- self.retry_count = retry_count
- # It is important to remove the shard environment variables since they could
- # conflict with --gtest_filter.
- self.env = setup_gtest_env()
- self.decider = decider
-
- def map(self, test_case):
- """Traces a single test case and returns its output."""
- cmd = self.cmd[:]
- cmd.append('--gtest_filter=%s' % test_case)
- out = []
- for retry in range(self.retry_count):
- if self.decider.should_stop():
- break
-
- start = time.time()
- output, returncode = call_with_timeout(
- cmd,
- self.timeout,
- cwd=self.cwd_dir,
- stderr=subprocess.STDOUT,
- env=self.env)
- duration = time.time() - start
- data = {
- 'test_case': test_case,
- 'returncode': returncode,
- 'duration': duration,
- # It needs to be valid utf-8, otherwise it can't be stored.
- 'output': output.decode('ascii', 'ignore').encode('utf-8'),
- }
- if '[ RUN ]' not in output:
- # Can't find gtest marker, mark it as invalid.
- returncode = returncode or 1
- self.decider.got_result(not bool(returncode))
- out.append(data)
- if sys.platform == 'win32':
- output = output.replace('\r\n', '\n')
- size = returncode and retry != self.retry_count - 1
- if retry:
- self.progress.update_item(
- '%s (%.2fs) - retry #%d' % (test_case, duration, retry),
- True,
- size)
- else:
- self.progress.update_item(
- '%s (%.2fs)' % (test_case, duration), True, size)
- if logging.getLogger().isEnabledFor(logging.INFO):
- self.progress.update_item(output, False, False)
- if not returncode:
- break
- else:
- # The test failed. Print its output. No need to print it with logging
- # level at INFO since it was already printed above.
- if not logging.getLogger().isEnabledFor(logging.INFO):
- self.progress.update_item(output, False, False)
- return out
-
-
-def get_test_cases(cmd, whitelist, blacklist, index, shards):
- """Returns the filtered list of test cases.
-
- This is done synchronously.
- """
- try:
- tests = list_test_cases(cmd, index, shards, False, False, False)
- except Failure, e:
- print e.args[0]
- return None
-
- if shards:
- # This is necessary for Swarm log parsing.
- print 'Note: This is test shard %d of %d.' % (index+1, shards)
-
- # Filters the test cases with the two lists.
- if blacklist:
- tests = [
- t for t in tests if not any(fnmatch.fnmatch(t, s) for s in blacklist)
- ]
- if whitelist:
- tests = [
- t for t in tests if any(fnmatch.fnmatch(t, s) for s in whitelist)
- ]
- logging.info('Found %d test cases in %s' % (len(tests), ' '.join(cmd)))
- return tests
-
-
-def LogResults(result_file, results):
- """Write the results out to a file if one is given."""
- if not result_file:
- return
- with open(result_file, 'wb') as f:
- json.dump(results, f, sort_keys=True, indent=2)
-
-
-def run_test_cases(cmd, test_cases, jobs, timeout, run_all, result_file):
- """Traces test cases one by one."""
- if not test_cases:
- return 0
- progress = Progress(len(test_cases))
- retries = 3
- if run_all:
- decider = RunAll()
- else:
- # If 10% of test cases fail, just too bad.
- decider = RunSome(len(test_cases), retries, 2, 0.1)
- with ThreadPool(jobs) as pool:
- function = Runner(cmd, os.getcwd(), timeout, progress, retries, decider).map
- for test_case in test_cases:
- pool.add_task(function, test_case)
- results = pool.join(progress, 0.1)
- duration = time.time() - progress.start
- results = dict((item[0]['test_case'], item) for item in results if item)
- LogResults(result_file, results)
- sys.stdout.write('\n')
- total = len(results)
- if not total:
- return 1
-
- # Classify the results
- success = []
- flaky = []
- fail = []
- nb_runs = 0
- for test_case in sorted(results):
- items = results[test_case]
- nb_runs += len(items)
- if not any(not i['returncode'] for i in items):
- fail.append(test_case)
- elif len(items) > 1 and any(not i['returncode'] for i in items):
- flaky.append(test_case)
- elif len(items) == 1 and items[0]['returncode'] == 0:
- success.append(test_case)
- else:
- assert False, items
-
- print 'Summary:'
- for test_case in sorted(flaky):
- items = results[test_case]
- print '%s is flaky (tried %d times)' % (test_case, len(items))
-
- for test_case in sorted(fail):
- print '%s failed' % (test_case)
-
- if decider.should_stop():
- print '** STOPPED EARLY due to high failure rate **'
- print 'Success: %4d %5.2f%%' % (len(success), len(success) * 100. / total)
- print 'Flaky: %4d %5.2f%%' % (len(flaky), len(flaky) * 100. / total)
- print 'Fail: %4d %5.2f%%' % (len(fail), len(fail) * 100. / total)
- print '%.1fs Done running %d tests with %d executions. %.1f test/s' % (
- duration,
- len(results),
- nb_runs,
- nb_runs / duration)
- return int(bool(fail))
-
-
-class OptionParserWithLogging(optparse.OptionParser):
- """Adds --verbose option."""
- def __init__(self, verbose=0, **kwargs):
- optparse.OptionParser.__init__(self, **kwargs)
- self.add_option(
- '-v', '--verbose',
- action='count',
- default=verbose,
- help='Use multiple times to increase verbosity')
-
- def parse_args(self, *args, **kwargs):
- options, args = optparse.OptionParser.parse_args(self, *args, **kwargs)
- levels = [logging.ERROR, logging.INFO, logging.DEBUG]
- logging.basicConfig(
- level=levels[min(len(levels)-1, options.verbose)],
- format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
- return options, args
-
-
-class OptionParserWithTestSharding(OptionParserWithLogging):
- """Adds automatic handling of test sharding"""
- def __init__(self, **kwargs):
- OptionParserWithLogging.__init__(self, **kwargs)
-
- def as_digit(variable, default):
- return int(variable) if variable.isdigit() else default
-
- group = optparse.OptionGroup(self, 'Which shard to run')
- group.add_option(
- '-I', '--index',
- type='int',
- default=as_digit(os.environ.get('GTEST_SHARD_INDEX', ''), None),
- help='Shard index to run')
- group.add_option(
- '-S', '--shards',
- type='int',
- default=as_digit(os.environ.get('GTEST_TOTAL_SHARDS', ''), None),
- help='Total number of shards; used with --index to pick the tests to run')
- self.add_option_group(group)
-
- def parse_args(self, *args, **kwargs):
- options, args = OptionParserWithLogging.parse_args(self, *args, **kwargs)
- if bool(options.shards) != bool(options.index is not None):
- self.error('Use both --index X --shards Y or none of them')
- return options, args
-
-
-class OptionParserWithTestShardingAndFiltering(OptionParserWithTestSharding):
- """Adds automatic handling of test sharding and filtering."""
- def __init__(self, *args, **kwargs):
- OptionParserWithTestSharding.__init__(self, *args, **kwargs)
-
- group = optparse.OptionGroup(self, 'Which test cases to run')
- group.add_option(
- '-w', '--whitelist',
- default=[],
- action='append',
- help='filter to apply to test cases to run, wildcard-style, defaults '
- 'to all tests')
- group.add_option(
- '-b', '--blacklist',
- default=[],
- action='append',
- help='filter to apply to test cases to skip, wildcard-style, defaults '
- 'to no tests')
- group.add_option(
- '-T', '--test-case-file',
- help='File containing the exact list of test cases to run')
- group.add_option(
- '--gtest_filter',
- default=os.environ.get('GTEST_FILTER', ''),
- help='Runs a single test, provided to keep compatibility with '
- 'other tools')
- self.add_option_group(group)
-
- def parse_args(self, *args, **kwargs):
- options, args = OptionParserWithTestSharding.parse_args(
- self, *args, **kwargs)
-
- if options.gtest_filter:
- # Override any other option.
- # Based on UnitTestOptions::FilterMatchesTest() in
- # http://code.google.com/p/googletest/source/browse/#svn%2Ftrunk%2Fsrc
- if '-' in options.gtest_filter:
- options.whitelist, options.blacklist = options.gtest_filter.split('-',
- 1)
- else:
- options.whitelist = options.gtest_filter
- options.blacklist = ''
- options.whitelist = [i for i in options.whitelist.split(':') if i]
- options.blacklist = [i for i in options.blacklist.split(':') if i]
-
- return options, args
-
- @staticmethod
- def process_gtest_options(cmd, options):
- """Grabs the test cases."""
- if options.test_case_file:
- with open(options.test_case_file, 'r') as f:
- return sorted(filter(None, f.read().splitlines()))
- else:
- return get_test_cases(
- cmd,
- options.whitelist,
- options.blacklist,
- options.index,
- options.shards)
-
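The --gtest_filter override mirrors gtest's positive-negative syntax; a small standalone sketch of the splitting performed above:

    gtest_filter = 'Foo.*:Bar.*-Foo.Slow'
    if '-' in gtest_filter:
      positive, negative = gtest_filter.split('-', 1)
    else:
      positive, negative = gtest_filter, ''
    whitelist = [i for i in positive.split(':') if i]
    blacklist = [i for i in negative.split(':') if i]
    assert whitelist == ['Foo.*', 'Bar.*']
    assert blacklist == ['Foo.Slow']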
-
-class OptionParserTestCases(OptionParserWithTestShardingAndFiltering):
- def __init__(self, *args, **kwargs):
- OptionParserWithTestShardingAndFiltering.__init__(self, *args, **kwargs)
- self.add_option(
- '-j', '--jobs',
- type='int',
- default=num_processors(),
- help='number of parallel jobs; default=%default')
- self.add_option(
- '-t', '--timeout',
- type='int',
- default=120,
- help='Timeout for a single test case, in seconds; default: %default')
-
-
-def main(argv):
- """CLI frontend to validate arguments."""
- parser = OptionParserTestCases(
- usage='%prog <options> [gtest]',
- verbose=int(os.environ.get('ISOLATE_DEBUG', 0)))
- parser.add_option(
- '--run-all',
- action='store_true',
- default=bool(int(os.environ.get('RUN_TEST_CASES_RUN_ALL', '0'))),
- help='Do not fail early when a large number of test cases fail')
- parser.add_option(
- '--no-dump',
- action='store_true',
- help='do not generate a .run_test_cases file')
- parser.add_option(
- '--result',
- default=os.environ.get('RUN_TEST_CASES_RESULT_FILE', ''),
- help='Override the default name of the generated .run_test_cases file')
- parser.add_option(
- '--gtest_list_tests',
- action='store_true',
- help='List all the test cases unformatted. Keeps compatibility with the '
- 'executable itself.')
- options, args = parser.parse_args(argv)
-
- if not args:
- parser.error(
- 'Please provide the executable line to run; if you need fancy things '
- 'like xvfb, start this script from *inside* xvfb, it\'ll be much '
- 'faster.')
-
- cmd = fix_python_path(args)
-
- if options.gtest_list_tests:
- # Special case, return the output of the target unmodified.
- return subprocess.call(args + ['--gtest_list_tests'])
-
- test_cases = parser.process_gtest_options(cmd, options)
- if not test_cases:
- # If test_cases is None then there was a problem generating the tests to
- # run, so this should be considered a failure.
- return int(test_cases is None)
-
- if options.no_dump:
- result_file = None
- else:
- if options.result:
- result_file = options.result
- else:
- result_file = '%s.run_test_cases' % args[-1]
-
- return run_test_cases(
- cmd,
- test_cases,
- options.jobs,
- options.timeout,
- options.run_all,
- result_file)
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/tools/isolate/run_test_from_archive.py b/tools/isolate/run_test_from_archive.py
deleted file mode 100755
index 92abce2..0000000
--- a/tools/isolate/run_test_from_archive.py
+++ /dev/null
@@ -1,965 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Reads a manifest, creates a tree of hardlinks and runs the test.
-
-Keeps a local cache.
-"""
-
-import ctypes
-import hashlib
-import json
-import logging
-import optparse
-import os
-import Queue
-import re
-import shutil
-import stat
-import subprocess
-import sys
-import tempfile
-import threading
-import time
-import urllib
-
-
-# Types of action accepted by recreate_tree().
-HARDLINK, SYMLINK, COPY = range(1, 4)
-
-RE_IS_SHA1 = re.compile(r'^[a-fA-F0-9]{40}$')
-
-
-class ConfigError(ValueError):
- """Generic failure to load a manifest."""
- pass
-
-
-class MappingError(OSError):
- """Failed to recreate the tree."""
- pass
-
-
-def get_flavor():
- """Returns the system default flavor. Copied from gyp/pylib/gyp/common.py."""
- flavors = {
- 'cygwin': 'win',
- 'win32': 'win',
- 'darwin': 'mac',
- 'sunos5': 'solaris',
- 'freebsd7': 'freebsd',
- 'freebsd8': 'freebsd',
- }
- return flavors.get(sys.platform, 'linux')
-
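For example, sys.platform is 'linux2' on Linux under Python 2, which is absent from the map, so the 'linux' default applies; the possible flavors are:

    assert get_flavor() in ('win', 'mac', 'solaris', 'freebsd', 'linux')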
-
-def os_link(source, link_name):
- """Add support for os.link() on Windows."""
- if sys.platform == 'win32':
- if not ctypes.windll.kernel32.CreateHardLinkW(
- unicode(link_name), unicode(source), 0):
- raise OSError()
- else:
- os.link(source, link_name)
-
-
-def readable_copy(outfile, infile):
- """Makes a copy of the file that is readable by everyone."""
- shutil.copy(infile, outfile)
- read_enabled_mode = (os.stat(outfile).st_mode | stat.S_IRUSR |
- stat.S_IRGRP | stat.S_IROTH)
- os.chmod(outfile, read_enabled_mode)
-
-
-def link_file(outfile, infile, action):
- """Links a file. The type of link depends on |action|."""
- logging.debug('Mapping %s to %s' % (infile, outfile))
- if action not in (HARDLINK, SYMLINK, COPY):
- raise ValueError('Unknown mapping action %s' % action)
- if not os.path.isfile(infile):
- raise MappingError('%s is missing' % infile)
- if os.path.isfile(outfile):
- raise MappingError(
- '%s already exists; insize:%d; outsize:%d' %
- (outfile, os.stat(infile).st_size, os.stat(outfile).st_size))
-
- if action == COPY:
- readable_copy(outfile, infile)
- elif action == SYMLINK and sys.platform != 'win32':
- # On Windows, symlinks are converted to hardlinks and fall back to copy.
- os.symlink(infile, outfile)
- else:
- try:
- os_link(infile, outfile)
- except OSError:
- # Probably a different file system.
- logging.warn(
- 'Failed to hardlink, falling back to copying %s to %s' % (
- infile, outfile))
- readable_copy(outfile, infile)
-
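Example usage with hypothetical temp paths: within one filesystem, HARDLINK produces a real hardlink, and across filesystems it silently degrades to a readable copy:

    import os
    import tempfile

    workdir = tempfile.mkdtemp()
    src = os.path.join(workdir, 'src.txt')
    with open(src, 'w') as f:
      f.write('hello')
    dst = os.path.join(workdir, 'dst.txt')
    link_file(dst, src, HARDLINK)  # Same filesystem, so a true hardlink.
    assert open(dst).read() == 'hello'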
-
-def _set_write_bit(path, read_only):
- """Sets or resets the executable bit on a file or directory."""
- mode = os.lstat(path).st_mode
- if read_only:
- mode = mode & 0500
- else:
- mode = mode | 0200
- if hasattr(os, 'lchmod'):
- os.lchmod(path, mode) # pylint: disable=E1101
- else:
- if stat.S_ISLNK(mode):
- # Skip symlink without lchmod() support.
- logging.debug('Can\'t change +w bit on symlink %s' % path)
- return
-
- # TODO(maruel): Implement proper DACL modification on Windows.
- os.chmod(path, mode)
-
-
-def make_writable(root, read_only):
- """Toggle the writable bit on a directory tree."""
- root = os.path.abspath(root)
- for dirpath, dirnames, filenames in os.walk(root, topdown=True):
- for filename in filenames:
- _set_write_bit(os.path.join(dirpath, filename), read_only)
-
- for dirname in dirnames:
- _set_write_bit(os.path.join(dirpath, dirname), read_only)
-
-
-def rmtree(root):
- """Wrapper around shutil.rmtree() to retry automatically on Windows."""
- make_writable(root, False)
- if sys.platform == 'win32':
- for i in range(3):
- try:
- shutil.rmtree(root)
- break
- except WindowsError: # pylint: disable=E0602
- delay = (i+1)*2
- print >> sys.stderr, (
- 'The test has a subprocess outliving it. Sleeping %d seconds.' % delay)
- time.sleep(delay)
- else:
- shutil.rmtree(root)
-
-
-def is_same_filesystem(path1, path2):
- """Returns True if both paths are on the same filesystem.
-
- This is required to enable the use of hardlinks.
- """
- assert os.path.isabs(path1), path1
- assert os.path.isabs(path2), path2
- if sys.platform == 'win32':
- # If the drive letter mismatches, assume it's a separate partition.
- # TODO(maruel): It should look at the underlying drive, a drive letter could
- # be a mount point to a directory on another drive.
- assert re.match(r'^[a-zA-Z]\:\\.*', path1), path1
- assert re.match(r'^[a-zA-Z]\:\\.*', path2), path2
- if path1[0].lower() != path2[0].lower():
- return False
- return os.stat(path1).st_dev == os.stat(path2).st_dev
-
-
-def get_free_space(path):
- """Returns the number of free bytes."""
- if sys.platform == 'win32':
- free_bytes = ctypes.c_ulonglong(0)
- ctypes.windll.kernel32.GetDiskFreeSpaceExW(
- ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes))
- return free_bytes.value
- f = os.statvfs(path)
- return f.f_bfree * f.f_frsize
-
-
-def make_temp_dir(prefix, root_dir):
- """Returns a temporary directory on the same file system as root_dir."""
- base_temp_dir = None
- if not is_same_filesystem(root_dir, tempfile.gettempdir()):
- base_temp_dir = os.path.dirname(root_dir)
- return tempfile.mkdtemp(prefix=prefix, dir=base_temp_dir)
-
-
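A quick sketch of why make_temp_dir() matters: the output directory should be
on the same file system as the cache so that hardlinking never degrades to a
copy (this should hold unless root_dir is itself a mount point):

    cache_dir = os.path.abspath('cache')
    outdir = make_temp_dir('run_tha_test', cache_dir)
    assert is_same_filesystem(cache_dir, outdir)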
-def load_manifest(content):
- """Verifies the manifest is valid and loads this object with the json data.
- """
- try:
- data = json.loads(content)
- except ValueError:
- raise ConfigError('Failed to parse: %s...' % content[:100])
-
- if not isinstance(data, dict):
- raise ConfigError('Expected dict, got %r' % data)
-
- for key, value in data.iteritems():
- if key == 'command':
- if not isinstance(value, list):
- raise ConfigError('Expected list, got %r' % value)
- for subvalue in value:
- if not isinstance(subvalue, basestring):
- raise ConfigError('Expected string, got %r' % subvalue)
-
- elif key == 'files':
- if not isinstance(value, dict):
- raise ConfigError('Expected dict, got %r' % value)
- for subkey, subvalue in value.iteritems():
- if not isinstance(subkey, basestring):
- raise ConfigError('Expected string, got %r' % subkey)
- if not isinstance(subvalue, dict):
- raise ConfigError('Expected dict, got %r' % subvalue)
- for subsubkey, subsubvalue in subvalue.iteritems():
- if subsubkey == 'link':
- if not isinstance(subsubvalue, basestring):
- raise ConfigError('Expected string, got %r' % subsubvalue)
- elif subsubkey == 'mode':
- if not isinstance(subsubvalue, int):
- raise ConfigError('Expected int, got %r' % subsubvalue)
- elif subsubkey == 'sha-1':
- if not RE_IS_SHA1.match(subsubvalue):
- raise ConfigError('Expected sha-1, got %r' % subsubvalue)
- elif subsubkey == 'size':
- if not isinstance(subsubvalue, int):
- raise ConfigError('Expected int, got %r' % subsubvalue)
- elif subsubkey == 'timestamp':
- if not isinstance(subsubvalue, int):
- raise ConfigError('Expected int, got %r' % subsubvalue)
- elif subsubkey == 'touched_only':
- if not isinstance(subsubvalue, bool):
- raise ConfigError('Expected bool, got %r' % subsubvalue)
- else:
- raise ConfigError('Unknown subsubkey %s' % subsubkey)
-        if 'sha-1' in subvalue and 'link' in subvalue:
- raise ConfigError(
- 'Did not expect both \'sha-1\' and \'link\', got: %r' % subvalue)
-
- elif key == 'includes':
- if not isinstance(value, list):
- raise ConfigError('Expected list, got %r' % value)
- for subvalue in value:
- if not RE_IS_SHA1.match(subvalue):
- raise ConfigError('Expected sha-1, got %r' % subvalue)
-
- elif key == 'read_only':
- if not isinstance(value, bool):
- raise ConfigError('Expected bool, got %r' % value)
-
- elif key == 'relative_cwd':
- if not isinstance(value, basestring):
- raise ConfigError('Expected string, got %r' % value)
-
- elif key == 'os':
- if value != get_flavor():
- raise ConfigError(
- 'Expected \'os\' to be \'%s\' but got \'%s\'' %
- (get_flavor(), value))
-
- else:
- raise ConfigError('Unknown key %s' % key)
-
- return data
-
-
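For reference, a minimal manifest that passes the validation above. The sha-1
is a placeholder and 'os' must match get_flavor() on the loading machine:

    {
      "command": ["python", "fail.py"],
      "files": {
        "fail.py": {
          "sha-1": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
          "size": 0,
          "mode": 488
        }
      },
      "read_only": false,
      "relative_cwd": ".",
      "os": "linux"
    }

A file entry may carry either 'sha-1' or 'link', never both.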
-def fix_python_path(cmd):
- """Returns the fixed command line to call the right python executable."""
- out = cmd[:]
- if out[0] == 'python':
- out[0] = sys.executable
- elif out[0].endswith('.py'):
- out.insert(0, sys.executable)
- return out
-
-
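For example, assuming sys.executable is /usr/bin/python:

    >>> fix_python_path(['python', 'foo.py'])
    ['/usr/bin/python', 'foo.py']
    >>> fix_python_path(['foo.py', '--flag'])
    ['/usr/bin/python', 'foo.py', '--flag']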
-class Profiler(object):
- def __init__(self, name):
- self.name = name
- self.start_time = None
-
- def __enter__(self):
- self.start_time = time.time()
- return self
-
- def __exit__(self, _exc_type, _exec_value, _traceback):
- time_taken = time.time() - self.start_time
- logging.info('Profiling: Section %s took %3.3f seconds',
- self.name, time_taken)
-
-
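Profiler wraps the costly phases below as a context manager; a sketch, where
do_work() is a stand-in:

    with Profiler('GetManifests'):
      do_work()
    # Logs: 'Profiling: Section GetManifests took 1.234 seconds'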
-class Remote(object):
- """Priority based worker queue to fetch or upload files from a
- content-address server. Any function may be given as the fetcher/upload,
- as long as it takes two inputs (the item contents, and their relative
- destination).
-
- Supports local file system, CIFS or http remotes.
-
- When the priority of items is equals, works in strict FIFO mode.
- """
- # Initial and maximum number of worker threads.
- INITIAL_WORKERS = 2
- MAX_WORKERS = 16
- # Priorities.
- LOW, MED, HIGH = (1<<8, 2<<8, 3<<8)
- INTERNAL_PRIORITY_BITS = (1<<8) - 1
- RETRIES = 5
-
- def __init__(self, destination_root):
-    # Function to fetch a remote object or upload to a remote location.
- self._do_item = self.get_file_handler(destination_root)
- # Contains tuple(priority, index, obj, destination).
- self._queue = Queue.PriorityQueue()
- # Contains tuple(priority, index, obj).
- self._done = Queue.PriorityQueue()
-
- # Contains generated exceptions that haven't been handled yet.
- self._exceptions = Queue.Queue()
-
- # To keep FIFO ordering in self._queue. It is assumed xrange's iterator is
- # thread-safe.
- self._next_index = xrange(0, 1<<30).__iter__().next
-
- # Control access to the following member.
- self._ready_lock = threading.Lock()
- # Number of threads in wait state.
- self._ready = 0
-
- # Control access to the following member.
- self._workers_lock = threading.Lock()
- self._workers = []
- for _ in range(self.INITIAL_WORKERS):
- self._add_worker()
-
- def join(self):
- """Blocks until the queue is empty."""
- self._queue.join()
-
- def next_exception(self):
- """Returns the next unhandled exception, or None if there is
- no exception."""
- try:
- return self._exceptions.get_nowait()
- except Queue.Empty:
- return None
-
- def add_item(self, priority, obj, dest):
- """Retrieves an object from the remote data store.
-
- The smaller |priority| gets fetched first.
-
- Thread-safe.
- """
- assert (priority & self.INTERNAL_PRIORITY_BITS) == 0
- self._add_to_queue(priority, obj, dest)
-
- def get_result(self):
- """Returns the next file that was successfully fetched."""
- r = self._done.get()
- if r[0] == -1:
- # It's an exception.
- raise r[2][0], r[2][1], r[2][2]
- return r[2]
-
- def _add_to_queue(self, priority, obj, dest):
- with self._ready_lock:
- start_new_worker = not self._ready
- self._queue.put((priority, self._next_index(), obj, dest))
- if start_new_worker:
- self._add_worker()
-
- def _add_worker(self):
- """Add one worker thread if there isn't too many. Thread-safe."""
- with self._workers_lock:
- if len(self._workers) >= self.MAX_WORKERS:
- return False
- worker = threading.Thread(target=self._run)
- self._workers.append(worker)
- worker.daemon = True
- worker.start()
-
- def _step_done(self, result):
- """Worker helper function"""
- self._done.put(result)
- self._queue.task_done()
- if result[0] == -1:
- self._exceptions.put(sys.exc_info())
-
- def _run(self):
- """Worker thread loop."""
- while True:
- try:
-        with self._ready_lock:
-          self._ready += 1
-        item = self._queue.get()
- finally:
- with self._ready_lock:
- self._ready -= 1
- if not item:
- return
- priority, index, obj, dest = item
- try:
- self._do_item(obj, dest)
- except IOError:
- # Retry a few times, lowering the priority.
- if (priority & self.INTERNAL_PRIORITY_BITS) < self.RETRIES:
- self._add_to_queue(priority + 1, obj, dest)
- self._queue.task_done()
- continue
- # Transfers the exception back. It has maximum priority.
- self._step_done((-1, 0, sys.exc_info()))
- except:
- # Transfers the exception back. It has maximum priority.
- self._step_done((-1, 0, sys.exc_info()))
- else:
- self._step_done((priority, index, obj))
-
- @staticmethod
- def get_file_handler(file_or_url):
- """Returns a object to retrieve objects from a remote."""
- if re.match(r'^https?://.+$', file_or_url):
- file_or_url = file_or_url.rstrip('/') + '/'
- def download_file(item, dest):
- # TODO(maruel): Reuse HTTP connections. The stdlib doesn't make this
- # easy.
- source = file_or_url + item
- logging.debug('download_file(%s, %s)', source, dest)
- urllib.urlretrieve(source, dest)
- return download_file
-
- def copy_file(item, dest):
- source = os.path.join(file_or_url, item)
- logging.debug('copy_file(%s, %s)', source, dest)
- shutil.copy(source, dest)
- return copy_file
-
-
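A minimal sketch of driving Remote directly; the URL and hashes are
placeholders:

    remote = Remote('http://example.com/content/')
    remote.add_item(Remote.HIGH, '<manifest sha-1>', '/tmp/out/manifest')
    remote.add_item(Remote.MED, '<file sha-1>', '/tmp/out/file')
    # Blocks until one item completes; highest priority first. Re-raises any
    # exception a worker hit.
    obj = remote.get_result()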
-class CachePolicies(object):
- def __init__(self, max_cache_size, min_free_space, max_items):
- """
- Arguments:
- - max_cache_size: Trim if the cache gets larger than this value. If 0, the
- cache is effectively a leak.
- - min_free_space: Trim if disk free space becomes lower than this value. If
-      0, it unconditionally fills the disk.
- - max_items: Maximum number of items to keep in the cache. If 0, do not
- enforce a limit.
- """
- self.max_cache_size = max_cache_size
- self.min_free_space = min_free_space
- self.max_items = max_items
-
-
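The defaults used by main() below translate to, roughly:

    policies = CachePolicies(
        max_cache_size=20*1024*1024*1024,  # Trim above 20gb of content.
        min_free_space=1*1024*1024*1024,   # Keep at least 1gb of disk free.
        max_items=100000)                  # Keep at most 100k entries.

Passing 0 for any field disables that particular policy.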
-class Cache(object):
- """Stateful LRU cache.
-
- Saves its state as json file.
- """
- STATE_FILE = 'state.json'
-
- def __init__(self, cache_dir, remote, policies):
- """
- Arguments:
- - cache_dir: Directory where to place the cache.
- - remote: Remote where to fetch items from.
- - policies: cache retention policies.
- """
- self.cache_dir = cache_dir
- self.remote = remote
- self.policies = policies
- self.state_file = os.path.join(cache_dir, self.STATE_FILE)
-    # The (file, size) tuples are kept as a list in LRU order, e.g.
- # self.state[0] is the oldest item.
- self.state = []
- # A lookup map to speed up searching.
- self._lookup = {}
- self._dirty = False
-
- # Items currently being fetched. Keep it local to reduce lock contention.
- self._pending_queue = set()
-
- # Profiling values.
- self._added = []
- self._removed = []
- self._free_disk = 0
-
- if not os.path.isdir(self.cache_dir):
- os.makedirs(self.cache_dir)
- if os.path.isfile(self.state_file):
- try:
- self.state = json.load(open(self.state_file, 'r'))
- except (IOError, ValueError), e:
- # Too bad. The file will be overwritten and the cache cleared.
- logging.error(
- 'Broken state file %s, ignoring.\n%s' % (self.STATE_FILE, e))
- if (not isinstance(self.state, list) or
- not all(
- isinstance(i, (list, tuple)) and len(i) == 2 for i in self.state)):
- # Discard.
- self.state = []
- self._dirty = True
-
- # Ensure that all files listed in the state still exist and add new ones.
- previous = set(filename for filename, _ in self.state)
- if len(previous) != len(self.state):
- logging.warn('Cache state is corrupted')
- self._dirty = True
- self.state = []
- else:
- added = 0
- for filename in os.listdir(self.cache_dir):
- if filename == self.STATE_FILE:
- continue
- if filename in previous:
- previous.remove(filename)
- continue
- # An untracked file.
- self._dirty = True
- if not RE_IS_SHA1.match(filename):
- logging.warn('Removing unknown file %s from cache', filename)
- os.remove(self.path(filename))
- else:
- # Insert as the oldest file. It will be deleted eventually if not
- # accessed.
- self._add(filename, False)
- added += 1
- if added:
- logging.warn('Added back %d unknown files', added)
- self.state = [
- (filename, size) for filename, size in self.state
- if filename not in previous
- ]
- self._update_lookup()
-
- with Profiler('SetupTrimming'):
- self.trim()
-
- def __enter__(self):
- return self
-
- def __exit__(self, _exc_type, _exec_value, _traceback):
- with Profiler('CleanupTrimming'):
- self.trim()
-
- logging.info(
- '%4d (%7dkb) added', len(self._added), sum(self._added) / 1024)
- logging.info(
- '%4d (%7dkb) current',
- len(self.state),
- sum(i[1] for i in self.state) / 1024)
- logging.info(
- '%4d (%7dkb) removed', len(self._removed), sum(self._removed) / 1024)
- logging.info('%7dkb free', self._free_disk / 1024)
-
- def remove_lru_file(self):
- """Removes the last recently used file."""
- try:
- filename, size = self.state.pop(0)
- del self._lookup[filename]
- self._removed.append(size)
- os.remove(self.path(filename))
- self._dirty = True
- except OSError as e:
- logging.error('Error attempting to delete a file\n%s' % e)
-
- def trim(self):
- """Trims anything we don't know, make sure enough free space exists."""
- # Ensure maximum cache size.
- if self.policies.max_cache_size and self.state:
- while sum(i[1] for i in self.state) > self.policies.max_cache_size:
- self.remove_lru_file()
-
- # Ensure maximum number of items in the cache.
- if self.policies.max_items and self.state:
- while len(self.state) > self.policies.max_items:
- self.remove_lru_file()
-
- # Ensure enough free space.
- self._free_disk = get_free_space(self.cache_dir)
- while (
- self.policies.min_free_space and
- self.state and
- self._free_disk < self.policies.min_free_space):
- self.remove_lru_file()
- self._free_disk = get_free_space(self.cache_dir)
-
- self.save()
-
- def retrieve(self, priority, item):
- """Retrieves a file from the remote, if not already cached, and adds it to
- the cache.
- """
-    assert '/' not in item
- path = self.path(item)
- index = self._lookup.get(item)
- if index is None:
- if item in self._pending_queue:
- # Already pending. The same object could be referenced multiple times.
- return
- self.remote.add_item(priority, item, path)
- self._pending_queue.add(item)
- else:
- if index != len(self.state) - 1:
-        # Was already in cache. Update its LRU position by moving it to the end.
- self.state.append(self.state.pop(index))
- self._dirty = True
- self._update_lookup()
-
- def add(self, filepath, obj):
- """Forcibly adds a file to the cache."""
-    if obj not in self._lookup:
- link_file(self.path(obj), filepath, HARDLINK)
- self._add(obj, True)
-
- def path(self, item):
- """Returns the path to one item."""
- return os.path.join(self.cache_dir, item)
-
- def save(self):
- """Saves the LRU ordering."""
- json.dump(self.state, open(self.state_file, 'wb'), separators=(',',':'))
-
- def wait_for(self, items):
- """Starts a loop that waits for at least one of |items| to be retrieved.
-
- Returns the first item retrieved.
- """
- # Flush items already present.
- for item in items:
- if item in self._lookup:
- return item
-
- assert all(i in self._pending_queue for i in items), (
- items, self._pending_queue)
- # Note that:
- # len(self._pending_queue) ==
-    #   (len(self.remote._workers) - self.remote._ready +
-    #    len(self.remote._queue) + len(self.remote._done))
- # There is no lock-free way to verify that.
- while self._pending_queue:
- item = self.remote.get_result()
- self._pending_queue.remove(item)
- self._add(item, True)
- if item in items:
- return item
-
- def _add(self, item, at_end):
- """Adds an item in the internal state.
-
- If |at_end| is False, self._lookup becomes inconsistent and
- self._update_lookup() must be called.
- """
- size = os.stat(self.path(item)).st_size
- self._added.append(size)
- if at_end:
- self.state.append((item, size))
- self._lookup[item] = len(self.state) - 1
- else:
- self.state.insert(0, (item, size))
- self._dirty = True
-
- def _update_lookup(self):
- self._lookup = dict(
- (filename, index) for index, (filename, _) in enumerate(self.state))
-
-
-
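A condensed sketch of the retrieve/wait_for dance performed by Settings.load()
and run_tha_test() below; the hash is a placeholder and policies is as in the
sketch above:

    with Cache('cache', Remote('http://example.com/content/'), policies) as c:
      c.retrieve(Remote.HIGH, '<manifest sha-1>')
      item = c.wait_for(['<manifest sha-1>'])
      data = open(c.path(item), 'r').read()
    # On exit the cache is trimmed and the LRU state is saved to
    # cache/state.json.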
-class Manifest(object):
- """Represents a single parsed manifest, e.g. a .results file."""
- def __init__(self, obj_hash):
- """|obj_hash| is really the sha-1 of the file."""
- logging.debug('Manifest(%s)' % obj_hash)
- self.obj_hash = obj_hash
-    # Set once the whole left side of the tree is parsed. 'Tree' here means
-    # this manifest and all the manifests it recursively includes via the
-    # 'includes' key. The order of the manifest sha-1s in 'includes' is
-    # important, as later ones are not processed until the earlier ones are
-    # retrieved and read.
- self.can_fetch = False
-
- # Raw data.
- self.data = {}
- # A Manifest instance, one per object in self.includes.
- self.children = []
-
- # Set once the manifest is loaded.
- self._manifest_parsed = False
- # Set once the files are fetched.
- self.files_fetched = False
-
- def load(self, content):
- """Verifies the manifest is valid and loads this object with the json data.
- """
- logging.debug('Manifest.load(%s)' % self.obj_hash)
- assert not self._manifest_parsed
- self.data = load_manifest(content)
- self.children = [Manifest(i) for i in self.data.get('includes', [])]
- self._manifest_parsed = True
-
- def fetch_files(self, cache, files):
- """Adds files in this manifest not present in files dictionary.
-
- Preemptively request files.
-
- Note that |files| is modified by this function.
- """
- assert self.can_fetch
- if not self._manifest_parsed or self.files_fetched:
- return
- logging.debug('fetch_files(%s)' % self.obj_hash)
- for filepath, properties in self.data.get('files', {}).iteritems():
-      # The root manifest has priority over the files being mapped. In
-      # particular, overridden files must not be fetched.
- if filepath not in files:
- files[filepath] = properties
- if 'sha-1' in properties:
- # Preemptively request files.
- logging.debug('fetching %s' % filepath)
- cache.retrieve(Remote.MED, properties['sha-1'])
- self.files_fetched = True
-
-
-class Settings(object):
- """Results of a completely parsed manifest."""
- def __init__(self):
- self.command = []
- self.files = {}
- self.read_only = None
- self.relative_cwd = None
- # The main manifest.
- self.root = None
- logging.debug('Settings')
-
- def load(self, cache, root_manifest_hash):
- """Loads the manifest and all the included manifests asynchronously.
-
- It enables support for included manifest. They are processed in strict order
- but fetched asynchronously from the cache. This is important so that a file
- in an included manifest that is overridden by an embedding manifest is not
- fetched neededlessly. The includes are fetched in one pass and the files are
- fetched as soon as all the manifests on the left-side of the tree were
- fetched.
-
- The prioritization is very important here for nested manifests. 'includes'
- have the highest priority and the algorithm is optimized for both deep and
- wide manifests. A deep one is a long link of manifest referenced one at a
- time by one item in 'includes'. A wide one has a large number of 'includes'
- in a single manifest. 'left' is defined as an included manifest earlier in
- the 'includes' list. So the order of the elements in 'includes' is
- important.
- """
- self.root = Manifest(root_manifest_hash)
- cache.retrieve(Remote.HIGH, root_manifest_hash)
- pending = {root_manifest_hash: self.root}
- # Keeps the list of retrieved items to refuse recursive includes.
- retrieved = [root_manifest_hash]
-
- def update_self(node):
- node.fetch_files(cache, self.files)
- # Grabs properties.
- if not self.command and node.data.get('command'):
- self.command = node.data['command']
- if self.read_only is None and node.data.get('read_only') is not None:
- self.read_only = node.data['read_only']
- if (self.relative_cwd is None and
- node.data.get('relative_cwd') is not None):
- self.relative_cwd = node.data['relative_cwd']
-
- def traverse_tree(node):
- if node.can_fetch:
- if not node.files_fetched:
- update_self(node)
- will_break = False
- for i in node.children:
- if not i.can_fetch:
- if will_break:
- break
-          # Automatically mark the first one as fetchable.
- i.can_fetch = True
- will_break = True
- traverse_tree(i)
-
- while pending:
- item_hash = cache.wait_for(pending)
- item = pending.pop(item_hash)
- item.load(open(cache.path(item_hash), 'r').read())
- if item_hash == root_manifest_hash:
- # It's the root item.
- item.can_fetch = True
-
- for new_child in item.children:
- h = new_child.obj_hash
- if h in retrieved:
- raise ConfigError('Manifest %s is retrieved recursively' % h)
- pending[h] = new_child
- cache.retrieve(Remote.HIGH, h)
-
- # Traverse the whole tree to see if files can now be fetched.
- traverse_tree(self.root)
- def check(n):
- return all(check(x) for x in n.children) and n.files_fetched
- assert check(self.root)
- self.relative_cwd = self.relative_cwd or ''
- self.read_only = self.read_only or False
-
-
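A sketch of the override semantics Settings.load() implements: when manifest A
includes manifest B and both declare file 'x', A's entry wins and B's copy is
never fetched (hashes are placeholders, cache as in the sketch above):

    # A (root): {'includes': ['<sha-1 of B>'],
    #            'files': {'x': {'sha-1': '<aaa>', 'size': 1}}}
    # B:        {'files': {'x': {'sha-1': '<bbb>', 'size': 2}}}
    settings = Settings()
    settings.load(cache, '<sha-1 of A>')
    assert settings.files['x']['sha-1'] == '<aaa>'  # '<bbb>' is never fetched.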
-def run_tha_test(manifest_hash, cache_dir, remote, policies):
- """Downloads the dependencies in the cache, hardlinks them into a temporary
- directory and runs the executable.
- """
- settings = Settings()
- with Cache(cache_dir, Remote(remote), policies) as cache:
- outdir = make_temp_dir('run_tha_test', cache_dir)
- try:
- # Initiate all the files download.
- with Profiler('GetManifests') as _prof:
- # Optionally support local files.
- if not RE_IS_SHA1.match(manifest_hash):
-          # Add it to the cache. While not strictly necessary, this
-          # simplifies the rest of the code.
- h = hashlib.sha1(open(manifest_hash, 'r').read()).hexdigest()
- cache.add(manifest_hash, h)
- manifest_hash = h
- settings.load(cache, manifest_hash)
-
- if not settings.command:
- print >> sys.stderr, 'No command to run'
- return 1
-
- with Profiler('GetRest') as _prof:
- logging.debug('Creating directories')
-        # Builds the set of directories that need to be created.
- directories = set(os.path.dirname(f) for f in settings.files)
- for item in list(directories):
- while item:
- directories.add(item)
- item = os.path.dirname(item)
- for d in sorted(directories):
- if d:
- os.mkdir(os.path.join(outdir, d))
-
- # Creates the links if necessary.
- for filepath, properties in settings.files.iteritems():
- if 'link' not in properties:
- continue
- outfile = os.path.join(outdir, filepath)
- os.symlink(properties['link'], outfile)
- if 'mode' in properties:
- # It's not set on Windows.
- os.chmod(outfile, properties['mode'])
-
- # Remaining files to be processed.
-      # Note that some files may not have been downloaded yet at this point.
- remaining = dict()
- for filepath, props in settings.files.iteritems():
- if 'sha-1' in props:
- remaining.setdefault(props['sha-1'], []).append((filepath, props))
-
- # Do bookkeeping while files are being downloaded in the background.
- cwd = os.path.join(outdir, settings.relative_cwd)
- if not os.path.isdir(cwd):
- os.makedirs(cwd)
- cmd = settings.command[:]
-      # Ensure paths are correctly separated on Windows.
- cmd[0] = cmd[0].replace('/', os.path.sep)
- cmd = fix_python_path(cmd)
-
- # Now block on the remaining files to be downloaded and mapped.
- while remaining:
- obj = cache.wait_for(remaining)
- for filepath, properties in remaining.pop(obj):
- outfile = os.path.join(outdir, filepath)
- link_file(outfile, cache.path(obj), HARDLINK)
- if 'mode' in properties:
- # It's not set on Windows.
- os.chmod(outfile, properties['mode'])
-
- if settings.read_only:
- make_writable(outdir, True)
- logging.info('Running %s, cwd=%s' % (cmd, cwd))
- try:
- with Profiler('RunTest') as _prof:
- return subprocess.call(cmd, cwd=cwd)
- except OSError:
- print >> sys.stderr, 'Failed to run %s; cwd=%s' % (cmd, cwd)
- raise
- finally:
- rmtree(outdir)
-
-
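Putting it together, a minimal programmatic invocation; the URL and hash are
placeholders:

    sys.exit(run_tha_test(
        '<manifest sha-1>',
        os.path.abspath('cache'),
        'http://example.com/content/',
        CachePolicies(0, 0, 0)))  # Zeros disable all cache trimming.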
-def main():
- parser = optparse.OptionParser(
- usage='%prog <options>', description=sys.modules[__name__].__doc__)
- parser.add_option(
- '-v', '--verbose', action='count', default=0, help='Use multiple times')
- parser.add_option('--no-run', action='store_true', help='Skip the run part')
-
- group = optparse.OptionGroup(parser, 'Data source')
- group.add_option(
- '-m', '--manifest',
- metavar='FILE',
- help='File/url describing what to map or run')
- group.add_option(
- '-H', '--hash',
- help='Hash of the manifest to grab from the hash table')
-  group.add_option(
-      '-r', '--remote', metavar='URL', help='Remote where to get the items')
-  parser.add_option_group(group)
-
-  group = optparse.OptionGroup(parser, 'Cache management')
- group.add_option(
- '--cache',
- default='cache',
- metavar='DIR',
- help='Cache directory, default=%default')
- group.add_option(
- '--max-cache-size',
- type='int',
- metavar='NNN',
- default=20*1024*1024*1024,
- help='Trim if the cache gets larger than this value, default=%default')
- group.add_option(
- '--min-free-space',
- type='int',
- metavar='NNN',
- default=1*1024*1024*1024,
- help='Trim if disk free space becomes lower than this value, '
- 'default=%default')
- group.add_option(
- '--max-items',
- type='int',
- metavar='NNN',
- default=100000,
-      help='Trim if more than this number of items are in the cache, '
- 'default=%default')
- parser.add_option_group(group)
-
- options, args = parser.parse_args()
- level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)]
- logging.basicConfig(
- level=level,
- format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
-
- if bool(options.manifest) == bool(options.hash):
- parser.error('One and only one of --manifest or --hash is required.')
- if not options.remote:
- parser.error('--remote is required.')
- if args:
- parser.error('Unsupported args %s' % ' '.join(args))
-
- policies = CachePolicies(
- options.max_cache_size, options.min_free_space, options.max_items)
- try:
- return run_tha_test(
- options.manifest or options.hash,
- os.path.abspath(options.cache),
- options.remote,
- policies)
- except (ConfigError, MappingError), e:
- print >> sys.stderr, str(e)
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/shard_test_cases.py b/tools/isolate/shard_test_cases.py
deleted file mode 100755
index dc8b782..0000000
--- a/tools/isolate/shard_test_cases.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Runs a google-test shard.
-
-This provides a simple interface to run a shard from the command line,
-independent of the interpreter, e.g. cmd.exe vs bash.
-"""
-
-import optparse
-import os
-import subprocess
-import sys
-
-
-def fix_python_path(cmd):
- """Returns the fixed command line to call the right python executable."""
- out = cmd[:]
- if out[0] == 'python':
- out[0] = sys.executable
- elif out[0].endswith('.py'):
- out.insert(0, sys.executable)
- return out
-
-
-def main():
- parser = optparse.OptionParser(usage='%prog <options> [gtest]')
- parser.disable_interspersed_args()
- parser.add_option(
- '-I', '--index',
- type='int',
- default=os.environ.get('GTEST_SHARD_INDEX'),
- help='Shard index to run')
- parser.add_option(
- '-S', '--shards',
- type='int',
- default=os.environ.get('GTEST_TOTAL_SHARDS'),
-      help='Total number of shards; used with --index to pick the tests to run')
- options, args = parser.parse_args()
- env = os.environ.copy()
- env['GTEST_TOTAL_SHARDS'] = str(options.shards)
- env['GTEST_SHARD_INDEX'] = str(options.index)
- return subprocess.call(fix_python_path(args), env=env)
-
-
-if __name__ == '__main__':
- sys.exit(main())
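For context, the deleted script is equivalent to setting the gtest sharding
environment variables by hand; both forms below run shard 0 of 4, with
my_gtest.py as a stand-in:

    # Via the script:
    python shard_test_cases.py -I 0 -S 4 python my_gtest.py
    # By hand:
    GTEST_SHARD_INDEX=0 GTEST_TOTAL_SHARDS=4 python my_gtest.py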
diff --git a/tools/isolate/tests/gtest_fake/__init__.py b/tools/isolate/tests/gtest_fake/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/isolate/tests/gtest_fake/__init__.py
+++ /dev/null
diff --git a/tools/isolate/tests/gtest_fake/gtest_fake_base.py b/tools/isolate/tests/gtest_fake/gtest_fake_base.py
deleted file mode 100644
index 74cc4d2..0000000
--- a/tools/isolate/tests/gtest_fake/gtest_fake_base.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Provide common functionality to simulate a google-test executable.
-
-http://code.google.com/p/googletest/
-"""
-
-def get_test_output(test_name):
- fixture, case = test_name.split('.', 1)
- return (
- '[==========] Running 1 test from 1 test case.\n'
- '[----------] Global test environment set-up.\n'
- '[----------] 1 test from %(fixture)s\n'
- '[ RUN ] %(fixture)s.%(case)s\n'
- '[ OK ] %(fixture)s.%(case)s (0 ms)\n'
- '[----------] 1 test from %(fixture)s (0 ms total)\n'
- '\n') % {
- 'fixture': fixture,
- 'case': case,
- }
-
-
-def get_footer(number, total):
- return (
- '[----------] Global test environment tear-down\n'
- '[==========] %(number)d test from %(total)d test case ran. (0 ms total)\n'
- '[ PASSED ] %(number)d test.\n'
- '\n'
- ' YOU HAVE 5 DISABLED TESTS\n'
- '\n'
- ' YOU HAVE 2 tests with ignored failures (FAILS prefix)\n') % {
- 'number': number,
- 'total': total,
- }
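For context, get_test_output('Foo.Bar') expands the template above to:

    [==========] Running 1 test from 1 test case.
    [----------] Global test environment set-up.
    [----------] 1 test from Foo
    [ RUN ] Foo.Bar
    [ OK ] Foo.Bar (0 ms)
    [----------] 1 test from Foo (0 ms total)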
diff --git a/tools/isolate/tests/gtest_fake/gtest_fake_error.py b/tools/isolate/tests/gtest_fake/gtest_fake_error.py
deleted file mode 100755
index 73a7cb5..0000000
--- a/tools/isolate/tests/gtest_fake/gtest_fake_error.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Simulate a google-test executable that has an error when listing tests.
-
-http://code.google.com/p/googletest/
-"""
-
-import optparse
-import sys
-
-
-def main():
- parser = optparse.OptionParser()
- parser.add_option('--gtest_list_tests', action='store_true')
- parser.add_option('--gtest_filter')
- options, args = parser.parse_args()
- if args:
- parser.error('Failed to process args %s' % args)
-
- if options.gtest_list_tests:
- sys.stderr.write('Unable to list tests')
- return 1
-
- sys.stderr.write('Unable to run tests')
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/gtest_fake/gtest_fake_fail.py b/tools/isolate/tests/gtest_fake/gtest_fake_fail.py
deleted file mode 100755
index 55bdd5b..0000000
--- a/tools/isolate/tests/gtest_fake/gtest_fake_fail.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Simulate a failing google-test executable.
-
-http://code.google.com/p/googletest/
-"""
-
-import optparse
-import sys
-
-import gtest_fake_base
-
-
-TESTS = {
- 'Foo': ['Bar1', 'Bar2', 'Bar3'],
- 'Baz': ['Fail'],
-}
-TOTAL = sum(len(v) for v in TESTS.itervalues())
-
-
-def main():
- parser = optparse.OptionParser()
- parser.add_option('--gtest_list_tests', action='store_true')
- parser.add_option('--gtest_filter')
- options, args = parser.parse_args()
- if args:
- parser.error('Failed to process args %s' % args)
-
- if options.gtest_list_tests:
- for fixture, cases in TESTS.iteritems():
- print '%s.' % fixture
- for case in cases:
- print ' ' + case
- print ' YOU HAVE 2 tests with ignored failures (FAILS prefix)'
- print ''
- return 0
-
- if options.gtest_filter:
- # Simulate running one test.
- print 'Note: Google Test filter = %s\n' % options.gtest_filter
- print gtest_fake_base.get_test_output(options.gtest_filter)
- print gtest_fake_base.get_footer(1, 1)
- # Make Baz.Fail fail.
- return options.gtest_filter == 'Baz.Fail'
-
- for fixture, cases in TESTS.iteritems():
- for case in cases:
- print gtest_fake_base.get_test_output('%s.%s' % (fixture, case))
- print gtest_fake_base.get_footer(TOTAL, TOTAL)
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/gtest_fake/gtest_fake_pass.py b/tools/isolate/tests/gtest_fake/gtest_fake_pass.py
deleted file mode 100755
index 534b3e8..0000000
--- a/tools/isolate/tests/gtest_fake/gtest_fake_pass.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Simulate a passing google-test executable.
-
-http://code.google.com/p/googletest/
-"""
-
-import optparse
-import sys
-
-import gtest_fake_base
-
-
-TESTS = {
- 'Foo': ['Bar1', 'Bar2', 'Bar3'],
-}
-TOTAL = sum(len(v) for v in TESTS.itervalues())
-
-
-def main():
- parser = optparse.OptionParser()
- parser.add_option('--gtest_list_tests', action='store_true')
- parser.add_option('--gtest_filter')
- options, args = parser.parse_args()
- if args:
- parser.error('Failed to process args %s' % args)
-
- if options.gtest_list_tests:
- for fixture, cases in TESTS.iteritems():
- print '%s.' % fixture
- for case in cases:
- print ' ' + case
- print ' YOU HAVE 2 tests with ignored failures (FAILS prefix)'
- print ''
- return 0
-
- if options.gtest_filter:
- # Simulate running one test.
- print 'Note: Google Test filter = %s\n' % options.gtest_filter
- print gtest_fake_base.get_test_output(options.gtest_filter)
- print gtest_fake_base.get_footer(1, 1)
- return 0
-
- for fixture, cases in TESTS.iteritems():
- for case in cases:
- print gtest_fake_base.get_test_output('%s.%s' % (fixture, case))
- print gtest_fake_base.get_footer(TOTAL, TOTAL)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/isolate/fail.isolate b/tools/isolate/tests/isolate/fail.isolate
deleted file mode 100644
index eeeb97b..0000000
--- a/tools/isolate/tests/isolate/fail.isolate
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'command': [
- 'python',
- 'fail.py',
- ],
- 'isolate_dependency_tracked': [
- 'fail.py',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/fail.py b/tools/isolate/tests/isolate/fail.py
deleted file mode 100755
index a82098a..0000000
--- a/tools/isolate/tests/isolate/fail.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-
-def main():
- print 'Failing'
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/isolate/files1/subdir/42.txt b/tools/isolate/tests/isolate/files1/subdir/42.txt
deleted file mode 100644
index 5d343c8..0000000
--- a/tools/isolate/tests/isolate/files1/subdir/42.txt
+++ /dev/null
@@ -1 +0,0 @@
-the answer to life the universe and everything
diff --git a/tools/isolate/tests/isolate/files1/test_file1.txt b/tools/isolate/tests/isolate/files1/test_file1.txt
deleted file mode 100644
index bc56c4d..0000000
--- a/tools/isolate/tests/isolate/files1/test_file1.txt
+++ /dev/null
@@ -1 +0,0 @@
-Foo
diff --git a/tools/isolate/tests/isolate/files1/test_file2.txt b/tools/isolate/tests/isolate/files1/test_file2.txt
deleted file mode 100644
index ebd7525..0000000
--- a/tools/isolate/tests/isolate/files1/test_file2.txt
+++ /dev/null
@@ -1 +0,0 @@
-Bar
diff --git a/tools/isolate/tests/isolate/files2 b/tools/isolate/tests/isolate/files2
deleted file mode 120000
index 49a73ae..0000000
--- a/tools/isolate/tests/isolate/files2
+++ /dev/null
@@ -1 +0,0 @@
-files1 \ No newline at end of file
diff --git a/tools/isolate/tests/isolate/missing_trailing_slash.isolate b/tools/isolate/tests/isolate/missing_trailing_slash.isolate
deleted file mode 100644
index 676d102..0000000
--- a/tools/isolate/tests/isolate/missing_trailing_slash.isolate
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'isolate_dependency_untracked': [
- # Directory missing trailing slash.
- 'files1',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/no_run.isolate b/tools/isolate/tests/isolate/no_run.isolate
deleted file mode 100644
index 20a1659..0000000
--- a/tools/isolate/tests/isolate/no_run.isolate
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'isolate_dependency_tracked': [
- # Includes itself.
- 'no_run.isolate',
- ],
- 'isolate_dependency_untracked': [
- 'files1/',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/non_existent.isolate b/tools/isolate/tests/isolate/non_existent.isolate
deleted file mode 100644
index 8e344d9..0000000
--- a/tools/isolate/tests/isolate/non_existent.isolate
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'isolate_dependency_tracked': [
- 'A_file_that_do_not_exist',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/symlink_full.isolate b/tools/isolate/tests/isolate/symlink_full.isolate
deleted file mode 100644
index 4b06906..0000000
--- a/tools/isolate/tests/isolate/symlink_full.isolate
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'command': [
- 'python',
- 'symlink_full.py',
- ],
- 'isolate_dependency_tracked': [
- 'symlink_full.py',
- ],
- 'isolate_dependency_untracked': [
- 'files2/',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/symlink_full.py b/tools/isolate/tests/isolate/symlink_full.py
deleted file mode 100755
index fa9865c..0000000
--- a/tools/isolate/tests/isolate/symlink_full.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-
-def main():
- print 'symlink: touches files2/'
- assert len(sys.argv) == 1
-
- expected = {
- os.path.join('subdir', '42.txt'):
- 'the answer to life the universe and everything\n',
- 'test_file1.txt': 'Foo\n',
- 'test_file2.txt': 'Bar\n',
- }
-
- root = 'files2'
- actual = {}
- for relroot, dirnames, filenames in os.walk(root):
- for filename in filenames:
- fullpath = os.path.join(relroot, filename)
- actual[fullpath[len(root)+1:]] = open(fullpath, 'rb').read()
- if '.svn' in dirnames:
- dirnames.remove('.svn')
-
- if actual != expected:
- print 'Failure'
- print actual
- print expected
- return 1
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/isolate/symlink_partial.isolate b/tools/isolate/tests/isolate/symlink_partial.isolate
deleted file mode 100644
index 750889e..0000000
--- a/tools/isolate/tests/isolate/symlink_partial.isolate
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'command': [
- 'python',
- 'symlink_partial.py',
- ],
- 'isolate_dependency_tracked': [
- 'symlink_partial.py',
- ],
- 'isolate_dependency_untracked': [
- 'files2/test_file2.txt',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/symlink_partial.py b/tools/isolate/tests/isolate/symlink_partial.py
deleted file mode 100755
index d0437e5..0000000
--- a/tools/isolate/tests/isolate/symlink_partial.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-
-def main():
- print 'symlink: touches files2/test_file2.txt'
- assert len(sys.argv) == 1
-
- if 'Bar\n' != open(os.path.join('files2', 'test_file2.txt'), 'rb').read():
- print 'Failed'
- return 1
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/isolate/touch_only.isolate b/tools/isolate/tests/isolate/touch_only.isolate
deleted file mode 100644
index a134096..0000000
--- a/tools/isolate/tests/isolate/touch_only.isolate
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'command': [
- 'python',
- 'touch_only.py',
- '<(FLAG)',
- ],
- 'isolate_dependency_tracked': [
- 'touch_only.py',
- ],
- 'isolate_dependency_touched': [
- 'files1/test_file1.txt',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/touch_only.py b/tools/isolate/tests/isolate/touch_only.py
deleted file mode 100755
index bb9954a..0000000
--- a/tools/isolate/tests/isolate/touch_only.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-
-def main():
- print 'touch_only: verify the file is empty.'
- assert len(sys.argv) == 2
- mode = sys.argv[1]
- assert mode in ('run', 'trace')
-
- size = os.stat(os.path.join('files1', 'test_file1.txt')).st_size
- if mode == 'run':
- # The file must be empty.
- if size:
- print 'Unexpected content'
- return 1
- else:
- # The file must be non-empty.
- if not size:
- print 'Unexpected emptiness'
- return 1
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/isolate/touch_root.isolate b/tools/isolate/tests/isolate/touch_root.isolate
deleted file mode 100644
index ed1356d..0000000
--- a/tools/isolate/tests/isolate/touch_root.isolate
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'command': [
- 'python',
- 'touch_root.py',
- ],
- 'isolate_dependency_tracked': [
- '../../isolate.py',
- 'touch_root.py',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/touch_root.py b/tools/isolate/tests/isolate/touch_root.py
deleted file mode 100755
index ddae3a6..0000000
--- a/tools/isolate/tests/isolate/touch_root.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-
-def main():
- print 'child_touch_root: Verify the relative directories'
- root_dir = os.path.dirname(os.path.abspath(__file__))
- parent_dir, base = os.path.split(root_dir)
- parent_dir, base2 = os.path.split(parent_dir)
- if base != 'isolate' or base2 != 'tests':
- print 'Invalid root dir %s' % root_dir
- return 4
-
- open(os.path.join(parent_dir, 'isolate.py'), 'r').close()
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/isolate/with_flag.isolate b/tools/isolate/tests/isolate/with_flag.isolate
deleted file mode 100644
index 77c18d2..0000000
--- a/tools/isolate/tests/isolate/with_flag.isolate
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'variables': {
- 'command': [
- 'python',
- 'with_flag.py',
- '<(FLAG)',
- ],
- 'isolate_dependency_tracked': [
- 'with_flag.py',
- ],
- 'isolate_dependency_untracked': [
- 'files1/',
- ],
- },
-}
diff --git a/tools/isolate/tests/isolate/with_flag.py b/tools/isolate/tests/isolate/with_flag.py
deleted file mode 100755
index 695f98c..0000000
--- a/tools/isolate/tests/isolate/with_flag.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-
-def main():
- print 'with_flag: Verify the test data files were mapped properly'
- assert len(sys.argv) == 2
- mode = sys.argv[1]
- assert mode in ('run', 'trace')
- expected = {
- os.path.join('subdir', '42.txt'):
- 'the answer to life the universe and everything\n',
- 'test_file1.txt': 'Foo\n',
- 'test_file2.txt': 'Bar\n',
- }
-
- root = 'files1'
- actual = {}
- for relroot, dirnames, filenames in os.walk(root):
- for filename in filenames:
- fullpath = os.path.join(relroot, filename)
- actual[fullpath[len(root)+1:]] = open(fullpath, 'rb').read()
- if mode == 'trace' and '.svn' in dirnames:
- dirnames.remove('.svn')
-
- if actual != expected:
- print 'Failure'
- print actual
- print expected
- return 1
-
- root_dir = os.path.dirname(os.path.abspath(__file__))
- parent_dir, base = os.path.split(root_dir)
- if mode == 'trace':
- # Verify the parent directory.
- parent_dir, base2 = os.path.split(parent_dir)
- if base != 'isolate' or base2 != 'tests':
- print 'mode trace: Invalid root dir %s' % root_dir
- return 4
- else:
- # Verify that we are not inside a checkout.
- if base == 'tests':
- print 'mode run: Invalid root dir %s' % root_dir
- return 5
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/isolate_smoke_test.py b/tools/isolate/tests/isolate_smoke_test.py
deleted file mode 100755
index 0278558..0000000
--- a/tools/isolate/tests/isolate_smoke_test.py
+++ /dev/null
@@ -1,1001 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cStringIO
-import hashlib
-import json
-import logging
-import os
-import re
-import shutil
-import stat
-import subprocess
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-
-import isolate
-import run_test_from_archive
-
-VERBOSE = False
-SHA_1_NULL = hashlib.sha1().hexdigest()
-
-
-# Keep the list hard coded.
-EXPECTED_MODES = (
- 'check',
- 'hashtable',
- 'help',
- 'noop',
- 'merge',
- 'read',
- 'remap',
- 'run',
- 'trace',
-)
-
-# These are per test case, not per mode.
-RELATIVE_CWD = {
- 'fail': '.',
- 'missing_trailing_slash': '.',
- 'no_run': '.',
- 'non_existent': '.',
- 'symlink_full': '.',
- 'symlink_partial': '.',
- 'touch_only': '.',
- 'touch_root': os.path.join('tests', 'isolate'),
- 'with_flag': '.',
-}
-
-DEPENDENCIES = {
- 'fail': ['fail.py'],
- 'missing_trailing_slash': [],
- 'no_run': [
- 'no_run.isolate',
- os.path.join('files1', 'subdir', '42.txt'),
- os.path.join('files1', 'test_file1.txt'),
- os.path.join('files1', 'test_file2.txt'),
- ],
- 'non_existent': [],
- 'symlink_full': [
- os.path.join('files1', 'subdir', '42.txt'),
- os.path.join('files1', 'test_file1.txt'),
- os.path.join('files1', 'test_file2.txt'),
- # files2 is a symlink to files1.
- 'files2',
- 'symlink_full.py',
- ],
- 'symlink_partial': [
- os.path.join('files1', 'test_file2.txt'),
- # files2 is a symlink to files1.
- 'files2',
- 'symlink_partial.py',
- ],
- 'touch_only': [
- 'touch_only.py',
- os.path.join('files1', 'test_file1.txt'),
- ],
- 'touch_root': [
- os.path.join('tests', 'isolate', 'touch_root.py'),
- 'isolate.py',
- ],
- 'with_flag': [
- 'with_flag.py',
- os.path.join('files1', 'subdir', '42.txt'),
- os.path.join('files1', 'test_file1.txt'),
- os.path.join('files1', 'test_file2.txt'),
- ],
-}
-
-
-class CalledProcessError(subprocess.CalledProcessError):
- """Makes 2.6 version act like 2.7"""
- def __init__(self, returncode, cmd, output, stderr, cwd):
- super(CalledProcessError, self).__init__(returncode, cmd)
- self.output = output
- self.stderr = stderr
- self.cwd = cwd
-
- def __str__(self):
- return super(CalledProcessError, self).__str__() + (
- '\n'
- 'cwd=%s\n%s\n%s\n%s') % (
- self.cwd,
- self.output,
- self.stderr,
- ' '.join(self.cmd))
-
-
-def list_files_tree(directory):
- """Returns the list of all the files in a tree."""
- actual = []
- for root, dirnames, filenames in os.walk(directory):
- actual.extend(os.path.join(root, f)[len(directory)+1:] for f in filenames)
- for dirname in dirnames:
- full = os.path.join(root, dirname)
- # Manually include symlinks.
- if os.path.islink(full):
- actual.append(full[len(directory)+1:])
- return sorted(actual)
-
-
-def calc_sha1(filepath):
- """Calculates the SHA-1 hash for a file."""
- return hashlib.sha1(open(filepath, 'rb').read()).hexdigest()
-
-
-class IsolateBase(unittest.TestCase):
-  # To be defined by the subclass; it defines the amount of metadata saved by
-  # isolate.py for each file. Should be one of (NO_INFO, STATS_ONLY, WITH_HASH).
- LEVEL = None
-
- def setUp(self):
- # The tests assume the current directory is the file's directory.
- os.chdir(ROOT_DIR)
- self.tempdir = tempfile.mkdtemp()
- self.result = os.path.join(self.tempdir, 'isolate_smoke_test.results')
- self.outdir = os.path.join(self.tempdir, 'isolated')
-
- def tearDown(self):
- logging.debug(self.tempdir)
- shutil.rmtree(self.tempdir)
-
- @staticmethod
- def _isolate_dict_to_string(values):
- buf = cStringIO.StringIO()
- isolate.pretty_print(values, buf)
- return buf.getvalue()
-
- @classmethod
- def _wrap_in_condition(cls, variables):
- """Wraps a variables dict inside the current OS condition.
-
- Returns the equivalent string.
- """
- return cls._isolate_dict_to_string(
- {
- 'conditions': [
- ['OS=="%s"' % isolate.get_flavor(), {
- 'variables': variables
- }],
- ],
- })
-
-
-class IsolateModeBase(IsolateBase):
- def _expect_no_tree(self):
- self.assertFalse(os.path.exists(self.outdir))
-
- def _result_tree(self):
- return list_files_tree(self.outdir)
-
- def _expected_tree(self):
- """Verifies the files written in the temporary directory."""
- self.assertEquals(sorted(DEPENDENCIES[self.case()]), self._result_tree())
-
- @staticmethod
- def _fix_file_mode(filename, read_only):
- """4 modes are supported, 0750 (rwx), 0640 (rw), 0550 (rx), 0440 (r)."""
- min_mode = 0440
- if not read_only:
- min_mode |= 0200
- return (min_mode | 0110) if filename.endswith('.py') else min_mode
-
- def _gen_files(self, read_only, empty_file):
- """Returns a dict of files like calling isolate.process_input() on each
- file.
- """
- root_dir = ROOT_DIR
- if RELATIVE_CWD[self.case()] == '.':
- root_dir = os.path.join(root_dir, 'tests', 'isolate')
-
- files = dict((unicode(f), {}) for f in DEPENDENCIES[self.case()])
-
- for relfile, v in files.iteritems():
- filepath = os.path.join(root_dir, relfile)
- if self.LEVEL >= isolate.STATS_ONLY:
- filestats = os.lstat(filepath)
- is_link = stat.S_ISLNK(filestats.st_mode)
- if not is_link:
- v[u'size'] = filestats.st_size
- if isolate.get_flavor() != 'win':
- v[u'mode'] = self._fix_file_mode(relfile, read_only)
- else:
- v[u'mode'] = 488
-          # Used to skip recalculating the hash. Use the most recent update
-          # time.
- v[u'timestamp'] = int(round(filestats.st_mtime))
- if is_link:
- v['link'] = os.readlink(filepath)
-
- if self.LEVEL >= isolate.WITH_HASH:
- if not is_link:
- # Upgrade the value to unicode so diffing the structure in case of
- # test failure is easier, since the basestring type must match,
- # str!=unicode.
- v[u'sha-1'] = unicode(calc_sha1(filepath))
-
- if empty_file:
- item = files[empty_file]
- item['sha-1'] = unicode(SHA_1_NULL)
- if sys.platform != 'win32':
- item['mode'] = 288
- item['size'] = 0
- item['touched_only'] = True
- item.pop('timestamp', None)
- return files
-
- def _expected_result(self, args, read_only, empty_file):
- """Verifies self.result contains the expected data."""
- expected = {
- u'files': self._gen_files(read_only, empty_file),
- u'os': isolate.get_flavor(),
- u'relative_cwd': unicode(RELATIVE_CWD[self.case()]),
- }
- if read_only is not None:
- expected[u'read_only'] = read_only
- if args:
- expected[u'command'] = [u'python'] + [unicode(x) for x in args]
- else:
- expected[u'command'] = []
- self.assertEquals(expected, json.load(open(self.result, 'r')))
-
- def _expected_saved_state(self, extra_vars):
- flavor = isolate.get_flavor()
- expected = {
- u'isolate_file': unicode(self.filename()),
- u'variables': {
- u'EXECUTABLE_SUFFIX': '.exe' if flavor == 'win' else '',
- u'OS': unicode(flavor),
- },
- }
- expected['variables'].update(extra_vars or {})
- self.assertEquals(expected, json.load(open(self.saved_state(), 'r')))
-
- def _expect_results(self, args, read_only, extra_vars, empty_file):
- self._expected_result(args, read_only, empty_file)
- self._expected_saved_state(extra_vars)
- # Also verifies run_test_from_archive.py will be able to read it.
- run_test_from_archive.load_manifest(open(self.result, 'r').read())
-
- def _expect_no_result(self):
- self.assertFalse(os.path.exists(self.result))
-
- def _execute(self, mode, case, args, need_output):
- """Executes isolate.py."""
- self.assertEquals(
- case,
- self.case() + '.isolate',
- 'Rename the test case to test_%s()' % case)
- cmd = [
- sys.executable, os.path.join(ROOT_DIR, 'isolate.py'),
- mode,
- '--result', self.result,
- '--outdir', self.outdir,
- '--isolate', self.filename(),
- ]
- cmd.extend(args)
-
- env = os.environ.copy()
- if 'ISOLATE_DEBUG' in env:
- del env['ISOLATE_DEBUG']
-
- if need_output or not VERBOSE:
- stdout = subprocess.PIPE
- stderr = subprocess.PIPE
- else:
- cmd.extend(['-v'] * 3)
- stdout = None
- stderr = None
-
- logging.debug(cmd)
- cwd = ROOT_DIR
- p = subprocess.Popen(
- cmd,
- stdout=stdout,
- stderr=stderr,
- cwd=cwd,
- env=env,
- universal_newlines=True)
- out, err = p.communicate()
- if p.returncode:
- raise CalledProcessError(p.returncode, cmd, out, err, cwd)
-
- # Do not check on Windows since a lot of spew is generated there.
- if sys.platform != 'win32':
- self.assertTrue(err in (None, ''), err)
- return out
-
- def case(self):
- """Returns the filename corresponding to this test case."""
- test_id = self.id().split('.')
- return re.match('^test_([a-z_]+)$', test_id[2]).group(1)
-
- def filename(self):
- """Returns the filename corresponding to this test case."""
- filename = os.path.join(
- ROOT_DIR, 'tests', 'isolate', self.case() + '.isolate')
- self.assertTrue(os.path.isfile(filename), filename)
- return filename
-
- def saved_state(self):
- return isolate.result_to_state(self.result)
-
-
-class Isolate(unittest.TestCase):
- # Does not inherit from the other *Base classes.
- def test_help_modes(self):
-    # Check coherency between the help and the implemented modes.
- p = subprocess.Popen(
- [sys.executable, os.path.join(ROOT_DIR, 'isolate.py'), '--help'],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- cwd=ROOT_DIR)
- out = p.communicate()[0].splitlines()
- self.assertEquals(0, p.returncode)
- out = out[out.index('') + 1:]
- out = out[:out.index('')]
- modes = [re.match(r'^ (\w+) .+', l) for l in out]
- modes = tuple(m.group(1) for m in modes if m)
- # noop doesn't do anything so no point in testing it.
- self.assertEquals(sorted(EXPECTED_MODES), sorted(modes))
-
- def test_modes(self):
-    # This is a bit redundant but makes sure all combinations are tested.
- files = sorted(
- i[:-len('.isolate')]
- for i in os.listdir(os.path.join(ROOT_DIR, 'tests', 'isolate'))
- if i.endswith('.isolate')
- )
- self.assertEquals(sorted(RELATIVE_CWD), files)
- self.assertEquals(sorted(DEPENDENCIES), files)
-
- if sys.platform == 'win32':
-      # Symlinks are unsupported there; remove them from the list.
- files = [f for f in files if not f.startswith('symlink_')]
-
- # TODO(csharp): touched_only is disabled until crbug.com/150823 is fixed.
- files.remove('touch_only')
-
- # modes read and trace are tested together.
- modes_to_check = list(EXPECTED_MODES)
- modes_to_check.remove('help')
- modes_to_check.remove('merge')
- modes_to_check.remove('noop')
- modes_to_check.remove('read')
- modes_to_check.remove('trace')
- modes_to_check.append('trace_read_merge')
- for mode in modes_to_check:
- expected_cases = set('test_%s' % f for f in files)
- fixture_name = 'Isolate_%s' % mode
- fixture = getattr(sys.modules[__name__], fixture_name)
- actual_cases = set(i for i in dir(fixture) if i.startswith('test_'))
- missing = expected_cases - actual_cases
- self.assertFalse(missing, '%s.%s' % (fixture_name, missing))
-
-
-class Isolate_check(IsolateModeBase):
- LEVEL = isolate.NO_INFO
-
- def test_fail(self):
- self._execute('check', 'fail.isolate', [], False)
- self._expect_no_tree()
- self._expect_results(['fail.py'], None, None, None)
-
- def test_missing_trailing_slash(self):
- try:
- self._execute('check', 'missing_trailing_slash.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_non_existent(self):
- try:
- self._execute('check', 'non_existent.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_no_run(self):
- self._execute('check', 'no_run.isolate', [], False)
- self._expect_no_tree()
- self._expect_results([], None, None, None)
-
- # TODO(csharp): Disabled until crbug.com/150823 is fixed.
- def do_not_test_touch_only(self):
- self._execute('check', 'touch_only.isolate', ['-V', 'FLAG', 'gyp'], False)
- self._expect_no_tree()
- empty = os.path.join('files1', 'test_file1.txt')
- self._expected_result(['touch_only.py', 'gyp'], None, empty)
-
- def test_touch_root(self):
- self._execute('check', 'touch_root.isolate', [], False)
- self._expect_no_tree()
- self._expect_results(['touch_root.py'], None, None, None)
-
- def test_with_flag(self):
- self._execute('check', 'with_flag.isolate', ['-V', 'FLAG', 'gyp'], False)
- self._expect_no_tree()
- self._expect_results(
- ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'}, None)
-
- if sys.platform != 'win32':
- def test_symlink_full(self):
- self._execute('check', 'symlink_full.isolate', [], False)
- self._expect_no_tree()
- self._expect_results(['symlink_full.py'], None, None, None)
-
- def test_symlink_partial(self):
- self._execute('check', 'symlink_partial.isolate', [], False)
- self._expect_no_tree()
- self._expect_results(['symlink_partial.py'], None, None, None)
-
-
-class Isolate_hashtable(IsolateModeBase):
- LEVEL = isolate.WITH_HASH
-
- def _gen_expected_tree(self, empty_file):
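-    # The hashtable ends up with one file per dependency, named after the
-    # dependency's sha-1, plus one entry for the .results file itself.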
- expected = [
- v['sha-1'] for v in self._gen_files(False, empty_file).itervalues()
- ]
- expected.append(calc_sha1(self.result))
- return expected
-
- def _expected_hash_tree(self, empty_file):
- """Verifies the files written in the temporary directory."""
- self.assertEquals(
- sorted(self._gen_expected_tree(empty_file)), self._result_tree())
-
- def test_fail(self):
- self._execute('hashtable', 'fail.isolate', [], False)
- self._expected_hash_tree(None)
- self._expect_results(['fail.py'], None, None, None)
-
- def test_missing_trailing_slash(self):
- try:
- self._execute('hashtable', 'missing_trailing_slash.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_non_existent(self):
- try:
- self._execute('hashtable', 'non_existent.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_no_run(self):
- self._execute('hashtable', 'no_run.isolate', [], False)
- self._expected_hash_tree(None)
- self._expect_results([], None, None, None)
-
- # TODO(csharp): Disabled until crbug.com/150823 is fixed.
- def do_not_test_touch_only(self):
- self._execute(
- 'hashtable', 'touch_only.isolate', ['-V', 'FLAG', 'gyp'], False)
- empty = os.path.join('files1', 'test_file1.txt')
- self._expected_hash_tree(empty)
-    self._expect_results(
-        ['touch_only.py', 'gyp'], None, {u'FLAG': u'gyp'}, empty)
-
- def test_touch_root(self):
- self._execute('hashtable', 'touch_root.isolate', [], False)
- self._expected_hash_tree(None)
- self._expect_results(['touch_root.py'], None, None, None)
-
- def test_with_flag(self):
- self._execute(
- 'hashtable', 'with_flag.isolate', ['-V', 'FLAG', 'gyp'], False)
- self._expected_hash_tree(None)
- self._expect_results(
- ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'}, None)
-
- if sys.platform != 'win32':
- def test_symlink_full(self):
- self._execute('hashtable', 'symlink_full.isolate', [], False)
- # Construct our own tree.
- expected = [
- str(v['sha-1'])
- for v in self._gen_files(False, None).itervalues() if 'sha-1' in v
- ]
- expected.append(calc_sha1(self.result))
- self.assertEquals(sorted(expected), self._result_tree())
- self._expect_results(['symlink_full.py'], None, None, None)
-
- def test_symlink_partial(self):
- self._execute('hashtable', 'symlink_partial.isolate', [], False)
- # Construct our own tree.
- expected = [
- str(v['sha-1'])
- for v in self._gen_files(False, None).itervalues() if 'sha-1' in v
- ]
- expected.append(calc_sha1(self.result))
- self.assertEquals(sorted(expected), self._result_tree())
- self._expect_results(['symlink_partial.py'], None, None, None)
-
-
-class Isolate_remap(IsolateModeBase):
- LEVEL = isolate.STATS_ONLY
-
- def test_fail(self):
- self._execute('remap', 'fail.isolate', [], False)
- self._expected_tree()
- self._expect_results(['fail.py'], None, None, None)
-
- def test_missing_trailing_slash(self):
- try:
- self._execute('remap', 'missing_trailing_slash.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_non_existent(self):
- try:
- self._execute('remap', 'non_existent.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_no_run(self):
- self._execute('remap', 'no_run.isolate', [], False)
- self._expected_tree()
- self._expect_results([], None, None, None)
-
- # TODO(csharp): Disabled until crbug.com/150823 is fixed.
- def do_not_test_touch_only(self):
- self._execute('remap', 'touch_only.isolate', ['-V', 'FLAG', 'gyp'], False)
- self._expected_tree()
- empty = os.path.join('files1', 'test_file1.txt')
- self._expect_results(
- ['touch_only.py', 'gyp'], None, {u'FLAG': u'gyp'}, empty)
-
- def test_touch_root(self):
- self._execute('remap', 'touch_root.isolate', [], False)
- self._expected_tree()
- self._expect_results(['touch_root.py'], None, None, None)
-
- def test_with_flag(self):
- self._execute('remap', 'with_flag.isolate', ['-V', 'FLAG', 'gyp'], False)
- self._expected_tree()
- self._expect_results(
- ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'}, None)
-
- if sys.platform != 'win32':
- def test_symlink_full(self):
- self._execute('remap', 'symlink_full.isolate', [], False)
- self._expected_tree()
- self._expect_results(['symlink_full.py'], None, None, None)
-
- def test_symlink_partial(self):
- self._execute('remap', 'symlink_partial.isolate', [], False)
- self._expected_tree()
- self._expect_results(['symlink_partial.py'], None, None, None)
-
-
-class Isolate_run(IsolateModeBase):
- LEVEL = isolate.STATS_ONLY
-
- def _expect_empty_tree(self):
- self.assertEquals([], self._result_tree())
-
- def test_fail(self):
- try:
- self._execute('run', 'fail.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_empty_tree()
- self._expect_results(['fail.py'], None, None, None)
-
- def test_missing_trailing_slash(self):
- try:
- self._execute('run', 'missing_trailing_slash.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_non_existent(self):
- try:
- self._execute('run', 'non_existent.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_no_tree()
- self._expect_no_result()
-
- def test_no_run(self):
- try:
- self._execute('run', 'no_run.isolate', [], False)
- self.fail()
- except subprocess.CalledProcessError:
- pass
- self._expect_empty_tree()
- self._expect_no_result()
-
- # TODO(csharp): Disabled until crbug.com/150823 is fixed.
- def do_not_test_touch_only(self):
- self._execute('run', 'touch_only.isolate', ['-V', 'FLAG', 'run'], False)
- self._expect_empty_tree()
- empty = os.path.join('files1', 'test_file1.txt')
- self._expect_results(
- ['touch_only.py', 'run'], None, {u'FLAG': u'run'}, empty)
-
- def test_touch_root(self):
- self._execute('run', 'touch_root.isolate', [], False)
- self._expect_empty_tree()
- self._expect_results(['touch_root.py'], None, None, None)
-
- def test_with_flag(self):
- self._execute('run', 'with_flag.isolate', ['-V', 'FLAG', 'run'], False)
-    # Not sure about the empty tree; it should probably be deleted.
- self._expect_empty_tree()
- self._expect_results(
- ['with_flag.py', 'run'], None, {u'FLAG': u'run'}, None)
-
- if sys.platform != 'win32':
- def test_symlink_full(self):
- self._execute('run', 'symlink_full.isolate', [], False)
- self._expect_empty_tree()
- self._expect_results(['symlink_full.py'], None, None, None)
-
- def test_symlink_partial(self):
- self._execute('run', 'symlink_partial.isolate', [], False)
- self._expect_empty_tree()
- self._expect_results(['symlink_partial.py'], None, None, None)
-
-
-class Isolate_trace_read_merge(IsolateModeBase):
-  # Tests trace, read and merge together.
-  # Warning: merge updates .isolate files, but they are currently in their
-  # canonical format so they shouldn't be changed.
- LEVEL = isolate.STATS_ONLY
-
- def _check_merge(self, filename):
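-    # 'merge' rewrites the .isolate file in place; since the test data is
-    # already in canonical form, the rewrite must leave the bytes unchanged.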
- filepath = isolate.trace_inputs.get_native_path_case(
- os.path.join(ROOT_DIR, 'tests', 'isolate', filename))
- expected = 'Updating %s\n' % filepath
- with open(filepath, 'rb') as f:
- old_content = f.read()
- out = self._execute('merge', filename, [], True) or ''
- self.assertEquals(expected, out)
- with open(filepath, 'rb') as f:
- new_content = f.read()
- self.assertEquals(old_content, new_content)
-
- def test_fail(self):
- # Even if the process returns non-zero, the trace will still be good.
- try:
- self._execute('trace', 'fail.isolate', ['-v'], True)
- self.fail()
- except subprocess.CalledProcessError, e:
- self.assertEquals('', e.output)
- self._expect_no_tree()
- self._expect_results(['fail.py'], None, None, None)
- expected = self._wrap_in_condition(
- {
- isolate.KEY_TRACKED: [
- 'fail.py',
- ],
- })
- out = self._execute('read', 'fail.isolate', [], True) or ''
- self.assertEquals(expected.splitlines(), out.splitlines())
- self._check_merge('fail.isolate')
-
- def test_missing_trailing_slash(self):
- try:
- self._execute('trace', 'missing_trailing_slash.isolate', [], True)
- self.fail()
- except subprocess.CalledProcessError, e:
- self.assertEquals('', e.output)
- out = e.stderr
- self._expect_no_tree()
- self._expect_no_result()
- expected = (
- '\n'
- 'Error: Input directory %s must have a trailing slash\n' %
- os.path.join(ROOT_DIR, 'tests', 'isolate', 'files1')
- )
- self.assertEquals(expected, out)
-
- def test_non_existent(self):
- try:
- self._execute('trace', 'non_existent.isolate', [], True)
- self.fail()
- except subprocess.CalledProcessError, e:
- self.assertEquals('', e.output)
- out = e.stderr
- self._expect_no_tree()
- self._expect_no_result()
- expected = (
- '\n'
- 'Error: Input file %s doesn\'t exist\n' %
- os.path.join(ROOT_DIR, 'tests', 'isolate', 'A_file_that_do_not_exist')
- )
- self.assertEquals(expected, out)
-
- def test_no_run(self):
- try:
- self._execute('trace', 'no_run.isolate', [], True)
- self.fail()
- except subprocess.CalledProcessError, e:
- out = e.output
- err = e.stderr
- self._expect_no_tree()
- self._expect_no_result()
- expected = '\nError: No command to run\n'
- self.assertEquals('', out)
- self.assertEquals(expected, err)
-
- # TODO(csharp): Disabled until crbug.com/150823 is fixed.
- def do_not_test_touch_only(self):
- out = self._execute(
- 'trace', 'touch_only.isolate', ['-V', 'FLAG', 'trace'], True)
- self.assertEquals('', out)
- self._expect_no_tree()
- empty = os.path.join('files1', 'test_file1.txt')
- self._expect_results(
- ['touch_only.py', 'trace'], None, {u'FLAG': u'trace'}, empty)
- expected = {
- isolate.KEY_TRACKED: [
- 'touch_only.py',
- ],
- isolate.KEY_TOUCHED: [
- # Note that .isolate format mandates / and not os.path.sep.
- 'files1/test_file1.txt',
- ],
- }
- if sys.platform != 'linux2':
- # TODO(maruel): Implement touch-only tracing on non-linux.
- del expected[isolate.KEY_TOUCHED]
-
- out = self._execute('read', 'touch_only.isolate', [], True)
- self.assertEquals(self._wrap_in_condition(expected), out)
- self._check_merge('touch_only.isolate')
-
- def test_touch_root(self):
- out = self._execute('trace', 'touch_root.isolate', [], True)
- self.assertEquals('', out)
- self._expect_no_tree()
- self._expect_results(['touch_root.py'], None, None, None)
- expected = self._wrap_in_condition(
- {
- isolate.KEY_TRACKED: [
- '../../isolate.py',
- 'touch_root.py',
- ],
- })
- out = self._execute('read', 'touch_root.isolate', [], True)
- self.assertEquals(expected, out)
- self._check_merge('touch_root.isolate')
-
- def test_with_flag(self):
- out = self._execute(
- 'trace', 'with_flag.isolate', ['-V', 'FLAG', 'trace'], True)
- self.assertEquals('', out)
- self._expect_no_tree()
- self._expect_results(
- ['with_flag.py', 'trace'], None, {u'FLAG': u'trace'}, None)
- expected = {
- isolate.KEY_TRACKED: [
- 'with_flag.py',
- ],
- isolate.KEY_UNTRACKED: [
- # Note that .isolate format mandates / and not os.path.sep.
- 'files1/',
- ],
- }
- out = self._execute('read', 'with_flag.isolate', [], True)
- self.assertEquals(self._wrap_in_condition(expected), out)
- self._check_merge('with_flag.isolate')
-
- if sys.platform != 'win32':
- def test_symlink_full(self):
- out = self._execute(
- 'trace', 'symlink_full.isolate', [], True)
- self.assertEquals('', out)
- self._expect_no_tree()
- self._expect_results(['symlink_full.py'], None, None, None)
- expected = {
- isolate.KEY_TRACKED: [
- 'symlink_full.py',
- ],
- isolate.KEY_UNTRACKED: [
- # Note that .isolate format mandates / and not os.path.sep.
- 'files2/',
- ],
- }
- out = self._execute('read', 'symlink_full.isolate', [], True)
- self.assertEquals(self._wrap_in_condition(expected), out)
- self._check_merge('symlink_full.isolate')
-
- def test_symlink_partial(self):
- out = self._execute(
- 'trace', 'symlink_partial.isolate', [], True)
- self.assertEquals('', out)
- self._expect_no_tree()
- self._expect_results(['symlink_partial.py'], None, None, None)
- expected = {
- isolate.KEY_TRACKED: [
- 'symlink_partial.py',
- ],
- isolate.KEY_UNTRACKED: [
- 'files2/test_file2.txt',
- ],
- }
- out = self._execute('read', 'symlink_partial.isolate', [], True)
- self.assertEquals(self._wrap_in_condition(expected), out)
- self._check_merge('symlink_partial.isolate')
-
-
-class IsolateNoOutdir(IsolateBase):
-  # Tests without the --outdir flag: all the files are first copied into the
-  # tempdir and the test is run from there.
- def setUp(self):
- super(IsolateNoOutdir, self).setUp()
- self.root = os.path.join(self.tempdir, 'root')
- os.makedirs(os.path.join(self.root, 'tests', 'isolate'))
- for i in ('touch_root.isolate', 'touch_root.py'):
- shutil.copy(
- os.path.join(ROOT_DIR, 'tests', 'isolate', i),
- os.path.join(self.root, 'tests', 'isolate', i))
- shutil.copy(
- os.path.join(ROOT_DIR, 'isolate.py'),
- os.path.join(self.root, 'isolate.py'))
-
- def _execute(self, mode, args, need_output):
- """Executes isolate.py."""
- cmd = [
- sys.executable, os.path.join(ROOT_DIR, 'isolate.py'),
- mode,
- '--result', self.result,
- ]
- cmd.extend(args)
-
- env = os.environ.copy()
- if 'ISOLATE_DEBUG' in env:
- del env['ISOLATE_DEBUG']
-
- if need_output or not VERBOSE:
- stdout = subprocess.PIPE
- stderr = subprocess.STDOUT
- else:
- cmd.extend(['-v'] * 3)
- stdout = None
- stderr = None
-
- logging.debug(cmd)
- cwd = self.tempdir
- p = subprocess.Popen(
- cmd,
- stdout=stdout,
- stderr=stderr,
- cwd=cwd,
- env=env,
- universal_newlines=True)
- out, err = p.communicate()
- if p.returncode:
- raise CalledProcessError(p.returncode, cmd, out, err, cwd)
- return out
-
- def mode(self):
- """Returns the execution mode corresponding to this test case."""
- test_id = self.id().split('.')
- self.assertEquals(3, len(test_id))
- self.assertEquals('__main__', test_id[0])
-    return re.match('^test_([a-z_]+)$', test_id[2]).group(1)
-
- def filename(self):
- """Returns the filename corresponding to this test case."""
- filename = os.path.join(self.root, 'tests', 'isolate', 'touch_root.isolate')
- self.assertTrue(os.path.isfile(filename), filename)
- return filename
-
- def test_check(self):
- self._execute('check', ['--isolate', self.filename()], False)
- files = sorted([
- 'isolate_smoke_test.results',
- 'isolate_smoke_test.state',
- os.path.join('root', 'tests', 'isolate', 'touch_root.isolate'),
- os.path.join('root', 'tests', 'isolate', 'touch_root.py'),
- os.path.join('root', 'isolate.py'),
- ])
- self.assertEquals(files, list_files_tree(self.tempdir))
-
- def test_hashtable(self):
- self._execute('hashtable', ['--isolate', self.filename()], False)
- files = sorted([
- os.path.join(
- 'hashtable', calc_sha1(os.path.join(ROOT_DIR, 'isolate.py'))),
- os.path.join(
- 'hashtable',
- calc_sha1(
- os.path.join(ROOT_DIR, 'tests', 'isolate', 'touch_root.py'))),
- os.path.join('hashtable', calc_sha1(os.path.join(self.result))),
- 'isolate_smoke_test.results',
- 'isolate_smoke_test.state',
- os.path.join('root', 'tests', 'isolate', 'touch_root.isolate'),
- os.path.join('root', 'tests', 'isolate', 'touch_root.py'),
- os.path.join('root', 'isolate.py'),
- ])
- self.assertEquals(files, list_files_tree(self.tempdir))
-
- def test_remap(self):
- self._execute('remap', ['--isolate', self.filename()], False)
- files = sorted([
- 'isolate_smoke_test.results',
- 'isolate_smoke_test.state',
- os.path.join('root', 'tests', 'isolate', 'touch_root.isolate'),
- os.path.join('root', 'tests', 'isolate', 'touch_root.py'),
- os.path.join('root', 'isolate.py'),
- ])
- self.assertEquals(files, list_files_tree(self.tempdir))
-
- def test_run(self):
- self._execute('run', ['--isolate', self.filename()], False)
- files = sorted([
- 'isolate_smoke_test.results',
- 'isolate_smoke_test.state',
- os.path.join('root', 'tests', 'isolate', 'touch_root.isolate'),
- os.path.join('root', 'tests', 'isolate', 'touch_root.py'),
- os.path.join('root', 'isolate.py'),
- ])
- self.assertEquals(files, list_files_tree(self.tempdir))
-
- def test_trace_read_merge(self):
- self._execute('trace', ['--isolate', self.filename()], False)
- # Read the trace before cleaning up. No need to specify self.filename()
-    # because all the needed information is in the .state file.
- output = self._execute('read', [], True)
- expected = {
- isolate.KEY_TRACKED: [
- '../../isolate.py',
- 'touch_root.py',
- ],
- }
- self.assertEquals(self._wrap_in_condition(expected), output)
-
- output = self._execute('merge', [], True)
- expected = 'Updating %s\n' % isolate.trace_inputs.get_native_path_case(
- os.path.join(self.root, 'tests', 'isolate', 'touch_root.isolate'))
- self.assertEquals(expected, output)
- # In theory the file is going to be updated but in practice its content
- # won't change.
-
-    # Remove the trace logs, which are OS-specific, from the directory.
- isolate.trace_inputs.get_api().clean_trace(
- os.path.join(self.tempdir, 'isolate_smoke_test.results.log'))
- files = sorted([
- 'isolate_smoke_test.results',
- 'isolate_smoke_test.state',
- os.path.join('root', 'tests', 'isolate', 'touch_root.isolate'),
- os.path.join('root', 'tests', 'isolate', 'touch_root.py'),
- os.path.join('root', 'isolate.py'),
- ])
- self.assertEquals(files, list_files_tree(self.tempdir))
-
-
-if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
diff --git a/tools/isolate/tests/isolate_test.py b/tools/isolate/tests/isolate_test.py
deleted file mode 100755
index 79eb663..0000000
--- a/tools/isolate/tests/isolate_test.py
+++ /dev/null
@@ -1,836 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cStringIO
-import logging
-import os
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-
-import isolate
-# Create shortcuts.
-from isolate import KEY_TOUCHED, KEY_TRACKED, KEY_UNTRACKED
-
-
-class Isolate(unittest.TestCase):
- def setUp(self):
- # Everything should work even from another directory.
- os.chdir(os.path.dirname(ROOT_DIR))
-
- def test_load_isolate_for_flavor_empty(self):
- content = "{}"
- command, infiles, touched, read_only = isolate.load_isolate_for_flavor(
- content, isolate.get_flavor())
- self.assertEquals([], command)
- self.assertEquals([], infiles)
- self.assertEquals([], touched)
- self.assertEquals(None, read_only)
-
- def test_result_load_empty(self):
- values = {
- }
- expected = {
- 'command': [],
- 'files': {},
- 'os': isolate.get_flavor(),
- }
- self.assertEquals(expected, isolate.Result.load(values).flatten())
-
- def test_result_load(self):
- values = {
- 'command': 'maybe',
- 'files': {'foo': 42},
- 'read_only': 2,
- }
- expected = {
- 'command': 'maybe',
- 'files': {'foo': 42},
- 'os': isolate.get_flavor(),
- 'read_only': 2,
- }
- self.assertEquals(expected, isolate.Result.load(values).flatten())
-
- def test_result_load_unexpected(self):
- values = {
- 'foo': 'bar',
- }
- expected = (
- ("Found unexpected entry {'foo': 'bar'} while constructing an "
- "object Result"),
- {'foo': 'bar'},
- 'Result')
- try:
- isolate.Result.load(values)
- self.fail()
- except ValueError, e:
- self.assertEquals(expected, e.args)
-
- def test_savedstate_load_empty(self):
- values = {
- }
- expected = {
- 'variables': {},
- }
- self.assertEquals(expected, isolate.SavedState.load(values).flatten())
-
- def test_savedstate_load(self):
- values = {
- 'isolate_file': os.path.join(ROOT_DIR, 'maybe'),
- 'variables': {'foo': 42},
- }
- expected = {
- 'isolate_file': os.path.join(ROOT_DIR, 'maybe'),
- 'variables': {'foo': 42},
- }
- self.assertEquals(expected, isolate.SavedState.load(values).flatten())
-
- def test_load_stale_result(self):
- directory = tempfile.mkdtemp(prefix='isolate_')
- try:
- isolate_file = os.path.join(
- ROOT_DIR, 'tests', 'isolate', 'touch_root.isolate')
- class Options(object):
- result = os.path.join(directory, 'result')
- outdir = os.path.join(directory, '0utdir')
- isolate = isolate_file
- variables = {'foo': 'bar'}
-
- # Data to be loaded in the .result file. Do not create a .state file.
- input_data = {
- 'command': ['python'],
- 'files': {
- 'foo': {
- "mode": 416,
- "sha-1": "invalid",
- "size": 538,
- "timestamp": 1335146921,
- },
- os.path.join('tests', 'isolate', 'touch_root.py'): {
- "mode": 488,
- "sha-1": "invalid",
- "size": 538,
- "timestamp": 1335146921,
- },
- },
- }
- isolate.trace_inputs.write_json(Options.result, input_data, False)
-
- # A CompleteState object contains two parts:
-      # - Result instance stored in complete_state.result, corresponding to
-      #   the .result file; it is what run_test_from_archive.py reads.
-      # - SavedState instance stored in complete_state.saved_state,
-      #   corresponding to the .state file; it simply aids the developer when
-      #   re-running the same command multiple times and contains discardable
-      #   information.
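-      # Illustration of the pair, with hypothetical values:
-      #   foo.result: {'command': [...], 'files': {...}, 'os': '<flavor>'}
-      #   foo.state:  {'isolate_file': <path>, 'variables': {...}}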
- complete_state = isolate.load_complete_state(Options, isolate.STATS_ONLY)
- actual_result = complete_state.result.flatten()
- actual_saved_state = complete_state.saved_state.flatten()
-
- expected_result = {
- 'command': ['python', 'touch_root.py'],
- 'files': {
- os.path.join(u'tests', 'isolate', 'touch_root.py'): {
- 'mode': 488,
- 'size': self._size('tests', 'isolate', 'touch_root.py'),
- },
- 'isolate.py': {
- 'mode': 488,
- 'size': self._size('isolate.py'),
- },
- },
- 'os': isolate.get_flavor(),
- 'relative_cwd': os.path.join('tests', 'isolate'),
- }
- if sys.platform == 'win32':
-        # 'mode' is not saved on Windows.
- for values in expected_result['files'].itervalues():
- del values['mode']
- for item in actual_result['files'].itervalues():
- self.assertTrue(item.pop('timestamp'))
- self.assertEquals(expected_result, actual_result)
-
- expected_saved_state = {
- 'isolate_file': isolate_file,
- 'variables': {'foo': 'bar'},
- }
- self.assertEquals(expected_saved_state, actual_saved_state)
- finally:
- isolate.run_test_from_archive.rmtree(directory)
-
- @staticmethod
- def _size(*args):
- return os.stat(os.path.join(ROOT_DIR, *args)).st_size
-
- def test_unknown_key(self):
- try:
- isolate.verify_variables({'foo': [],})
- self.fail()
- except AssertionError:
- pass
-
- def test_unknown_var(self):
- try:
- isolate.verify_condition({'variables': {'foo': [],}})
- self.fail()
- except AssertionError:
- pass
-
- def test_union(self):
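-    # union() merges the two dicts key by key: sets are unioned, lists are
-    # merged, and keys present on only one side are kept as-is.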
- value1 = {
- 'a': set(['A']),
- 'b': ['B', 'C'],
- 'c': 'C',
- }
- value2 = {
- 'a': set(['B', 'C']),
- 'b': [],
- 'd': set(),
- }
- expected = {
- 'a': set(['A', 'B', 'C']),
- 'b': ['B', 'C'],
- 'c': 'C',
- 'd': set(),
- }
- self.assertEquals(expected, isolate.union(value1, value2))
-
- def test_eval_content(self):
- try:
- # Intrinsics are not available.
- isolate.eval_content('map(str, [1, 2])')
- self.fail()
- except NameError:
- pass
-
- def test_load_isolate_as_config_empty(self):
- self.assertEquals({}, isolate.load_isolate_as_config(
- {}, None, []).flatten())
-
- def test_load_isolate_as_config(self):
- value = {
- 'variables': {
- KEY_TRACKED: ['a'],
- KEY_UNTRACKED: ['b'],
- KEY_TOUCHED: ['touched'],
- },
- 'conditions': [
- ['OS=="atari"', {
- 'variables': {
- KEY_TRACKED: ['c', 'x'],
- KEY_UNTRACKED: ['d'],
- KEY_TOUCHED: ['touched_a'],
- 'command': ['echo', 'Hello World'],
- 'read_only': True,
- },
- }, { # else
- 'variables': {
- KEY_TRACKED: ['e', 'x'],
- KEY_UNTRACKED: ['f'],
- KEY_TOUCHED: ['touched_e'],
- 'command': ['echo', 'You should get an Atari'],
- },
- }],
- ['OS=="amiga"', {
- 'variables': {
- KEY_TRACKED: ['g'],
- 'read_only': False,
- },
- }],
- ['OS=="dendy"', {
- }],
- ['OS=="coleco"', {
- }, { # else
- 'variables': {
- KEY_UNTRACKED: ['h'],
- 'read_only': None,
- },
- }],
- ],
- }
- expected = {
- 'amiga': {
- 'command': ['echo', 'You should get an Atari'],
- KEY_TOUCHED: ['touched', 'touched_e'],
- KEY_TRACKED: ['a', 'e', 'g', 'x'],
- KEY_UNTRACKED: ['b', 'f', 'h'],
- 'read_only': False,
- },
- 'atari': {
- 'command': ['echo', 'Hello World'],
- KEY_TOUCHED: ['touched', 'touched_a'],
- KEY_TRACKED: ['a', 'c', 'x'],
- KEY_UNTRACKED: ['b', 'd', 'h'],
- 'read_only': True,
- },
- 'coleco': {
- 'command': ['echo', 'You should get an Atari'],
- KEY_TOUCHED: ['touched', 'touched_e'],
- KEY_TRACKED: ['a', 'e', 'x'],
- KEY_UNTRACKED: ['b', 'f'],
- },
- 'dendy': {
- 'command': ['echo', 'You should get an Atari'],
- KEY_TOUCHED: ['touched', 'touched_e'],
- KEY_TRACKED: ['a', 'e', 'x'],
- KEY_UNTRACKED: ['b', 'f', 'h'],
- },
- }
- self.assertEquals(
- expected, isolate.load_isolate_as_config(value, None, []).flatten())
-
- def test_load_isolate_as_config_duplicate_command(self):
- value = {
- 'variables': {
- 'command': ['rm', '-rf', '/'],
- },
- 'conditions': [
- ['OS=="atari"', {
- 'variables': {
- 'command': ['echo', 'Hello World'],
- },
- }],
- ],
- }
- try:
- isolate.load_isolate_as_config(value, None, [])
- self.fail()
- except AssertionError:
- pass
-
- def test_load_isolate_as_config_no_condition(self):
- value = {
- 'variables': {
- KEY_TRACKED: ['a'],
- KEY_UNTRACKED: ['b'],
- },
- }
- expected = {
- KEY_TRACKED: ['a'],
- KEY_UNTRACKED: ['b'],
- }
- actual = isolate.load_isolate_as_config(value, None, [])
- # Flattening the whole config will discard 'None'.
- self.assertEquals({}, actual.flatten())
- self.assertEquals([None], actual.per_os.keys())
- # But the 'None' value is still available as a backup.
- self.assertEquals(expected, actual.per_os[None].flatten())
-
- def test_invert_map(self):
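-    # invert_map() pivots the per-OS configs into per-variable maps of
-    # value -> set of OSes using it; reduce_inputs(), tested below, then
-    # simplifies those sets ('all OSes' -> None, 'all but one' -> '!os').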
- value = {
- 'amiga': {
- 'command': ['echo', 'You should get an Atari'],
- KEY_TOUCHED: ['touched', 'touched_e'],
- KEY_TRACKED: ['a', 'e', 'g', 'x'],
- KEY_UNTRACKED: ['b', 'f', 'h'],
- 'read_only': False,
- },
- 'atari': {
- 'command': ['echo', 'Hello World'],
- KEY_TOUCHED: ['touched', 'touched_a'],
- KEY_TRACKED: ['a', 'c', 'x'],
- KEY_UNTRACKED: ['b', 'd', 'h'],
- 'read_only': True,
- },
- 'coleco': {
- 'command': ['echo', 'You should get an Atari'],
- KEY_TOUCHED: ['touched', 'touched_e'],
- KEY_TRACKED: ['a', 'e', 'x'],
- KEY_UNTRACKED: ['b', 'f'],
- },
- 'dendy': {
- 'command': ['echo', 'You should get an Atari'],
- KEY_TOUCHED: ['touched', 'touched_e'],
- KEY_TRACKED: ['a', 'e', 'x'],
- KEY_UNTRACKED: ['b', 'f', 'h'],
- },
- }
- expected_values = {
- 'command': {
- ('echo', 'Hello World'): set(['atari']),
- ('echo', 'You should get an Atari'): set(['amiga', 'coleco', 'dendy']),
- },
- KEY_TRACKED: {
- 'a': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'c': set(['atari']),
- 'e': set(['amiga', 'coleco', 'dendy']),
- 'g': set(['amiga']),
- 'x': set(['amiga', 'atari', 'coleco', 'dendy']),
- },
- KEY_UNTRACKED: {
- 'b': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'd': set(['atari']),
- 'f': set(['amiga', 'coleco', 'dendy']),
- 'h': set(['amiga', 'atari', 'dendy']),
- },
- KEY_TOUCHED: {
- 'touched': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'touched_a': set(['atari']),
- 'touched_e': set(['amiga', 'coleco', 'dendy']),
- },
- 'read_only': {
- None: set(['coleco', 'dendy']),
- False: set(['amiga']),
- True: set(['atari']),
- },
- }
- expected_oses = set(['amiga', 'atari', 'coleco', 'dendy'])
- actual_values, actual_oses = isolate.invert_map(value)
- self.assertEquals(expected_values, actual_values)
- self.assertEquals(expected_oses, actual_oses)
-
- def test_reduce_inputs(self):
- values = {
- 'command': {
- ('echo', 'Hello World'): set(['atari']),
- ('echo', 'You should get an Atari'): set(['amiga', 'coleco', 'dendy']),
- },
- KEY_TRACKED: {
- 'a': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'c': set(['atari']),
- 'e': set(['amiga', 'coleco', 'dendy']),
- 'g': set(['amiga']),
- 'x': set(['amiga', 'atari', 'coleco', 'dendy']),
- },
- KEY_UNTRACKED: {
- 'b': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'd': set(['atari']),
- 'f': set(['amiga', 'coleco', 'dendy']),
- 'h': set(['amiga', 'atari', 'dendy']),
- },
- KEY_TOUCHED: {
- 'touched': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'touched_a': set(['atari']),
- 'touched_e': set(['amiga', 'coleco', 'dendy']),
- },
- 'read_only': {
- None: set(['coleco', 'dendy']),
- False: set(['amiga']),
- True: set(['atari']),
- },
- }
- oses = set(['amiga', 'atari', 'coleco', 'dendy'])
- expected_values = {
- 'command': {
- ('echo', 'Hello World'): set(['atari']),
- ('echo', 'You should get an Atari'): set(['!atari']),
- },
- KEY_TRACKED: {
- 'a': set([None]),
- 'c': set(['atari']),
- 'e': set(['!atari']),
- 'g': set(['amiga']),
- 'x': set([None]),
- },
- KEY_UNTRACKED: {
- 'b': set([None]),
- 'd': set(['atari']),
- 'f': set(['!atari']),
- 'h': set(['!coleco']),
- },
- KEY_TOUCHED: {
- 'touched': set([None]),
- 'touched_a': set(['atari']),
- 'touched_e': set(['!atari']),
- },
- 'read_only': {
- None: set(['coleco', 'dendy']),
- False: set(['amiga']),
- True: set(['atari']),
- },
- }
- actual_values, actual_oses = isolate.reduce_inputs(values, oses)
- self.assertEquals(expected_values, actual_values)
- self.assertEquals(oses, actual_oses)
-
- def test_reduce_inputs_take_strongest_dependency(self):
- values = {
- 'command': {
- ('echo', 'Hello World'): set(['atari']),
- ('echo', 'You should get an Atari'): set(['amiga', 'coleco', 'dendy']),
- },
- KEY_TRACKED: {
- 'a': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'b': set(['amiga', 'atari', 'coleco']),
- },
- KEY_UNTRACKED: {
- 'c': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'd': set(['amiga', 'coleco', 'dendy']),
- },
- KEY_TOUCHED: {
- 'a': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'b': set(['atari', 'coleco', 'dendy']),
- 'c': set(['amiga', 'atari', 'coleco', 'dendy']),
- 'd': set(['atari', 'coleco', 'dendy']),
- },
- }
- oses = set(['amiga', 'atari', 'coleco', 'dendy'])
- expected_values = {
- 'command': {
- ('echo', 'Hello World'): set(['atari']),
- ('echo', 'You should get an Atari'): set(['!atari']),
- },
- KEY_TRACKED: {
- 'a': set([None]),
- 'b': set(['!dendy']),
- },
- KEY_UNTRACKED: {
- 'c': set([None]),
- 'd': set(['!atari']),
- },
- KEY_TOUCHED: {
- 'b': set(['dendy']),
- 'd': set(['atari']),
- },
- 'read_only': {},
- }
- actual_values, actual_oses = isolate.reduce_inputs(values, oses)
- self.assertEquals(expected_values, actual_values)
- self.assertEquals(oses, actual_oses)
-
- def test_convert_map_to_isolate_dict(self):
- values = {
- 'command': {
- ('echo', 'Hello World'): set(['atari']),
- ('echo', 'You should get an Atari'): set(['!atari']),
- },
- KEY_TRACKED: {
- 'a': set([None]),
- 'c': set(['atari']),
- 'e': set(['!atari']),
- 'g': set(['amiga']),
- 'x': set([None]),
- },
- KEY_UNTRACKED: {
- 'b': set([None]),
- 'd': set(['atari']),
- 'f': set(['!atari']),
- 'h': set(['!coleco']),
- },
- KEY_TOUCHED: {
- 'touched': set([None]),
- 'touched_a': set(['atari']),
- 'touched_e': set(['!atari']),
- },
- 'read_only': {
- None: set(['coleco', 'dendy']),
- False: set(['amiga']),
- True: set(['atari']),
- },
- }
- oses = set(['amiga', 'atari', 'coleco', 'dendy'])
- expected = {
- 'variables': {
- KEY_TRACKED: ['a', 'x'],
- KEY_UNTRACKED: ['b'],
- KEY_TOUCHED: ['touched'],
- },
- 'conditions': [
- ['OS=="amiga"', {
- 'variables': {
- KEY_TRACKED: ['g'],
- 'read_only': False,
- },
- }],
- ['OS=="atari"', {
- 'variables': {
- 'command': ['echo', 'Hello World'],
- KEY_TRACKED: ['c'],
- KEY_UNTRACKED: ['d'],
- KEY_TOUCHED: ['touched_a'],
- 'read_only': True,
- },
- }, {
- 'variables': {
- 'command': ['echo', 'You should get an Atari'],
- KEY_TRACKED: ['e'],
- KEY_UNTRACKED: ['f'],
- KEY_TOUCHED: ['touched_e'],
- },
- }],
- ['OS=="coleco"', {
- }, {
- 'variables': {
- KEY_UNTRACKED: ['h'],
- },
- }],
- ],
- }
- self.assertEquals(
- expected, isolate.convert_map_to_isolate_dict(values, oses))
-
- def test_merge_two_empty(self):
-    # Flat stays flat. Pylint is confused about union() return type.
- # pylint: disable=E1103
- actual = isolate.union(
- isolate.union(
- isolate.Configs([], None),
- isolate.load_isolate_as_config({}, None, [])),
- isolate.load_isolate_as_config({}, None, [])).flatten()
- self.assertEquals({}, actual)
-
- def test_merge_empty(self):
- actual = isolate.convert_map_to_isolate_dict(
- *isolate.reduce_inputs(*isolate.invert_map({})))
- self.assertEquals({}, actual)
-
- def test_load_two_conditions(self):
- linux = {
- 'conditions': [
- ['OS=="linux"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_linux',
- 'file_common',
- ],
- },
- }],
- ],
- }
- mac = {
- 'conditions': [
- ['OS=="mac"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_mac',
- 'file_common',
- ],
- },
- }],
- ],
- }
- expected = {
- 'linux': {
- 'isolate_dependency_tracked': ['file_common', 'file_linux'],
- },
- 'mac': {
- 'isolate_dependency_tracked': ['file_common', 'file_mac'],
- },
- }
- # Pylint is confused about union() return type.
- # pylint: disable=E1103
- configs = isolate.union(
- isolate.union(
- isolate.Configs([], None),
- isolate.load_isolate_as_config(linux, None, [])),
- isolate.load_isolate_as_config(mac, None, [])).flatten()
- self.assertEquals(expected, configs)
-
- def test_load_three_conditions(self):
- linux = {
- 'conditions': [
- ['OS=="linux"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_linux',
- 'file_common',
- ],
- },
- }],
- ],
- }
- mac = {
- 'conditions': [
- ['OS=="mac"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_mac',
- 'file_common',
- ],
- },
- }],
- ],
- }
- win = {
- 'conditions': [
- ['OS=="win"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_win',
- 'file_common',
- ],
- },
- }],
- ],
- }
- expected = {
- 'linux': {
- 'isolate_dependency_tracked': ['file_common', 'file_linux'],
- },
- 'mac': {
- 'isolate_dependency_tracked': ['file_common', 'file_mac'],
- },
- 'win': {
- 'isolate_dependency_tracked': ['file_common', 'file_win'],
- },
- }
- # Pylint is confused about union() return type.
- # pylint: disable=E1103
- configs = isolate.union(
- isolate.union(
- isolate.union(
- isolate.Configs([], None),
- isolate.load_isolate_as_config(linux, None, [])),
- isolate.load_isolate_as_config(mac, None, [])),
- isolate.load_isolate_as_config(win, None, [])).flatten()
- self.assertEquals(expected, configs)
-
- def test_merge_three_conditions(self):
- values = {
- 'linux': {
- 'isolate_dependency_tracked': ['file_common', 'file_linux'],
- },
- 'mac': {
- 'isolate_dependency_tracked': ['file_common', 'file_mac'],
- },
- 'win': {
- 'isolate_dependency_tracked': ['file_common', 'file_win'],
- },
- }
- expected = {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_common',
- ],
- },
- 'conditions': [
- ['OS=="linux"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_linux',
- ],
- },
- }],
- ['OS=="mac"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_mac',
- ],
- },
- }],
- ['OS=="win"', {
- 'variables': {
- 'isolate_dependency_tracked': [
- 'file_win',
- ],
- },
- }],
- ],
- }
- actual = isolate.convert_map_to_isolate_dict(
- *isolate.reduce_inputs(*isolate.invert_map(values)))
- self.assertEquals(expected, actual)
-
- def test_configs_comment(self):
-    # Pylint is confused about isolate.union() return type.
- # pylint: disable=E1103
- configs = isolate.union(
- isolate.load_isolate_as_config({}, '# Yo dawg!\n# Chill out.\n', []),
- isolate.load_isolate_as_config({}, None, []))
- self.assertEquals('# Yo dawg!\n# Chill out.\n', configs.file_comment)
-
- configs = isolate.union(
- isolate.load_isolate_as_config({}, None, []),
- isolate.load_isolate_as_config({}, '# Yo dawg!\n# Chill out.\n', []))
- self.assertEquals('# Yo dawg!\n# Chill out.\n', configs.file_comment)
-
- # Only keep the first one.
- configs = isolate.union(
- isolate.load_isolate_as_config({}, '# Yo dawg!\n', []),
- isolate.load_isolate_as_config({}, '# Chill out.\n', []))
- self.assertEquals('# Yo dawg!\n', configs.file_comment)
-
- def test_extract_comment(self):
- self.assertEquals(
- '# Foo\n# Bar\n', isolate.extract_comment('# Foo\n# Bar\n{}'))
- self.assertEquals('', isolate.extract_comment('{}'))
-
- def _test_pretty_print_impl(self, value, expected):
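-    # pretty_print() serializes the dict in .isolate style; the comparison is
-    # against the exact rendered text, whitespace included.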
- actual = cStringIO.StringIO()
- isolate.pretty_print(value, actual)
- self.assertEquals(expected, actual.getvalue())
-
- def test_pretty_print_empty(self):
- self._test_pretty_print_impl({}, '{\n}\n')
-
- def test_pretty_print_mid_size(self):
- value = {
- 'variables': {
- 'bar': [
- 'file1',
- 'file2',
- ],
- },
- 'conditions': [
- ['OS=\"foo\"', {
- 'variables': {
- isolate.KEY_UNTRACKED: [
- 'dir1',
- 'dir2',
- ],
- isolate.KEY_TRACKED: [
- 'file4',
- 'file3',
- ],
- 'command': ['python', '-c', 'print "H\\i\'"'],
- 'read_only': True,
- 'relative_cwd': 'isol\'at\\e',
- },
- }],
- ['OS=\"bar\"', {
- 'variables': {},
- }, {
- 'variables': {},
- }],
- ],
- }
- expected = (
- "{\n"
- " 'variables': {\n"
- " 'bar': [\n"
- " 'file1',\n"
- " 'file2',\n"
- " ],\n"
- " },\n"
- " 'conditions': [\n"
- " ['OS=\"foo\"', {\n"
- " 'variables': {\n"
- " 'command': [\n"
- " 'python',\n"
- " '-c',\n"
- " 'print \"H\\i\'\"',\n"
- " ],\n"
- " 'relative_cwd': 'isol\\'at\\\\e',\n"
- " 'read_only': True\n"
- " 'isolate_dependency_tracked': [\n"
- " 'file4',\n"
- " 'file3',\n"
- " ],\n"
- " 'isolate_dependency_untracked': [\n"
- " 'dir1',\n"
- " 'dir2',\n"
- " ],\n"
- " },\n"
- " }],\n"
- " ['OS=\"bar\"', {\n"
- " 'variables': {\n"
- " },\n"
- " }, {\n"
- " 'variables': {\n"
- " },\n"
- " }],\n"
- " ],\n"
- "}\n")
- self._test_pretty_print_impl(value, expected)
-
-
-if __name__ == '__main__':
- logging.basicConfig(
- level=logging.DEBUG if '-v' in sys.argv else logging.ERROR,
- format='%(levelname)5s %(filename)15s(%(lineno)3d): %(message)s')
- unittest.main()
diff --git a/tools/isolate/tests/run_test_cases/sleep.py b/tools/isolate/tests/run_test_cases/sleep.py
deleted file mode 100755
index 552f973..0000000
--- a/tools/isolate/tests/run_test_cases/sleep.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Sleeps."""
-
-import sys
-import time
-
-
-def main():
- assert len(sys.argv) == 2
- print 'Sleeping.'
- sys.stdout.flush()
- time.sleep(float(sys.argv[1]))
- print 'Slept.'
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/run_test_cases_smoke_test.py b/tools/isolate/tests/run_test_cases_smoke_test.py
deleted file mode 100755
index 0adb764..0000000
--- a/tools/isolate/tests/run_test_cases_smoke_test.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import re
-import subprocess
-import sys
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-sys.path.append(os.path.join(ROOT_DIR, 'tests', 'gtest_fake'))
-
-import gtest_fake_base
-
-
-def RunTest(test_file, extra_flags):
- target = os.path.join(ROOT_DIR, 'tests', 'gtest_fake', test_file)
- cmd = [
- sys.executable,
- os.path.join(ROOT_DIR, 'run_test_cases.py'),
- ] + extra_flags
-
- cmd.append(target)
- logging.debug(' '.join(cmd))
- proc = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- # pylint is confused.
- out, err = proc.communicate() or ('', '')
-
- return (out, err, proc.returncode)
-
-
-class TraceTestCases(unittest.TestCase):
- def setUp(self):
-    # Make sure there's no environment variable that could cause side effects.
- os.environ.pop('GTEST_SHARD_INDEX', '')
- os.environ.pop('GTEST_TOTAL_SHARDS', '')
-
- self.filename = 'test.results'
-
- def tearDown(self):
- if os.path.exists(self.filename):
- os.remove(self.filename)
-
- def _check_results(self, expected_out_re, out, err):
- if sys.platform == 'win32':
- out = out.replace('\r\n', '\n')
- lines = out.splitlines()
-
- for index in range(len(expected_out_re)):
- line = lines.pop(0)
- self.assertTrue(
- re.match('^%s$' % expected_out_re[index], line),
- (index, expected_out_re[index], repr(line)))
- self.assertEqual([], lines)
- self.assertEqual('', err)
-
- def _check_results_file(self, expected_file_contents_entries):
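-    # The results file is a JSON dict mapping each test case name to a list
-    # with one entry per execution of that test case.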
- self.assertTrue(os.path.exists(self.filename))
-
- with open(self.filename) as f:
- file_contents = json.load(f)
-
- self.assertEqual(len(expected_file_contents_entries), len(file_contents))
- for (entry_name, entry_count) in expected_file_contents_entries:
- self.assertTrue(entry_name in file_contents)
- self.assertEqual(entry_count, len(file_contents[entry_name]))
-
- def test_simple_pass(self):
- out, err, return_code = RunTest(
- 'gtest_fake_pass.py', ['--result', self.filename])
-
- self.assertEqual(0, return_code)
-
- expected_out_re = [
- r'\[\d/\d\] \d\.\d\ds .+',
- r'\[\d/\d\] \d\.\d\ds .+',
- r'\[\d/\d\] \d\.\d\ds .+',
- re.escape('Summary:'),
- re.escape('Success: 3 100.00%'),
- re.escape('Flaky: 0 0.00%'),
- re.escape('Fail: 0 0.00%'),
- r'\d+\.\ds Done running 3 tests with 3 executions. \d+\.\d test/s',
- ]
- self._check_results(expected_out_re, out, err)
-
- expected_result_file_entries = [
- ('Foo.Bar1', 1),
- ('Foo.Bar2', 1),
- ('Foo.Bar3', 1)
- ]
- self._check_results_file(expected_result_file_entries)
-
- def test_simple_fail(self):
- out, err, return_code = RunTest(
- 'gtest_fake_fail.py', ['--result', self.filename])
-
- self.assertEqual(1, return_code)
-
- expected_out_re = [
- r'\[\d/\d\] \d\.\d\ds .+',
- r'\[\d/\d\] \d\.\d\ds .+',
- r'\[\d/\d\] \d\.\d\ds .+',
- r'\[\d/\d\] \d\.\d\ds .+',
- r'\[\d/\d\] \d\.\d\ds .+',
- r'\[\d/\d\] \d\.\d\ds .+',
- re.escape('Note: Google Test filter = Baz.Fail'),
- r'',
- ] + [
- re.escape(l) for l in
- gtest_fake_base.get_test_output('Baz.Fail').splitlines()
- ] + [
- '',
- ] + [
- re.escape(l) for l in gtest_fake_base.get_footer(1, 1).splitlines()
- ] + [
- '',
- re.escape('Summary:'),
- re.escape('Baz.Fail failed'),
- re.escape('Success: 3 75.00%'),
- re.escape('Flaky: 0 0.00%'),
- re.escape('Fail: 1 25.00%'),
- r'\d+\.\ds Done running 4 tests with 6 executions. \d+\.\d test/s',
- ]
- self._check_results(expected_out_re, out, err)
-
- expected_result_file_entries = [
- ('Foo.Bar1', 1),
- ('Foo.Bar2', 1),
- ('Foo.Bar3', 1),
- ('Baz.Fail', 3)
- ]
- self._check_results_file(expected_result_file_entries)
-
- def test_simple_gtest_list_error(self):
- out, err, return_code = RunTest(
- 'gtest_fake_error.py', ['--no-dump'])
-
- expected_out_re = [
- 'Failed to run %s %s --gtest_list_tests' % (
- sys.executable,
- os.path.join(ROOT_DIR, 'tests', 'gtest_fake', 'gtest_fake_error.py')),
- 'stdout:',
- '',
- 'stderr:',
- 'Unable to list tests'
- ]
-
- self.assertEqual(1, return_code)
- self._check_results(expected_out_re, out, err)
-
- def test_gtest_list_tests(self):
- out, err, return_code = RunTest(
- 'gtest_fake_fail.py', ['--gtest_list_tests'])
-
- expected_out = (
- 'Foo.\n Bar1\n Bar2\n Bar3\nBaz.\n Fail\n'
- ' YOU HAVE 2 tests with ignored failures (FAILS prefix)\n\n')
- self.assertEqual(0, return_code)
- self.assertEqual(expected_out, out)
- self.assertEqual('', err)
-
-
-if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
diff --git a/tools/isolate/tests/run_test_cases_test.py b/tools/isolate/tests/run_test_cases_test.py
deleted file mode 100755
index 4e706a9..0000000
--- a/tools/isolate/tests/run_test_cases_test.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import sys
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-
-import run_test_cases
-
-SLEEP = os.path.join(ROOT_DIR, 'tests', 'run_test_cases', 'sleep.py')
-
-
-def to_native_eol(string):
- if sys.platform == 'win32':
- return string.replace('\n', '\r\n')
- return string
-
-
-class ListTestCasesTest(unittest.TestCase):
- def test_shards(self):
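-    # Each tuple is (expected, range_length, index, shards), mirroring the
-    # filter_shards(range(range_length), index, shards) call below. Judging
-    # from the expectations, each shard takes a contiguous chunk of
-    # ceil(range_length / shards) items.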
- test_cases = (
- (range(10), 10, 0, 1),
-
- ([0, 1], 5, 0, 3),
- ([2, 3], 5, 1, 3),
- ([4 ], 5, 2, 3),
-
- ([0], 5, 0, 7),
- ([1], 5, 1, 7),
- ([2], 5, 2, 7),
- ([3], 5, 3, 7),
- ([4], 5, 4, 7),
- ([ ], 5, 5, 7),
- ([ ], 5, 6, 7),
-
- ([0, 1], 4, 0, 2),
- ([2, 3], 4, 1, 2),
- )
- for expected, range_length, index, shards in test_cases:
- result = run_test_cases.filter_shards(range(range_length), index, shards)
- self.assertEquals(
- expected, result, (result, expected, range_length, index, shards))
-
-
-class RunTestCases(unittest.TestCase):
- def test_call(self):
- cmd = [sys.executable, SLEEP, '0.001']
- # 0 means no timeout, like None.
- output, code = run_test_cases.call_with_timeout(cmd, 0)
- self.assertEquals(to_native_eol('Sleeping.\nSlept.\n'), output)
- self.assertEquals(0, code)
-
- def test_call_eol(self):
- cmd = [sys.executable, SLEEP, '0.001']
- # 0 means no timeout, like None.
- output, code = run_test_cases.call_with_timeout(
- cmd, 0, universal_newlines=True)
- self.assertEquals('Sleeping.\nSlept.\n', output)
- self.assertEquals(0, code)
-
- def test_call_timed_out_kill(self):
- cmd = [sys.executable, SLEEP, '100']
- # On a loaded system, this can be tight.
- output, code = run_test_cases.call_with_timeout(cmd, timeout=1)
- self.assertEquals(to_native_eol('Sleeping.\n'), output)
- if sys.platform == 'win32':
- self.assertEquals(1, code)
- else:
- self.assertEquals(-9, code)
-
- def test_call_timed_out_kill_eol(self):
- cmd = [sys.executable, SLEEP, '100']
- # On a loaded system, this can be tight.
- output, code = run_test_cases.call_with_timeout(
- cmd, timeout=1, universal_newlines=True)
- self.assertEquals('Sleeping.\n', output)
- if sys.platform == 'win32':
- self.assertEquals(1, code)
- else:
- self.assertEquals(-9, code)
-
- def test_call_timeout_no_kill(self):
- cmd = [sys.executable, SLEEP, '0.001']
- output, code = run_test_cases.call_with_timeout(cmd, timeout=100)
- self.assertEquals(to_native_eol('Sleeping.\nSlept.\n'), output)
- self.assertEquals(0, code)
-
- def test_call_timeout_no_kill_eol(self):
- cmd = [sys.executable, SLEEP, '0.001']
- output, code = run_test_cases.call_with_timeout(
- cmd, timeout=100, universal_newlines=True)
- self.assertEquals('Sleeping.\nSlept.\n', output)
- self.assertEquals(0, code)
-
- def test_gtest_filter(self):
- old = run_test_cases.run_test_cases
- exe = os.path.join(ROOT_DIR, 'tests', 'gtest_fake', 'gtest_fake_pass.py')
- def expect(executable, test_cases, jobs, timeout, run_all, result_file):
- self.assertEquals(run_test_cases.fix_python_path([exe]), executable)
- self.assertEquals(['Foo.Bar1', 'Foo.Bar3'], test_cases)
- self.assertEquals(run_test_cases.num_processors(), jobs)
- self.assertEquals(120, timeout)
- self.assertEquals(False, run_all)
- self.assertEquals(exe + '.run_test_cases', result_file)
- return 89
-
- try:
- run_test_cases.run_test_cases = expect
- result = run_test_cases.main([exe, '--gtest_filter=Foo.Bar*-*.Bar2'])
- self.assertEquals(89, result)
- finally:
- run_test_cases.run_test_cases = old
-
- def testRunSome(self):
- tests = [
- # Try with named arguments. Accepts 3*1 failures.
- (
- run_test_cases.RunSome(
- expected_count=10,
- retries=3,
- min_failures=1,
- max_failure_ratio=0.001),
- [False] * 4),
- # Same without named arguments.
- (run_test_cases.RunSome( 10, 3, 1, 0.001), [False] * 4),
-
- (run_test_cases.RunSome( 10, 1, 1, 0.001), [False] * 2),
- (run_test_cases.RunSome( 10, 1, 1, 0.010), [False] * 2),
-
-      # For low expected_count values, retries * min_failures is the lower
-      # bound on accepted failures.
- (run_test_cases.RunSome( 10, 3, 1, 0.010), [False] * 4),
- (run_test_cases.RunSome( 10, 3, 1, 0.020), [False] * 4),
- (run_test_cases.RunSome( 10, 3, 1, 0.050), [False] * 4),
- (run_test_cases.RunSome( 10, 3, 1, 0.100), [False] * 4),
- (run_test_cases.RunSome( 10, 3, 1, 0.110), [False] * 4),
-
- # Allows expected_count + retries failures at maximum.
- (run_test_cases.RunSome( 10, 3, 1, 0.200), [False] * 6),
- (run_test_cases.RunSome( 10, 3, 1, 0.999), [False] * 30),
-
-      # The asymptote approaches max_failure_ratio for large expected_count
-      # values.
- (run_test_cases.RunSome(1000, 3, 1, 0.050), [False] * 150),
- ]
- for index, (decider, rounds) in enumerate(tests):
- for index2, r in enumerate(rounds):
- self.assertFalse(decider.should_stop(), (index, index2, str(decider)))
- decider.got_result(r)
- self.assertTrue(decider.should_stop(), (index, str(decider)))
-
- def testStatsInfinite(self):
- decider = run_test_cases.RunAll()
- for _ in xrange(200):
- self.assertFalse(decider.should_stop())
- decider.got_result(False)
-
-
-class WorkerPoolTest(unittest.TestCase):
- def test_normal(self):
- mapper = lambda value: -value
- with run_test_cases.ThreadPool(8) as pool:
- for i in range(32):
- pool.add_task(mapper, i)
- results = pool.join()
- self.assertEquals(range(-31, 1), sorted(results))
-
- def test_exception(self):
- class FearsomeException(Exception):
- pass
- def mapper(value):
- raise FearsomeException(value)
- task_added = False
- try:
- with run_test_cases.ThreadPool(8) as pool:
- pool.add_task(mapper, 0)
- task_added = True
- pool.join()
- self.fail()
- except FearsomeException:
- self.assertEquals(True, task_added)
-
-
-if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
diff --git a/tools/isolate/tests/run_test_from_archive/check_files.py b/tools/isolate/tests/run_test_from_archive/check_files.py
deleted file mode 100755
index cd429ef..0000000
--- a/tools/isolate/tests/run_test_from_archive/check_files.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Checks all the expected files are mapped."""
-
-import os
-import sys
-
-ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
-
-
-def main():
- expected = sorted([
- 'check_files.py',
- 'gtest_fake.py',
- 'file1.txt',
- 'file2.txt',
- ])
- actual = sorted(os.listdir(ROOT_DIR))
- if expected != actual:
- print >> sys.stderr, 'Expected list doesn\'t match:'
- print >> sys.stderr, ', '.join(expected)
- print >> sys.stderr, ', '.join(actual)
- return 1
-
- # Check that file2.txt is in reality file3.txt.
- with open(os.path.join(ROOT_DIR, 'file2.txt'), 'rb') as f:
- if f.read() != 'File3\n':
- print >> sys.stderr, 'file2.txt should be file3.txt in reality'
- return 2
-
- print 'Success'
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/run_test_from_archive/check_files.results b/tools/isolate/tests/run_test_from_archive/check_files.results
deleted file mode 100644
index f7a2c21..0000000
--- a/tools/isolate/tests/run_test_from_archive/check_files.results
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "command": ["python", "check_files.py"],
- "includes": [
- "c926928a4fe1eb59159849816076f49f602e76cf",
- "ebb03d931934088b29a5010f67166447a7a8001a"
- ],
- "files": {
- "check_files.py": {
- "sha-1": "7057184fcc8351aced8f4222fa9abbb5defc7b9e"
- },
- "file2.txt": {
- "sha-1": "7c0095bad1bcbf38e36d208e2b685c5d279146a5"
- }
- }
-}
diff --git a/tools/isolate/tests/run_test_from_archive/file1.txt b/tools/isolate/tests/run_test_from_archive/file1.txt
deleted file mode 100644
index 03f128c..0000000
--- a/tools/isolate/tests/run_test_from_archive/file1.txt
+++ /dev/null
@@ -1 +0,0 @@
-File1
diff --git a/tools/isolate/tests/run_test_from_archive/file1_copy.txt b/tools/isolate/tests/run_test_from_archive/file1_copy.txt
deleted file mode 100644
index 03f128c..0000000
--- a/tools/isolate/tests/run_test_from_archive/file1_copy.txt
+++ /dev/null
@@ -1 +0,0 @@
-File1
diff --git a/tools/isolate/tests/run_test_from_archive/file2.txt b/tools/isolate/tests/run_test_from_archive/file2.txt
deleted file mode 100644
index 8b75520..0000000
--- a/tools/isolate/tests/run_test_from_archive/file2.txt
+++ /dev/null
@@ -1 +0,0 @@
-File2.txt
diff --git a/tools/isolate/tests/run_test_from_archive/file3.txt b/tools/isolate/tests/run_test_from_archive/file3.txt
deleted file mode 100644
index a0ad61a..0000000
--- a/tools/isolate/tests/run_test_from_archive/file3.txt
+++ /dev/null
@@ -1 +0,0 @@
-File3
diff --git a/tools/isolate/tests/run_test_from_archive/gtest_fake.py b/tools/isolate/tests/run_test_from_archive/gtest_fake.py
deleted file mode 100755
index e9c3560..0000000
--- a/tools/isolate/tests/run_test_from_archive/gtest_fake.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Simulate a google-test executable.
-
-http://code.google.com/p/googletest/
-"""
-
-import optparse
-import sys
-
-
-TESTS = {
- 'Foo': ['Bar1', 'Bar2', 'Bar3'],
- 'Baz': ['Fail'],
-}
-TOTAL = sum(len(v) for v in TESTS.itervalues())
-
-
-def get_test_output(test_name):
- fixture, case = test_name.split('.', 1)
- return (
- '[==========] Running 1 test from 1 test case.\n'
- '[----------] Global test environment set-up.\n'
- '[----------] 1 test from %(fixture)s\n'
- '[ RUN ] %(fixture)s.%(case)s\n'
- '[ OK ] %(fixture)s.%(case)s (0 ms)\n'
- '[----------] 1 test from %(fixture)s (0 ms total)\n'
- '\n') % {
- 'fixture': fixture,
- 'case': case,
- }
-
-
-def get_footer(number):
- return (
- '[----------] Global test environment tear-down\n'
- '[==========] %(number)d test from %(total)d test case ran. (0 ms total)\n'
- '[ PASSED ] %(number)d test.\n'
- '\n'
- ' YOU HAVE 5 DISABLED TESTS\n'
- '\n'
- ' YOU HAVE 2 tests with ignored failures (FAILS prefix)\n') % {
- 'number': number,
- 'total': TOTAL,
- }
-
-
-def main():
- parser = optparse.OptionParser()
- parser.add_option('--gtest_list_tests', action='store_true')
- parser.add_option('--gtest_filter')
- options, args = parser.parse_args()
- if args:
- parser.error('Failed to process args %s' % args)
-
- if options.gtest_list_tests:
- for fixture, cases in TESTS.iteritems():
- print '%s.' % fixture
- for case in cases:
- print ' ' + case
- print ' YOU HAVE 2 tests with ignored failures (FAILS prefix)'
- print ''
- return 0
-
- if options.gtest_filter:
- # Simulate running one test.
- print 'Note: Google Test filter = %s\n' % options.gtest_filter
- print get_test_output(options.gtest_filter)
- print get_footer(1)
- # Make Baz.Fail fail.
- return options.gtest_filter == 'Baz.Fail'
-
- for fixture, cases in TESTS.iteritems():
- for case in cases:
- print get_test_output('%s.%s' % (fixture, case))
- print get_footer(TOTAL)
- return 6
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/run_test_from_archive/gtest_fake.results b/tools/isolate/tests/run_test_from_archive/gtest_fake.results
deleted file mode 100644
index 015c07f..0000000
--- a/tools/isolate/tests/run_test_from_archive/gtest_fake.results
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "command": ["python", "gtest_fake.py"],
- "files": {
- "gtest_fake.py": {
- "sha-1": "de55b412b3cda61304b959f872bc21025dc20fdb"
- }
- }
-}
diff --git a/tools/isolate/tests/run_test_from_archive/manifest1.results b/tools/isolate/tests/run_test_from_archive/manifest1.results
deleted file mode 100644
index 0e79090..0000000
--- a/tools/isolate/tests/run_test_from_archive/manifest1.results
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "files": {
- "file1.txt": {
- "sha-1": "a64969e7ebc5e3627680a3fbbaa1b9181d03f1b4"
- }
- }
-}
diff --git a/tools/isolate/tests/run_test_from_archive/manifest2.results b/tools/isolate/tests/run_test_from_archive/manifest2.results
deleted file mode 100644
index 004558a..0000000
--- a/tools/isolate/tests/run_test_from_archive/manifest2.results
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "includes": ["858e2f54f479c439ba529f893b9a54a5346986de"],
- "files": {
- "file2.txt": {
- "sha-1": "4c5f884b8c98cd2064d36ffaf8512ff264f08316"
- }
- }
-}
diff --git a/tools/isolate/tests/run_test_from_archive/repeated_files.py b/tools/isolate/tests/run_test_from_archive/repeated_files.py
deleted file mode 100755
index a5a49d1..0000000
--- a/tools/isolate/tests/run_test_from_archive/repeated_files.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Checks all the expected files are mapped."""
-
-import os
-import sys
-
-ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
-
-
-def main():
- expected = sorted([
- 'repeated_files.py',
- 'file1.txt',
- 'file1_copy.txt',
- ])
- actual = sorted(os.listdir(ROOT_DIR))
- if expected != actual:
- print >> sys.stderr, 'Expected list doesn\'t match:'
- print >> sys.stderr, ', '.join(expected)
- print >> sys.stderr, ', '.join(actual)
- return 1
-
- print 'Success'
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/run_test_from_archive/repeated_files.results b/tools/isolate/tests/run_test_from_archive/repeated_files.results
deleted file mode 100644
index 6368d06..0000000
--- a/tools/isolate/tests/run_test_from_archive/repeated_files.results
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "command": ["python", "repeated_files.py"],
- "files": {
- "repeated_files.py": {
- "sha-1": "100f28891f7e0ef7a902bfe46f21b2972377a9ac"
- },
- "file1.txt": {
- "sha-1": "a64969e7ebc5e3627680a3fbbaa1b9181d03f1b4"
- },
- "file1_copy.txt": {
- "sha-1": "a64969e7ebc5e3627680a3fbbaa1b9181d03f1b4"
- }
- }
-}
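Note that file1.txt and file1_copy.txt share the same sha-1: the store keeps a
single blob, and the runner is expected to materialize it under both names,
which is exactly what test_link_all_hash_instances below verifies. A reduced
sketch of that step; using os.link here is an assumption, since a runner may
have to fall back to copying on filesystems without hard links:

    import os

    def materialize(cache_dir, out_dir, files):
      """Creates every mapped path from its content-addressed blob."""
      for path, props in files.iteritems():
        blob = os.path.join(cache_dir, props['sha-1'])
        os.link(blob, os.path.join(out_dir, path))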
diff --git a/tools/isolate/tests/run_test_from_archive_smoke_test.py b/tools/isolate/tests/run_test_from_archive_smoke_test.py
deleted file mode 100755
index c8b2ff08..0000000
--- a/tools/isolate/tests/run_test_from_archive_smoke_test.py
+++ /dev/null
@@ -1,230 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import hashlib
-import json
-import logging
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-
-VERBOSE = False
-
-
-class CalledProcessError(subprocess.CalledProcessError):
- """Makes 2.6 version act like 2.7"""
- def __init__(self, returncode, cmd, output, stderr, cwd):
- super(CalledProcessError, self).__init__(returncode, cmd)
- self.output = output
- self.stderr = stderr
- self.cwd = cwd
-
- def __str__(self):
- return super(CalledProcessError, self).__str__() + (
- '\n'
- 'cwd=%s\n%s\n%s\n%s') % (
- self.cwd,
- self.output,
- self.stderr,
- ' '.join(self.cmd))
-
-
-def list_files_tree(directory):
- """Returns the list of all the files in a tree."""
- actual = []
- for root, _dirs, files in os.walk(directory):
- actual.extend(os.path.join(root, f)[len(directory)+1:] for f in files)
- return sorted(actual)
-
-
-def calc_sha1(filepath):
- """Calculates the SHA-1 hash for a file."""
- return hashlib.sha1(open(filepath, 'rb').read()).hexdigest()
-
-
-def write_content(filepath, content):
- with open(filepath, 'wb') as f:
- f.write(content)
-
-
-def write_json(filepath, data):
- with open(filepath, 'wb') as f:
- json.dump(data, f, sort_keys=True, indent=2)
-
-
-class RunTestFromArchive(unittest.TestCase):
- def setUp(self):
- self.tempdir = tempfile.mkdtemp(prefix='run_test_from_archive_smoke_test')
- logging.debug(self.tempdir)
- # The "source" hash table.
- self.table = os.path.join(self.tempdir, 'table')
- os.mkdir(self.table)
- # The slave-side cache.
- self.cache = os.path.join(self.tempdir, 'cache')
-
- self.data_dir = os.path.join(ROOT_DIR, 'tests', 'run_test_from_archive')
-
- def tearDown(self):
- shutil.rmtree(self.tempdir)
-
- def _result_tree(self):
- return list_files_tree(self.tempdir)
-
- @staticmethod
- def _run(args):
- cmd = [sys.executable, os.path.join(ROOT_DIR, 'run_test_from_archive.py')]
- cmd.extend(args)
- if VERBOSE:
- cmd.extend(['-v'] * 2)
- pipe = None
- else:
- pipe = subprocess.PIPE
- logging.debug(' '.join(cmd))
- proc = subprocess.Popen(
- cmd, stdout=pipe, stderr=pipe, universal_newlines=True)
- out, err = proc.communicate()
- return out, err, proc.returncode
-
- def _store_result(self, result_data):
- """Stores a .results file in the hash table."""
- result_text = json.dumps(result_data, sort_keys=True, indent=2)
- result_sha1 = hashlib.sha1(result_text).hexdigest()
- write_content(os.path.join(self.table, result_sha1), result_text)
- return result_sha1
-
- def _store(self, filename):
- """Stores a test data file in the table.
-
- Returns its sha-1 hash.
- """
- filepath = os.path.join(self.data_dir, filename)
- h = calc_sha1(filepath)
- shutil.copyfile(filepath, os.path.join(self.table, h))
- return h
-
- def _generate_args(self, sha1_hash):
- """Generates the standard arguments used with sha1_hash as the hash.
-
- Returns a list of the required arguments.
- """
- return [
- '--hash', sha1_hash,
- '--cache', self.cache,
- '--remote', self.table,
- ]
-
- def test_result(self):
-    # Loads an arbitrary manifest from the file system.
- manifest = os.path.join(self.data_dir, 'gtest_fake.results')
- expected = [
- 'state.json',
- self._store('gtest_fake.py'),
- calc_sha1(manifest),
- ]
- args = [
- '--manifest', manifest,
- '--cache', self.cache,
- '--remote', self.table,
- ]
- out, err, returncode = self._run(args)
- if not VERBOSE:
- self.assertEquals('', err)
- self.assertEquals(1070, len(out), out)
- self.assertEquals(6, returncode)
- actual = list_files_tree(self.cache)
- self.assertEquals(sorted(expected), actual)
-
- def test_hash(self):
- # Loads the manifest from the store as a hash.
- result_sha1 = self._store('gtest_fake.results')
- expected = [
- 'state.json',
- self._store('gtest_fake.py'),
- result_sha1,
- ]
- args = [
- '--hash', result_sha1,
- '--cache', self.cache,
- '--remote', self.table,
- ]
- out, err, returncode = self._run(args)
- if not VERBOSE:
- self.assertEquals('', err)
- self.assertEquals(1070, len(out), out)
- self.assertEquals(6, returncode)
- actual = list_files_tree(self.cache)
- self.assertEquals(sorted(expected), actual)
-
- def test_fail_empty_manifest(self):
- result_sha1 = self._store_result({})
- expected = [
- 'state.json',
- result_sha1,
- ]
- out, err, returncode = self._run(self._generate_args(result_sha1))
- if not VERBOSE:
- self.assertEquals('', out)
- self.assertEquals('No command to run\n', err)
- self.assertEquals(1, returncode)
- actual = list_files_tree(self.cache)
- self.assertEquals(sorted(expected), actual)
-
- def test_includes(self):
- # Loads a manifest that includes another one.
-
- # References manifest1.results and gtest_fake.results. Maps file3.txt as
- # file2.txt.
- result_sha1 = self._store('check_files.results')
- expected = [
- 'state.json',
- self._store('check_files.py'),
- self._store('gtest_fake.py'),
- self._store('gtest_fake.results'),
- self._store('file1.txt'),
- self._store('file3.txt'),
- # Maps file1.txt.
- self._store('manifest1.results'),
-      # References manifest1.results. Maps file2.txt but it is overridden.
- self._store('manifest2.results'),
- result_sha1,
- ]
- out, err, returncode = self._run(self._generate_args(result_sha1))
- if not VERBOSE:
- self.assertEquals('', err)
- self.assertEquals('Success\n', out)
- self.assertEquals(0, returncode)
- actual = list_files_tree(self.cache)
- self.assertEquals(sorted(expected), actual)
-
- def test_link_all_hash_instances(self):
- # Load a manifest file with the same file (same sha-1 hash), listed under
- # two different names and ensure both are created.
- result_sha1 = self._store('repeated_files.results')
- expected = [
- 'state.json',
- result_sha1,
- self._store('file1.txt'),
- self._store('repeated_files.py')
- ]
-
- out, err, returncode = self._run(self._generate_args(result_sha1))
- if not VERBOSE:
- self.assertEquals('', err)
- self.assertEquals('Success\n', out)
- self.assertEquals(0, returncode)
- actual = list_files_tree(self.cache)
- self.assertEquals(sorted(expected), actual)
-
-
-if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
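For reference, the two invocation styles this smoke test drives correspond to
the following stand-alone calls; the cache and table paths are illustrative,
and result_sha1 is a placeholder for the hash of the .results file in the
store:

    import subprocess
    import sys

    base = [
      sys.executable, 'run_test_from_archive.py',
      '--cache', '/tmp/cache',
      '--remote', '/tmp/table',
    ]
    # Either point directly at a .results file on disk...
    subprocess.call(base + ['--manifest', 'gtest_fake.results'])
    # ...or reference it by its sha-1 in the remote store.
    result_sha1 = '<sha-1 of the .results file>'
    subprocess.call(base + ['--hash', result_sha1])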
diff --git a/tools/isolate/tests/run_test_from_archive_test.py b/tools/isolate/tests/run_test_from_archive_test.py
deleted file mode 100755
index c56cee8..0000000
--- a/tools/isolate/tests/run_test_from_archive_test.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import sys
-import time
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-
-import run_test_from_archive
-
-
-class RemoteTest(run_test_from_archive.Remote):
- @staticmethod
- def get_file_handler(_):
- def upload_file(item, _dest):
- if type(item) == type(Exception) and issubclass(item, Exception):
- raise item()
- elif isinstance(item, int):
- time.sleep(int(item) / 100)
- return upload_file
-
-
-class RunTestFromArchiveTest(unittest.TestCase):
- def test_load_manifest_empty(self):
- m = run_test_from_archive.load_manifest('{}')
- self.assertEquals({}, m)
-
- def test_load_manifest_good(self):
- data = {
- u'command': [u'foo', u'bar'],
- u'files': {
- u'a': {
- u'link': u'somewhere',
- u'mode': 123,
- u'timestamp': 456,
- },
- u'b': {
- u'mode': 123,
- u'sha-1': u'0123456789abcdef0123456789abcdef01234567'
- }
- },
- u'includes': [u'0123456789abcdef0123456789abcdef01234567'],
- u'os': run_test_from_archive.get_flavor(),
- u'read_only': False,
- u'relative_cwd': u'somewhere_else'
- }
- m = run_test_from_archive.load_manifest(json.dumps(data))
- self.assertEquals(data, m)
-
- def test_load_manifest_bad(self):
- data = {
- u'files': {
- u'a': {
- u'link': u'somewhere',
- u'sha-1': u'0123456789abcdef0123456789abcdef01234567'
- }
- },
- }
- try:
- run_test_from_archive.load_manifest(json.dumps(data))
- self.fail()
- except run_test_from_archive.ConfigError:
- pass
-
- def test_load_manifest_os_only(self):
- data = {
- u'os': run_test_from_archive.get_flavor(),
- }
- m = run_test_from_archive.load_manifest(json.dumps(data))
- self.assertEquals(data, m)
-
- def test_load_manifest_os_bad(self):
- data = {
- u'os': 'foo',
- }
- try:
- run_test_from_archive.load_manifest(json.dumps(data))
- self.fail()
- except run_test_from_archive.ConfigError:
- pass
-
- def test_remote_no_errors(self):
- files_to_handle = 50
- remote = RemoteTest('')
-
- for i in range(files_to_handle):
- remote.add_item(run_test_from_archive.Remote.MED, i, i)
-
- for i in range(files_to_handle):
- self.assertNotEqual(-1, remote.get_result())
- self.assertEqual(None, remote.next_exception())
- remote.join()
-
- def test_remote_with_errors(self):
- remote = RemoteTest('')
-
- remote.add_item(run_test_from_archive.Remote.MED, IOError, '')
- remote.add_item(run_test_from_archive.Remote.MED, Exception, '')
- remote.join()
-
- self.assertNotEqual(None, remote.next_exception())
- self.assertNotEqual(None, remote.next_exception())
- self.assertEqual(None, remote.next_exception())
-
-
-if __name__ == '__main__':
- logging.basicConfig(
- level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR))
- unittest.main()
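test_load_manifest_bad encodes the key validation rule: a file entry is either
a symlink (a 'link' key) or a blob (a 'sha-1' key), never both. A minimal
sketch of that one check; the real load_manifest raises ConfigError and
validates considerably more than this:

    def validate_file_entry(properties):
      """Rejects entries that claim to be both a symlink and a blob."""
      if 'link' in properties and 'sha-1' in properties:
        raise ValueError('a file cannot be both a link and a sha-1 blob')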
diff --git a/tools/isolate/tests/trace_inputs/child1.py b/tools/isolate/tests/trace_inputs/child1.py
deleted file mode 100755
index f0ec8c9..0000000
--- a/tools/isolate/tests/trace_inputs/child1.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import subprocess
-import sys
-
-
-def child():
- """When the gyp argument is not specified, the command is started from
- --root-dir directory.
- """
- print 'child from %s' % os.getcwd()
- # Force file opening with a non-normalized path.
- open(os.path.join('tests', '..', 'trace_inputs.py'), 'rb').close()
- open(os.path.join(
- 'tests', '..', 'tests', 'trace_inputs_smoke_test.py'), 'rb').close()
- # Do not wait for the child to exit.
- # Use relative directory.
- subprocess.Popen(
- ['python', 'child2.py'], cwd=os.path.join('tests', 'trace_inputs'))
- return 0
-
-
-def child_gyp():
- """When the gyp argument is specified, the command is started from --cwd
- directory.
- """
- print 'child_gyp from %s' % os.getcwd()
- # Force file opening.
- open(os.path.join('..', 'trace_inputs.py'), 'rb').close()
- open(os.path.join('..', 'tests', 'trace_inputs_smoke_test.py'), 'rb').close()
- # Do not wait for the child to exit.
- # Use relative directory.
- subprocess.Popen(['python', 'child2.py'], cwd='trace_inputs')
- return 0
-
-
-def main():
- if sys.argv[1] == '--child':
- return child()
- if sys.argv[1] == '--child-gyp':
- return child_gyp()
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/trace_inputs/child2.py b/tools/isolate/tests/trace_inputs/child2.py
deleted file mode 100755
index f873389..0000000
--- a/tools/isolate/tests/trace_inputs/child2.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-import time
-
-
-def main():
- print 'child2'
- # Introduce a race condition with the parent so the parent may have a chance
- # to exit before the child. Will be random.
- time.sleep(.01)
-
- if sys.platform in ('darwin', 'win32'):
- # Check for case-insensitive file system. This happens on Windows and OSX.
- open('Test_File.txt', 'rb').close()
- else:
- open('test_file.txt', 'rb').close()
-
- expected = {
- 'bar': 'Foo\n',
- 'foo': 'Bar\n',
- }
-
- root = 'files1'
- actual = dict(
- (filename, open(os.path.join(root, filename), 'rb').read())
- for filename in (os.listdir(root))
- if (filename != 'do_not_care.txt' and
- os.path.isfile(os.path.join(root, filename))))
-
- if actual != expected:
- print 'Failure'
- print actual
- print expected
- return 1
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/trace_inputs/files1/bar b/tools/isolate/tests/trace_inputs/files1/bar
deleted file mode 100644
index bc56c4d..0000000
--- a/tools/isolate/tests/trace_inputs/files1/bar
+++ /dev/null
@@ -1 +0,0 @@
-Foo
diff --git a/tools/isolate/tests/trace_inputs/files1/do_not_care.txt b/tools/isolate/tests/trace_inputs/files1/do_not_care.txt
deleted file mode 100644
index 9a10460..0000000
--- a/tools/isolate/tests/trace_inputs/files1/do_not_care.txt
+++ /dev/null
@@ -1 +0,0 @@
-This file is ignored.
diff --git a/tools/isolate/tests/trace_inputs/files1/foo b/tools/isolate/tests/trace_inputs/files1/foo
deleted file mode 100644
index ebd7525..0000000
--- a/tools/isolate/tests/trace_inputs/files1/foo
+++ /dev/null
@@ -1 +0,0 @@
-Bar
diff --git a/tools/isolate/tests/trace_inputs/files2 b/tools/isolate/tests/trace_inputs/files2
deleted file mode 120000
index 49a73ae..0000000
--- a/tools/isolate/tests/trace_inputs/files2
+++ /dev/null
@@ -1 +0,0 @@
-files1 \ No newline at end of file
diff --git a/tools/isolate/tests/trace_inputs/ignored.txt b/tools/isolate/tests/trace_inputs/ignored.txt
deleted file mode 100644
index ca599e4..0000000
--- a/tools/isolate/tests/trace_inputs/ignored.txt
+++ /dev/null
@@ -1 +0,0 @@
-This file is not read.
diff --git a/tools/isolate/tests/trace_inputs/symlink.py b/tools/isolate/tests/trace_inputs/symlink.py
deleted file mode 100755
index d75412f..0000000
--- a/tools/isolate/tests/trace_inputs/symlink.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-
-def main():
- print 'symlink: touches files2/'
- assert len(sys.argv) == 1
-
- expected = {
- 'bar': 'Foo\n',
- 'foo': 'Bar\n',
- }
-
-  if os.path.basename(os.getcwd()) != 'tests':
- print 'Start this script from inside "tests"'
- return 1
-
- root = os.path.join('trace_inputs', 'files2')
- actual = dict(
- (filename, open(os.path.join(root, filename), 'rb').read())
- for filename in (os.listdir(root)))
-
- if actual != expected:
- print 'Failure'
- print actual
- print expected
- return 2
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/trace_inputs/test_file.txt b/tools/isolate/tests/trace_inputs/test_file.txt
deleted file mode 100644
index bc56c4d..0000000
--- a/tools/isolate/tests/trace_inputs/test_file.txt
+++ /dev/null
@@ -1 +0,0 @@
-Foo
diff --git a/tools/isolate/tests/trace_inputs/touch_only.py b/tools/isolate/tests/trace_inputs/touch_only.py
deleted file mode 100755
index 58eba2c..0000000
--- a/tools/isolate/tests/trace_inputs/touch_only.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Uses different APIs to touch a file."""
-
-import os
-import sys
-
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-
-
-def main():
-  print 'Only checks whether a file exists, without opening it.'
- assert len(sys.argv) == 2
- path = os.path.join(BASE_DIR, 'test_file.txt')
- command = sys.argv[1]
- if command == 'access':
- return not os.access(path, os.R_OK)
- elif command == 'isfile':
- return not os.path.isfile(path)
- elif command == 'stat':
- return not os.stat(path).st_size
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/isolate/tests/trace_inputs_smoke_test.py b/tools/isolate/tests/trace_inputs_smoke_test.py
deleted file mode 100755
index 482be75..0000000
--- a/tools/isolate/tests/trace_inputs_smoke_test.py
+++ /dev/null
@@ -1,614 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-
-import run_test_cases
-
-FILENAME = os.path.basename(__file__)
-REL_DATA = os.path.join(u'tests', 'trace_inputs')
-VERBOSE = False
-
-
-class CalledProcessError(subprocess.CalledProcessError):
- """Makes 2.6 version act like 2.7"""
- def __init__(self, returncode, cmd, output, cwd):
- super(CalledProcessError, self).__init__(returncode, cmd)
- self.output = output
- self.cwd = cwd
-
- def __str__(self):
- return super(CalledProcessError, self).__str__() + (
- '\n'
- 'cwd=%s\n%s') % (self.cwd, self.output)
-
-
-class TraceInputsBase(unittest.TestCase):
- def setUp(self):
- self.tempdir = tempfile.mkdtemp(prefix='trace_smoke_test')
- self.log = os.path.join(self.tempdir, 'log')
- self.trace_inputs_path = os.path.join(ROOT_DIR, 'trace_inputs.py')
-
- # Wraps up all the differences between OSes here.
- # - Windows doesn't track initial_cwd.
- # - OSX replaces /usr/bin/python with /usr/bin/python2.7.
- self.cwd = os.path.join(ROOT_DIR, u'tests')
- self.initial_cwd = unicode(self.cwd)
- self.expected_cwd = unicode(ROOT_DIR)
- if sys.platform == 'win32':
- # Not supported on Windows.
- self.initial_cwd = None
- self.expected_cwd = None
-
-    # There are 3 kinds of references to python: self.executable,
-    # self.real_executable and self.naked_executable. Which one applies
-    # depends on how python was started.
- self.executable = sys.executable
- if sys.platform == 'darwin':
- # /usr/bin/python is a thunk executable that decides which version of
- # python gets executed.
- suffix = '.'.join(map(str, sys.version_info[0:2]))
- if os.access(self.executable + suffix, os.X_OK):
- # So it'll look like /usr/bin/python2.7
- self.executable += suffix
-
- import trace_inputs
- self.real_executable = trace_inputs.get_native_path_case(
- unicode(self.executable))
- trace_inputs = None
-
- # self.naked_executable will only be naked on Windows.
- self.naked_executable = unicode(sys.executable)
- if sys.platform == 'win32':
- self.naked_executable = os.path.basename(sys.executable)
-
- def tearDown(self):
- if VERBOSE:
- print 'Leaking: %s' % self.tempdir
- else:
- shutil.rmtree(self.tempdir)
-
- @staticmethod
- def get_child_command(from_data):
- """Returns command to run the child1.py."""
- cmd = [sys.executable]
- if from_data:
-      # When the gyp argument is specified, the command is started from the
-      # --cwd directory. In this case, 'tests'.
- cmd.extend([os.path.join('trace_inputs', 'child1.py'), '--child-gyp'])
- else:
-      # When the gyp argument is not specified, the command is started from
-      # the --root-dir directory.
- cmd.extend([os.path.join(REL_DATA, 'child1.py'), '--child'])
- return cmd
-
- @staticmethod
- def _size(*args):
- return os.stat(os.path.join(ROOT_DIR, *args)).st_size
-
-
-class TraceInputs(TraceInputsBase):
- def _execute(self, mode, command, cwd):
- cmd = [
- sys.executable,
- self.trace_inputs_path,
- mode,
- '--log', self.log,
- ]
- if VERBOSE:
- cmd.extend(['-v'] * 3)
- cmd.extend(command)
- logging.info('Command: %s' % ' '.join(cmd))
- p = subprocess.Popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=cwd,
- universal_newlines=True)
- out, err = p.communicate()
- if VERBOSE:
- print err
- if p.returncode:
- raise CalledProcessError(p.returncode, cmd, out + err, cwd)
- return out or ''
-
- def _trace(self, from_data):
- if from_data:
- cwd = os.path.join(ROOT_DIR, 'tests')
- else:
- cwd = ROOT_DIR
- return self._execute('trace', self.get_child_command(from_data), cwd=cwd)
-
- def test_trace(self):
- expected = '\n'.join((
- 'Total: 7',
- 'Non existent: 0',
- 'Interesting: 7 reduced to 6',
- ' tests/trace_inputs/child1.py'.replace('/', os.path.sep),
- ' tests/trace_inputs/child2.py'.replace('/', os.path.sep),
- ' tests/trace_inputs/files1/'.replace('/', os.path.sep),
- ' tests/trace_inputs/test_file.txt'.replace('/', os.path.sep),
- (' tests/%s' % FILENAME).replace('/', os.path.sep),
- ' trace_inputs.py',
- )) + '\n'
- trace_expected = '\n'.join((
- 'child from %s' % ROOT_DIR,
- 'child2',
- )) + '\n'
- trace_actual = self._trace(False)
- actual = self._execute(
- 'read',
- [
- '--root-dir', ROOT_DIR,
- '--blacklist', '.+\\.pyc',
- '--blacklist', '.*\\.svn',
- '--blacklist', '.*do_not_care\\.txt',
- ],
- cwd=ROOT_DIR)
- self.assertEquals(expected, actual)
- self.assertEquals(trace_expected, trace_actual)
-
- def test_trace_json(self):
- expected = {
- u'root': {
- u'children': [
- {
- u'children': [],
- u'command': [u'python', u'child2.py'],
- u'executable': self.naked_executable,
- u'files': [
- {
- u'path': os.path.join(REL_DATA, 'child2.py'),
- u'size': self._size(REL_DATA, 'child2.py'),
- },
- {
- u'path': os.path.join(REL_DATA, 'files1', 'bar'),
- u'size': self._size(REL_DATA, 'files1', 'bar'),
- },
- {
- u'path': os.path.join(REL_DATA, 'files1', 'foo'),
- u'size': self._size(REL_DATA, 'files1', 'foo'),
- },
- {
- u'path': os.path.join(REL_DATA, 'test_file.txt'),
- u'size': self._size(REL_DATA, 'test_file.txt'),
- },
- ],
- u'initial_cwd': self.initial_cwd,
- #u'pid': 123,
- },
- ],
- u'command': [
- unicode(self.executable),
- os.path.join(u'trace_inputs', 'child1.py'),
- u'--child-gyp',
- ],
- u'executable': self.real_executable,
- u'files': [
- {
- u'path': os.path.join(REL_DATA, 'child1.py'),
- u'size': self._size(REL_DATA, 'child1.py'),
- },
- {
- u'path': os.path.join(u'tests', u'trace_inputs_smoke_test.py'),
- u'size': self._size('tests', 'trace_inputs_smoke_test.py'),
- },
- {
- u'path': u'trace_inputs.py',
- u'size': self._size('trace_inputs.py'),
- },
- ],
- u'initial_cwd': self.initial_cwd,
- #u'pid': 123,
- },
- }
- trace_expected = '\n'.join((
- 'child_gyp from %s' % os.path.join(ROOT_DIR, 'tests'),
- 'child2',
- )) + '\n'
- trace_actual = self._trace(True)
- actual_text = self._execute(
- 'read',
- [
- '--root-dir', ROOT_DIR,
- '--blacklist', '.+\\.pyc',
- '--blacklist', '.*\\.svn',
- '--blacklist', '.*do_not_care\\.txt',
- '--json',
- ],
- cwd=ROOT_DIR)
- actual_json = json.loads(actual_text)
- self.assertEquals(list, actual_json.__class__)
- self.assertEquals(1, len(actual_json))
- actual_json = actual_json[0]
- # Removes the pids.
- self.assertTrue(actual_json['root'].pop('pid'))
- self.assertTrue(actual_json['root']['children'][0].pop('pid'))
- self.assertEquals(expected, actual_json)
- self.assertEquals(trace_expected, trace_actual)
-
-
-class TraceInputsImport(TraceInputsBase):
- def setUp(self):
- super(TraceInputsImport, self).setUp()
- import trace_inputs
- self.trace_inputs = trace_inputs
-
- def tearDown(self):
- del self.trace_inputs
- super(TraceInputsImport, self).tearDown()
-
-  # Similar to the TraceInputs test fixture except that it calls the function
- # directly, so the Results instance can be inspected.
- # Roughly, make sure the API is stable.
- def _execute_trace(self, command):
- # Similar to what trace_test_cases.py does.
- api = self.trace_inputs.get_api()
- _, _ = self.trace_inputs.trace(
- self.log, command, self.cwd, api, True)
- # TODO(maruel): Check
- #self.assertEquals(0, returncode)
- #self.assertEquals('', output)
- def blacklist(f):
- return f.endswith(('.pyc', '.svn', 'do_not_care.txt'))
- return self.trace_inputs.load_trace(self.log, ROOT_DIR, api, blacklist)
-
- def _gen_dict_wrong_path(self):
- """Returns the expected flattened Results when child1.py is called with the
- wrong relative path.
- """
- return {
- 'root': {
- 'children': [],
- 'command': [
- self.executable,
- os.path.join(REL_DATA, 'child1.py'),
- '--child',
- ],
- 'executable': self.real_executable,
- 'files': [],
- 'initial_cwd': self.initial_cwd,
- },
- }
-
- def _gen_dict_full(self):
- """Returns the expected flattened Results when child1.py is called with
- --child.
- """
- return {
- 'root': {
- 'children': [
- {
- 'children': [],
- 'command': ['python', 'child2.py'],
- 'executable': self.naked_executable,
- 'files': [
- {
- 'path': os.path.join(REL_DATA, 'child2.py'),
- 'size': self._size(REL_DATA, 'child2.py'),
- },
- {
- 'path': os.path.join(REL_DATA, 'files1', 'bar'),
- 'size': self._size(REL_DATA, 'files1', 'bar'),
- },
- {
- 'path': os.path.join(REL_DATA, 'files1', 'foo'),
- 'size': self._size(REL_DATA, 'files1', 'foo'),
- },
- {
- 'path': os.path.join(REL_DATA, 'test_file.txt'),
- 'size': self._size(REL_DATA, 'test_file.txt'),
- },
- ],
- 'initial_cwd': self.expected_cwd,
- },
- ],
- 'command': [
- self.executable,
- os.path.join(REL_DATA, 'child1.py'),
- '--child',
- ],
- 'executable': self.real_executable,
- 'files': [
- {
- 'path': os.path.join(REL_DATA, 'child1.py'),
- 'size': self._size(REL_DATA, 'child1.py'),
- },
- {
- u'path': os.path.join(u'tests', u'trace_inputs_smoke_test.py'),
- 'size': self._size('tests', 'trace_inputs_smoke_test.py'),
- },
- {
- 'path': u'trace_inputs.py',
- 'size': self._size('trace_inputs.py'),
- },
- ],
- 'initial_cwd': self.expected_cwd,
- },
- }
-
- def _gen_dict_full_gyp(self):
- """Returns the expected flattened results when child1.py is called with
- --child-gyp.
- """
- return {
- 'root': {
- 'children': [
- {
- 'children': [],
- 'command': ['python', 'child2.py'],
- 'executable': self.naked_executable,
- 'files': [
- {
- 'path': os.path.join(REL_DATA, 'child2.py'),
- 'size': self._size(REL_DATA, 'child2.py'),
- },
- {
- 'path': os.path.join(REL_DATA, 'files1', 'bar'),
- 'size': self._size(REL_DATA, 'files1', 'bar'),
- },
- {
- 'path': os.path.join(REL_DATA, 'files1', 'foo'),
- 'size': self._size(REL_DATA, 'files1', 'foo'),
- },
- {
- 'path': os.path.join(REL_DATA, 'test_file.txt'),
- 'size': self._size(REL_DATA, 'test_file.txt'),
- },
- ],
- 'initial_cwd': self.initial_cwd,
- },
- ],
- 'command': [
- self.executable,
- os.path.join('trace_inputs', 'child1.py'),
- '--child-gyp',
- ],
- 'executable': self.real_executable,
- 'files': [
- {
- 'path': os.path.join(REL_DATA, 'child1.py'),
- 'size': self._size(REL_DATA, 'child1.py'),
- },
- {
- 'path': os.path.join(u'tests', u'trace_inputs_smoke_test.py'),
- 'size': self._size('tests', 'trace_inputs_smoke_test.py'),
- },
- {
- 'path': u'trace_inputs.py',
- 'size': self._size('trace_inputs.py'),
- },
- ],
- 'initial_cwd': self.initial_cwd,
- },
- }
-
- def test_trace_wrong_path(self):
-    # Deliberately start the trace from the wrong path: starting from the
-    # 'tests' directory makes 'tests/tests/trace_inputs/child1.py'
-    # inaccessible, so the child2.py process is never started.
- results = self._execute_trace(self.get_child_command(False))
- expected = self._gen_dict_wrong_path()
- actual = results.flatten()
- self.assertTrue(actual['root'].pop('pid'))
- self.assertEquals(expected, actual)
-
- def test_trace(self):
- expected = self._gen_dict_full_gyp()
- results = self._execute_trace(self.get_child_command(True))
- actual = results.flatten()
- self.assertTrue(actual['root'].pop('pid'))
- self.assertTrue(actual['root']['children'][0].pop('pid'))
- self.assertEquals(expected, actual)
- files = [
- u'tests/trace_inputs/child1.py'.replace('/', os.path.sep),
- u'tests/trace_inputs/child2.py'.replace('/', os.path.sep),
- u'tests/trace_inputs/files1/'.replace('/', os.path.sep),
- u'tests/trace_inputs/test_file.txt'.replace('/', os.path.sep),
- u'tests/trace_inputs_smoke_test.py'.replace('/', os.path.sep),
- u'trace_inputs.py',
- ]
- def blacklist(f):
- return f.endswith(('.pyc', 'do_not_care.txt', '.git', '.svn'))
- simplified = self.trace_inputs.extract_directories(
- ROOT_DIR, results.files, blacklist)
- self.assertEquals(files, [f.path for f in simplified])
-
- def test_trace_multiple(self):
-    # Starts parallel threads and traces parallel child processes
-    # simultaneously. Some are started from the 'tests' directory, others from
-    # this script's directory. One trace fails. Verify everything else still
-    # goes on.
- parallel = 8
-
- def trace(tracer, cmd, cwd, tracename):
- resultcode, output = tracer.trace(
- cmd, cwd, tracename, True)
- return (tracename, resultcode, output)
-
- with run_test_cases.ThreadPool(parallel) as pool:
- api = self.trace_inputs.get_api()
- with api.get_tracer(self.log) as tracer:
- pool.add_task(
- trace, tracer, self.get_child_command(False), ROOT_DIR, 'trace1')
- pool.add_task(
- trace, tracer, self.get_child_command(True), self.cwd, 'trace2')
- pool.add_task(
- trace, tracer, self.get_child_command(False), ROOT_DIR, 'trace3')
- pool.add_task(
- trace, tracer, self.get_child_command(True), self.cwd, 'trace4')
- # Have this one fail since it's started from the wrong directory.
- pool.add_task(
- trace, tracer, self.get_child_command(False), self.cwd, 'trace5')
- pool.add_task(
- trace, tracer, self.get_child_command(True), self.cwd, 'trace6')
- pool.add_task(
- trace, tracer, self.get_child_command(False), ROOT_DIR, 'trace7')
- pool.add_task(
- trace, tracer, self.get_child_command(True), self.cwd, 'trace8')
- trace_results = pool.join()
- def blacklist(f):
- return f.endswith(('.pyc', 'do_not_care.txt', '.git', '.svn'))
- actual_results = api.parse_log(self.log, blacklist)
- self.assertEquals(8, len(trace_results))
- self.assertEquals(8, len(actual_results))
-
- # Convert to dict keyed on the trace name, simpler to verify.
- trace_results = dict((i[0], i[1:]) for i in trace_results)
- actual_results = dict((x.pop('trace'), x) for x in actual_results)
- self.assertEquals(sorted(trace_results), sorted(actual_results))
-
- # It'd be nice to start different kinds of processes.
- expected_results = [
- self._gen_dict_full(),
- self._gen_dict_full_gyp(),
- self._gen_dict_full(),
- self._gen_dict_full_gyp(),
- self._gen_dict_wrong_path(),
- self._gen_dict_full_gyp(),
- self._gen_dict_full(),
- self._gen_dict_full_gyp(),
- ]
- self.assertEquals(len(expected_results), len(trace_results))
-
- # See the comment above about the trace that fails because it's started from
- # the wrong directory.
- busted = 4
- for index, key in enumerate(sorted(actual_results)):
- self.assertEquals('trace%d' % (index + 1), key)
- self.assertEquals(2, len(trace_results[key]))
- # returncode
- self.assertEquals(0 if index != busted else 2, trace_results[key][0])
- # output
- self.assertEquals(actual_results[key]['output'], trace_results[key][1])
-
- self.assertEquals(['output', 'results'], sorted(actual_results[key]))
- results = actual_results[key]['results']
- results = results.strip_root(ROOT_DIR)
- actual = results.flatten()
- self.assertTrue(actual['root'].pop('pid'))
- if index != busted:
- self.assertTrue(actual['root']['children'][0].pop('pid'))
- self.assertEquals(expected_results[index], actual)
-
- if sys.platform != 'win32':
- def test_trace_symlink(self):
- expected = {
- 'root': {
- 'children': [],
- 'command': [
- self.executable,
- os.path.join('trace_inputs', 'symlink.py'),
- ],
- 'executable': self.real_executable,
- 'files': [
- {
- 'path': os.path.join(REL_DATA, 'files2', 'bar'),
- 'size': self._size(REL_DATA, 'files2', 'bar'),
- },
- {
- 'path': os.path.join(REL_DATA, 'files2', 'foo'),
- 'size': self._size(REL_DATA, 'files2', 'foo'),
- },
- {
- 'path': os.path.join(REL_DATA, 'symlink.py'),
- 'size': self._size(REL_DATA, 'symlink.py'),
- },
- ],
- 'initial_cwd': self.initial_cwd,
- },
- }
- cmd = [sys.executable, os.path.join('trace_inputs', 'symlink.py')]
- results = self._execute_trace(cmd)
- actual = results.flatten()
- self.assertTrue(actual['root'].pop('pid'))
- self.assertEquals(expected, actual)
- files = [
- # In particular, the symlink is *not* resolved.
- u'tests/trace_inputs/files2/'.replace('/', os.path.sep),
- u'tests/trace_inputs/symlink.py'.replace('/', os.path.sep),
- ]
- def blacklist(f):
- return f.endswith(('.pyc', '.svn', 'do_not_care.txt'))
- simplified = self.trace_inputs.extract_directories(
- ROOT_DIR, results.files, blacklist)
- self.assertEquals(files, [f.path for f in simplified])
-
- def test_trace_quoted(self):
- results = self._execute_trace([sys.executable, '-c', 'print("hi")'])
- expected = {
- 'root': {
- 'children': [],
- 'command': [
- self.executable,
- '-c',
- 'print("hi")',
- ],
- 'executable': self.real_executable,
- 'files': [],
- 'initial_cwd': self.initial_cwd,
- },
- }
- actual = results.flatten()
- self.assertTrue(actual['root'].pop('pid'))
- self.assertEquals(expected, actual)
-
- def _touch_expected(self, command):
-    # Looks for files that were touched but not opened, using different APIs.
- results = self._execute_trace(
- [sys.executable, os.path.join('trace_inputs', 'touch_only.py'), command])
- expected = {
- 'root': {
- 'children': [],
- 'command': [
- self.executable,
- os.path.join('trace_inputs', 'touch_only.py'),
- command,
- ],
- 'executable': self.real_executable,
- 'files': [
- {
- 'path': os.path.join(REL_DATA, 'test_file.txt'),
- 'size': 0,
- },
- {
- 'path': os.path.join(REL_DATA, 'touch_only.py'),
- 'size': self._size(REL_DATA, 'touch_only.py'),
- },
- ],
- 'initial_cwd': self.initial_cwd,
- },
- }
- if sys.platform != 'linux2':
- # TODO(maruel): Remove once properly implemented.
- expected['root']['files'].pop(0)
-
- actual = results.flatten()
- self.assertTrue(actual['root'].pop('pid'))
- self.assertEquals(expected, actual)
-
- def test_trace_touch_only_access(self):
- self._touch_expected('access')
-
- def test_trace_touch_only_isfile(self):
- self._touch_expected('isfile')
-
- def test_trace_touch_only_stat(self):
- self._touch_expected('stat')
-
-
-if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
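The _execute helper above boils down to two command lines: one to record a
trace, one to read it back. A condensed, stand-alone version of the same flow;
the paths are illustrative and assume the old tools/isolate directory as the
working directory:

    import subprocess
    import sys

    LOG = '/tmp/trace.log'
    # Phase 1: run the command under the OS-specific tracer.
    subprocess.check_call([
      sys.executable, 'trace_inputs.py', 'trace', '--log', LOG,
      sys.executable, 'tests/trace_inputs/child1.py', '--child',
    ])
    # Phase 2: parse the log and print the touched files as JSON.
    out = subprocess.check_output([
      sys.executable, 'trace_inputs.py', 'read', '--log', LOG,
      '--root-dir', '.', '--blacklist', r'.+\.pyc', '--json',
    ])
    print out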
diff --git a/tools/isolate/tests/trace_inputs_test.py b/tools/isolate/tests/trace_inputs_test.py
deleted file mode 100755
index 2c82f21..0000000
--- a/tools/isolate/tests/trace_inputs_test.py
+++ /dev/null
@@ -1,448 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import unittest
-import sys
-
-BASE_DIR = unicode(os.path.dirname(os.path.abspath(__file__)))
-ROOT_DIR = os.path.dirname(BASE_DIR)
-sys.path.insert(0, ROOT_DIR)
-
-FILE_PATH = unicode(os.path.abspath(__file__))
-
-import trace_inputs
-
-
-def join_norm(*args):
- """Joins and normalizes path in a single step."""
- return unicode(os.path.normpath(os.path.join(*args)))
-
-
-class TraceInputs(unittest.TestCase):
- def test_process_quoted_arguments(self):
- test_cases = (
- ('"foo"', ['foo']),
- ('"foo", "bar"', ['foo', 'bar']),
- ('"foo"..., "bar"', ['foo', 'bar']),
- ('"foo", "bar"...', ['foo', 'bar']),
- (
- '"/browser_tests", "--type=use,comma"',
- ['/browser_tests', '--type=use,comma']
- ),
- (
- '"/browser_tests", "--ignored=\\" --type=renderer \\""',
- ['/browser_tests', '--ignored=" --type=renderer "']
- ),
- )
- for actual, expected in test_cases:
- self.assertEquals(
- expected, trace_inputs.strace_process_quoted_arguments(actual))
-
- def test_process_escaped_arguments(self):
- test_cases = (
- ('foo\\0', ['foo']),
- ('foo\\001bar\\0', ['foo', 'bar']),
- ('\\"foo\\"\\0', ['"foo"']),
- )
- for actual, expected in test_cases:
- self.assertEquals(
- expected,
- trace_inputs.Dtrace.Context.process_escaped_arguments(actual))
-
- def test_variable_abs(self):
- value = trace_inputs.Results.File(None, '/foo/bar', False, False)
- actual = value.replace_variables({'$FOO': '/foo'})
- self.assertEquals('$FOO/bar', actual.path)
- self.assertEquals('$FOO/bar', actual.full_path)
- self.assertEquals(True, actual.tainted)
-
- def test_variable_rel(self):
- value = trace_inputs.Results.File('/usr', 'foo/bar', False, False)
- actual = value.replace_variables({'$FOO': 'foo'})
- self.assertEquals('$FOO/bar', actual.path)
- self.assertEquals(os.path.join('/usr', '$FOO/bar'), actual.full_path)
- self.assertEquals(True, actual.tainted)
-
- def test_native_case_end_with_os_path_sep(self):
- # Make sure the trailing os.path.sep is kept.
- path = trace_inputs.get_native_path_case(ROOT_DIR) + os.path.sep
- self.assertEquals(trace_inputs.get_native_path_case(path), path)
-
- def test_native_case_non_existing(self):
- # Make sure it doesn't throw on non-existing files.
- non_existing = 'trace_input_test_this_file_should_not_exist'
- path = os.path.expanduser('~/' + non_existing)
- self.assertFalse(os.path.exists(path))
- path = trace_inputs.get_native_path_case(ROOT_DIR) + os.path.sep
- self.assertEquals(trace_inputs.get_native_path_case(path), path)
-
- if sys.platform in ('darwin', 'win32'):
- def test_native_case_not_sensitive(self):
- # The home directory is almost guaranteed to have mixed upper/lower case
- # letters on both Windows and OSX.
-      # This test also ensures that the output is independent of the input
-      # string case.
- path = os.path.expanduser('~')
- self.assertTrue(os.path.isdir(path))
-      # This test assumes the variable is in the native path case on disk,
-      # which should be the case. Verify this assumption:
- self.assertEquals(path, trace_inputs.get_native_path_case(path))
- self.assertEquals(
- trace_inputs.get_native_path_case(path.lower()),
- trace_inputs.get_native_path_case(path.upper()))
-
- def test_native_case_not_sensitive_non_existent(self):
-      # This test also ensures that the output is independent of the input
-      # string case.
- non_existing = os.path.join(
- 'trace_input_test_this_dir_should_not_exist', 'really not', '')
- path = os.path.expanduser(os.path.join('~', non_existing))
- self.assertFalse(os.path.exists(path))
- lower = trace_inputs.get_native_path_case(path.lower())
- upper = trace_inputs.get_native_path_case(path.upper())
- # Make sure non-existing element is not modified:
- self.assertTrue(lower.endswith(non_existing.lower()))
- self.assertTrue(upper.endswith(non_existing.upper()))
- self.assertEquals(lower[:-len(non_existing)], upper[:-len(non_existing)])
-
- if sys.platform != 'win32':
- def test_symlink(self):
- # This test will fail if the checkout is in a symlink.
- actual = trace_inputs.split_at_symlink(None, ROOT_DIR)
- expected = (ROOT_DIR, None, None)
- self.assertEquals(expected, actual)
-
- actual = trace_inputs.split_at_symlink(
- None, os.path.join(BASE_DIR, 'trace_inputs'))
- expected = (
- os.path.join(BASE_DIR, 'trace_inputs'), None, None)
- self.assertEquals(expected, actual)
-
- actual = trace_inputs.split_at_symlink(
- None, os.path.join(BASE_DIR, 'trace_inputs', 'files2'))
- expected = (
- os.path.join(BASE_DIR, 'trace_inputs'), 'files2', '')
- self.assertEquals(expected, actual)
-
- actual = trace_inputs.split_at_symlink(
- ROOT_DIR, os.path.join('tests', 'trace_inputs', 'files2'))
- expected = (
- os.path.join('tests', 'trace_inputs'), 'files2', '')
- self.assertEquals(expected, actual)
- actual = trace_inputs.split_at_symlink(
- ROOT_DIR, os.path.join('tests', 'trace_inputs', 'files2', 'bar'))
- expected = (
- os.path.join('tests', 'trace_inputs'), 'files2', '/bar')
- self.assertEquals(expected, actual)
-
- def test_native_case_symlink_right_case(self):
- actual = trace_inputs.get_native_path_case(
- os.path.join(BASE_DIR, 'trace_inputs'))
- self.assertEquals('trace_inputs', os.path.basename(actual))
-
- # Make sure the symlink is not resolved.
- actual = trace_inputs.get_native_path_case(
- os.path.join(BASE_DIR, 'trace_inputs', 'files2'))
- self.assertEquals('files2', os.path.basename(actual))
-
- if sys.platform == 'darwin':
- def test_native_case_symlink_wrong_case(self):
- actual = trace_inputs.get_native_path_case(
- os.path.join(BASE_DIR, 'trace_inputs'))
- self.assertEquals('trace_inputs', os.path.basename(actual))
-
- # Make sure the symlink is not resolved.
- actual = trace_inputs.get_native_path_case(
- os.path.join(BASE_DIR, 'trace_inputs', 'Files2'))
- self.assertEquals('files2', os.path.basename(actual))
-
-
-if sys.platform != 'win32':
- class StraceInputs(unittest.TestCase):
- # Represents the root process pid (an arbitrary number).
- _ROOT_PID = 27
- _CHILD_PID = 14
- _GRAND_CHILD_PID = 70
-
- @staticmethod
- def _load_context(lines, initial_cwd):
- context = trace_inputs.Strace.Context(lambda _: False, initial_cwd)
- for line in lines:
- context.on_line(*line)
- return context.to_results().flatten()
-
- def _test_lines(self, lines, initial_cwd, files, command=None):
- filepath = join_norm(initial_cwd, '../out/unittests')
- command = command or ['../out/unittests']
- expected = {
- 'root': {
- 'children': [],
- 'command': command,
- 'executable': filepath,
- 'files': files,
- 'initial_cwd': initial_cwd,
- 'pid': self._ROOT_PID,
- }
- }
- if not files:
- expected['root']['command'] = None
- expected['root']['executable'] = None
- self.assertEquals(expected, self._load_context(lines, initial_cwd))
-
- def test_execve(self):
- lines = [
- (self._ROOT_PID,
- 'execve("/home/foo_bar_user/out/unittests", '
- '["/home/foo_bar_user/out/unittests", '
- '"--gtest_filter=AtExitTest.Basic"], [/* 44 vars */]) = 0'),
- (self._ROOT_PID,
- 'open("out/unittests.log", O_WRONLY|O_CREAT|O_APPEND, 0666) = 8'),
- ]
- files = [
- {
- 'path': u'/home/foo_bar_user/out/unittests',
- 'size': -1,
- },
- {
- 'path': u'/home/foo_bar_user/src/out/unittests.log',
- 'size': -1,
- },
- ]
- command = [
- '/home/foo_bar_user/out/unittests', '--gtest_filter=AtExitTest.Basic',
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', files, command)
-
- def test_empty(self):
- try:
- self._load_context([], None)
- self.fail()
- except trace_inputs.TracingFailure, e:
- expected = (
- 'Found internal inconsitency in process lifetime detection '
- 'while finding the root process',
- None,
- None,
- None,
- [])
- self.assertEquals(expected, e.args)
-
- def test_chmod(self):
- lines = [
- (self._ROOT_PID, 'chmod("temp/file", 0100644) = 0'),
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', [])
-
- def test_close(self):
- lines = [
- (self._ROOT_PID, 'close(7) = 0'),
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', [])
-
- def test_clone(self):
- # Grand-child with relative directory.
- lines = [
- (self._ROOT_PID,
- 'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
- '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
- self._CHILD_PID),
- (self._CHILD_PID,
- 'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
- '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
- self._GRAND_CHILD_PID),
- (self._GRAND_CHILD_PID,
- 'open("%s", O_RDONLY) = 76' % os.path.basename(FILE_PATH)),
- ]
- size = os.stat(FILE_PATH).st_size
- expected = {
- 'root': {
- 'children': [
- {
- 'children': [
- {
- 'children': [],
- 'command': None,
- 'executable': None,
- 'files': [
- {
- 'path': FILE_PATH,
- 'size': size,
- },
- ],
- 'initial_cwd': BASE_DIR,
- 'pid': self._GRAND_CHILD_PID,
- },
- ],
- 'command': None,
- 'executable': None,
- 'files': [],
- 'initial_cwd': BASE_DIR,
- 'pid': self._CHILD_PID,
- },
- ],
- 'command': None,
- 'executable': None,
- 'files': [],
- 'initial_cwd': BASE_DIR,
- 'pid': self._ROOT_PID,
- },
- }
- self.assertEquals(expected, self._load_context(lines, BASE_DIR))
-
- def test_clone_chdir(self):
- # Grand-child with relative directory.
- lines = [
- (self._ROOT_PID,
- 'execve("../out/unittests", '
- '["../out/unittests"...], [/* 44 vars */]) = 0'),
- (self._ROOT_PID,
- 'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
- '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
- self._CHILD_PID),
- (self._CHILD_PID,
- 'chdir("/home_foo_bar_user/path1") = 0'),
- (self._CHILD_PID,
- 'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
- '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
- self._GRAND_CHILD_PID),
- (self._GRAND_CHILD_PID,
- 'execve("../out/unittests", '
- '["../out/unittests"...], [/* 44 vars */]) = 0'),
- (self._ROOT_PID, 'chdir("/home_foo_bar_user/path2") = 0'),
- (self._GRAND_CHILD_PID,
- 'open("random.txt", O_RDONLY) = 76'),
- ]
- expected = {
- 'root': {
- 'children': [
- {
- 'children': [
- {
- 'children': [],
- 'command': ['../out/unittests'],
- 'executable': '/home_foo_bar_user/out/unittests',
- 'files': [
- {
- 'path': u'/home_foo_bar_user/out/unittests',
- 'size': -1,
- },
- {
- 'path': u'/home_foo_bar_user/path1/random.txt',
- 'size': -1,
- },
- ],
- 'initial_cwd': u'/home_foo_bar_user/path1',
- 'pid': self._GRAND_CHILD_PID,
- },
- ],
-                # clone does not carry over the command and executable so it
-                # is clear whether an execve() call was done or not.
- 'command': None,
- 'executable': None,
-                # This is important: since no execve() call was done, it
-                # didn't touch the executable file.
- 'files': [],
- 'initial_cwd': unicode(ROOT_DIR),
- 'pid': self._CHILD_PID,
- },
- ],
- 'command': ['../out/unittests'],
- 'executable': join_norm(ROOT_DIR, '../out/unittests'),
- 'files': [
- {
- 'path': join_norm(ROOT_DIR, '../out/unittests'),
- 'size': -1,
- },
- ],
- 'initial_cwd': unicode(ROOT_DIR),
- 'pid': self._ROOT_PID,
- },
- }
- self.assertEquals(expected, self._load_context(lines, ROOT_DIR))
-
- def test_open(self):
- lines = [
- (self._ROOT_PID,
- 'execve("../out/unittests", '
- '["../out/unittests"...], [/* 44 vars */]) = 0'),
- (self._ROOT_PID,
- 'open("out/unittests.log", O_WRONLY|O_CREAT|O_APPEND, 0666) = 8'),
- ]
- files = [
- {
- 'path': u'/home/foo_bar_user/out/unittests',
- 'size': -1,
- },
- {
- 'path': u'/home/foo_bar_user/src/out/unittests.log',
- 'size': -1,
- },
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', files)
-
- def test_open_resumed(self):
- lines = [
- (self._ROOT_PID,
- 'execve("../out/unittests", '
- '["../out/unittests"...], [/* 44 vars */]) = 0'),
- (self._ROOT_PID,
- 'open("out/unittests.log", O_WRONLY|O_CREAT|O_APPEND '
- '<unfinished ...>'),
- (self._ROOT_PID, '<... open resumed> ) = 3'),
- ]
- files = [
- {
- 'path': u'/home/foo_bar_user/out/unittests',
- 'size': -1,
- },
- {
- 'path': u'/home/foo_bar_user/src/out/unittests.log',
- 'size': -1,
- },
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', files)
-
- def test_rmdir(self):
- lines = [
- (self._ROOT_PID, 'rmdir("directory/to/delete") = 0'),
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', [])
-
- def test_setxattr(self):
- lines = [
- (self._ROOT_PID,
- 'setxattr("file.exe", "attribute", "value", 0, 0) = 0'),
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', [])
-
- def test_sig_unexpected(self):
- lines = [
- (self._ROOT_PID, 'exit_group(0) = ?'),
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', [])
-
- def test_stray(self):
- lines = [
- (self._ROOT_PID,
- 'execve("../out/unittests", '
- '["../out/unittests"...], [/* 44 vars */]) = 0'),
- (self._ROOT_PID,
- ') = ? <unavailable>'),
- ]
- files = [
- {
- 'path': u'/home/foo_bar_user/out/unittests',
- 'size': -1,
- },
- ]
- self._test_lines(lines, '/home/foo_bar_user/src', files)
-
-
-if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
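The StraceInputs cases never spawn a process; they replay canned strace lines
through the parser. The essence of the _load_context helper, shown standalone;
the chmod line is a sample in the same format the fixtures use:

    import trace_inputs

    context = trace_inputs.Strace.Context(
        lambda _: False,           # Blacklist predicate: keep every file.
        '/home/foo_bar_user/src')  # Initial working directory.
    # Feed one (pid, line) pair, exactly as the fixtures do.
    context.on_line(27, 'chmod("temp/file", 0100644) = 0')
    results = context.to_results()
    print results.flatten()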
diff --git a/tools/isolate/tests/trace_test_cases_smoke_test.py b/tools/isolate/tests/trace_test_cases_smoke_test.py
deleted file mode 100755
index d3de106..0000000
--- a/tools/isolate/tests/trace_test_cases_smoke_test.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import re
-import subprocess
-import sys
-import tempfile
-import unittest
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-ROOT_DIR = os.path.dirname(BASE_DIR)
-sys.path.insert(0, ROOT_DIR)
-
-import trace_inputs
-
-FILE_PATH = os.path.realpath(unicode(os.path.abspath(__file__)))
-TARGET_UTIL_PATH = os.path.join(BASE_DIR, 'gtest_fake', 'gtest_fake_base.py')
-TARGET_PATH = os.path.join(BASE_DIR, 'gtest_fake', 'gtest_fake_fail.py')
-
-
-class TraceTestCases(unittest.TestCase):
- def setUp(self):
- self.temp_file = None
-
- self.initial_cwd = ROOT_DIR
- if sys.platform == 'win32':
- # Windows has no kernel mode concept of current working directory.
- self.initial_cwd = None
-
-    # There are 2 kinds of references to python: self.executable and
-    # self.real_executable. It depends on how python was started and on which
-    # OS.
- self.executable = unicode(sys.executable)
- if sys.platform == 'darwin':
- # /usr/bin/python is a thunk executable that decides which version of
- # python gets executed.
- suffix = '.'.join(map(str, sys.version_info[0:2]))
- if os.access(self.executable + suffix, os.X_OK):
- # So it'll look like /usr/bin/python2.7
- self.executable += suffix
-
- self.real_executable = trace_inputs.get_native_path_case(self.executable)
-    # Make sure there's no environment variable that could cause side effects.
- os.environ.pop('GTEST_SHARD_INDEX', '')
- os.environ.pop('GTEST_TOTAL_SHARDS', '')
-
- def tearDown(self):
- if self.temp_file:
- os.remove(self.temp_file)
-
- def test_simple(self):
- file_handle, self.temp_file = tempfile.mkstemp(
- prefix='trace_test_cases_test')
- os.close(file_handle)
-
- cmd = [
- sys.executable,
- os.path.join(ROOT_DIR, 'trace_test_cases.py'),
- # Forces 4 parallel jobs.
- '--jobs', '4',
- '--timeout', '0',
- '--out', self.temp_file,
- ]
- if VERBOSE:
- cmd.extend(['-v'] * 3)
- cmd.append(TARGET_PATH)
- logging.debug(' '.join(cmd))
- proc = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = proc.communicate() or ('', '') # pylint is confused.
- self.assertEquals(0, proc.returncode, (out, err))
- lines = out.splitlines()
- expected_out_re = [
- r'\[1/4\] \d\.\d\ds .+',
- r'\[2/4\] \d\.\d\ds .+',
- r'\[3/4\] \d\.\d\ds .+',
- r'\[4/4\] \d\.\d\ds .+',
- ]
- self.assertEquals(len(expected_out_re), len(lines), lines)
- for index in range(len(expected_out_re)):
- self.assertTrue(
- re.match('^%s$' % expected_out_re[index], lines[index]),
- '%d: %s\n%r\n%s' % (
- index, expected_out_re[index], lines[index], out))
- # Junk is printed on win32.
- if sys.platform != 'win32' and not VERBOSE:
- self.assertEquals('', err)
-
- with open(self.temp_file, 'r') as f:
- content = f.read()
- try:
- result = json.loads(content)
- except:
- print repr(content)
- raise
-
- test_cases = (
- 'Baz.Fail',
- 'Foo.Bar1',
- 'Foo.Bar2',
- 'Foo.Bar3',
- )
- self.assertEquals(dict, result.__class__)
- self.assertEquals(['traces'], result.keys())
- for index, trace in enumerate(
- sorted(result['traces'], key=lambda x: x['trace'])):
- self.assertEquals(test_cases[index], trace['trace'])
- self.assertEquals(
- [u'cmd', u'cwd', u'output', u'pid', u'trace'], sorted(trace))
- self.assertEquals(
- [sys.executable, TARGET_PATH, '--gtest_filter=%s' % trace['trace']],
- trace['cmd'])
- self.assertEquals(int, trace['pid'].__class__)
-
-
-if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
diff --git a/tools/isolate/trace_inputs.py b/tools/isolate/trace_inputs.py
deleted file mode 100755
index 4e8df22..0000000
--- a/tools/isolate/trace_inputs.py
+++ /dev/null
@@ -1,3258 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Traces an executable and its child processes and extract the files accessed
-by them.
-
-The implementation uses OS-specific API. The native Kernel logger and the ETL
-interface is used on Windows. Dtrace is used on OSX. Strace is used otherwise.
-The OS-specific implementation is hidden in an 'API' interface.
-
-The results are embedded in a Results instance. The tracing is done in two
-phases, the first is to do the actual trace and generate an
-implementation-specific log file. Then the log file is parsed to extract the
-information, including the individual child processes and the files accessed
-from the log.
-"""
-
-import codecs
-import csv
-import getpass
-import glob
-import json
-import logging
-import optparse
-import os
-import re
-import subprocess
-import sys
-import tempfile
-import threading
-import time
-import weakref
-
-## OS-specific imports
-
-if sys.platform == 'win32':
- from ctypes.wintypes import byref, create_unicode_buffer, c_int, c_wchar_p
- from ctypes.wintypes import windll, FormatError # pylint: disable=E0611
- from ctypes.wintypes import GetLastError # pylint: disable=E0611
-elif sys.platform == 'darwin':
- import Carbon.File # pylint: disable=F0401
- import MacOS # pylint: disable=F0401
-
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
-
-
-class TracingFailure(Exception):
- """An exception occured during tracing."""
- def __init__(self, description, pid, line_number, line, *args):
- super(TracingFailure, self).__init__(
- description, pid, line_number, line, *args)
- self.description = description
- self.pid = pid
- self.line_number = line_number
- self.line = line
- self.extra = args
-
- def __str__(self):
- out = self.description
- if self.pid:
- out += '\npid: %d' % self.pid
- if self.line_number:
- out += '\nline: %d' % self.line_number
- if self.line:
- out += '\n%s' % self.line
- if self.extra:
- out += '\n' + ', '.join(map(str, filter(None, self.extra)))
- return out
-
-
-## OS-specific functions
-
-if sys.platform == 'win32':
- def QueryDosDevice(drive_letter):
- """Returns the Windows 'native' path for a DOS drive letter."""
- assert re.match(r'^[a-zA-Z]:$', drive_letter), drive_letter
- # Guesswork. QueryDosDeviceW never returns the required number of bytes.
- chars = 1024
- drive_letter = unicode(drive_letter)
- p = create_unicode_buffer(chars)
- if 0 == windll.kernel32.QueryDosDeviceW(drive_letter, p, chars):
- err = GetLastError()
- if err:
- # pylint: disable=E0602
- raise WindowsError(
- err,
- 'QueryDosDevice(%s): %s (%d)' % (
- str(drive_letter), FormatError(err), err))
- return p.value
-
-
- def GetShortPathName(long_path):
- """Returns the Windows short path equivalent for a 'long' path."""
- long_path = unicode(long_path)
- # Adds '\\\\?\\' when given an absolute path so the MAX_PATH (260) limit is
- # not enforced.
- if os.path.isabs(long_path) and not long_path.startswith('\\\\?\\'):
- long_path = '\\\\?\\' + long_path
- chars = windll.kernel32.GetShortPathNameW(long_path, None, 0)
- if chars:
- p = create_unicode_buffer(chars)
- if windll.kernel32.GetShortPathNameW(long_path, p, chars):
- return p.value
-
- err = GetLastError()
- if err:
- # pylint: disable=E0602
- raise WindowsError(
- err,
- 'GetShortPathName(%s): %s (%d)' % (
- str(long_path), FormatError(err), err))
-
-
- def GetLongPathName(short_path):
- """Returns the Windows long path equivalent for a 'short' path."""
- short_path = unicode(short_path)
- # Adds '\\\\?\\' when given an absolute path so the MAX_PATH (260) limit is
- # not enforced.
- if os.path.isabs(short_path) and not short_path.startswith('\\\\?\\'):
- short_path = '\\\\?\\' + short_path
- chars = windll.kernel32.GetLongPathNameW(short_path, None, 0)
- if chars:
- p = create_unicode_buffer(chars)
- if windll.kernel32.GetLongPathNameW(short_path, p, chars):
- return p.value
-
- err = GetLastError()
- if err:
- # pylint: disable=E0602
- raise WindowsError(
- err,
- 'GetLongPathName(%s): %s (%d)' % (
- str(short_path), FormatError(err), err))
-
-
- def get_current_encoding():
- """Returns the 'ANSI' code page associated to the process."""
- return 'cp%d' % int(windll.kernel32.GetACP())
-
-
- class DosDriveMap(object):
- """Maps \Device\HarddiskVolumeN to N: on Windows."""
- # Keep one global cache.
- _MAPPING = {}
-
- def __init__(self):
- """Lazy loads the cache."""
- if not self._MAPPING:
- # This is related to the UNC resolver on Windows. Ignore that.
- self._MAPPING['\\Device\\Mup'] = None
- self._MAPPING['\\SystemRoot'] = os.environ['SystemRoot']
-
- for letter in (chr(l) for l in xrange(ord('C'), ord('Z')+1)):
- try:
- letter = '%s:' % letter
- mapped = QueryDosDevice(letter)
- if mapped in self._MAPPING:
- logging.warn(
- ('Two drives, \'%s\' and \'%s\', are mapped to the same disk. '
- 'Drive letters are a user-mode concept and the kernel traces '
- 'only contain NT paths, so all accesses will be associated '
- 'with the first drive letter, independently of the actual '
- 'letter used by the code') % (
- self._MAPPING[mapped], letter))
- else:
- self._MAPPING[mapped] = letter
- except WindowsError: # pylint: disable=E0602
- pass
-
- def to_win32(self, path):
- """Converts a native NT path to Win32/DOS compatible path."""
- match = re.match(r'(^\\Device\\[a-zA-Z0-9]+)(\\.*)?$', path)
- if not match:
- raise ValueError(
- 'Can\'t convert %s into a Win32 compatible path' % path,
- path)
- if match.group(1) not in self._MAPPING:
- # Unmapped partitions may be accessed by Windows for the
- # fun of it while the test is running. Discard these.
- return None
- drive = self._MAPPING[match.group(1)]
- if not drive or not match.group(2):
- return drive
- return drive + match.group(2)
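-
- # Illustrative mapping (a sketch; actual drive letters vary per machine):
- #   to_win32('\\Device\\HarddiskVolume1\\Windows') -> 'C:\\Windows'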
-
-
- def isabs(path):
- """Accepts X: as an absolute path, unlike python's os.path.isabs()."""
- return os.path.isabs(path) or len(path) == 2 and path[1] == ':'
-
-
- def get_native_path_case(p):
- """Returns the native path case for an existing file.
-
- On Windows, removes any leading '\\?\'.
- """
- if not isabs(p):
- raise ValueError(
- 'Can\'t get native path case for a non-absolute path: %s' % p,
- p)
- # Windows used to have an option to turn on case sensitivity on the
- # non-Win32 subsystems but that's out of scope here and isn't supported
- # anymore. Go figure why GetShortPathName() is needed.
- try:
- out = GetLongPathName(GetShortPathName(p))
- except OSError, e:
- if e.args[0] in (2, 3, 5):
- # The path does not exist. Try to recurse and reconstruct the path.
- base = os.path.dirname(p)
- rest = os.path.basename(p)
- return os.path.join(get_native_path_case(base), rest)
- raise
- if out.startswith('\\\\?\\'):
- out = out[4:]
- # Always upper case the first letter since GetLongPathName() will return the
- # drive letter in the case it was given.
- return out[0].upper() + out[1:]
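-
- # For example (hypothetical casing): get_native_path_case(u'c:\\windows')
- # would return u'C:\\Windows' on a default install.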
-
-
- def CommandLineToArgvW(command_line):
- """Splits a commandline into argv using CommandLineToArgvW()."""
- # http://msdn.microsoft.com/library/windows/desktop/bb776391.aspx
- size = c_int()
- ptr = windll.shell32.CommandLineToArgvW(unicode(command_line), byref(size))
- try:
- return [arg for arg in (c_wchar_p * size.value).from_address(ptr)]
- finally:
- windll.kernel32.LocalFree(ptr)
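-
- # Illustrative split (a sketch; the exact rules are the Win32 ones):
- #   CommandLineToArgvW(u'cmd.exe /c "echo hi"')
- #   -> [u'cmd.exe', u'/c', u'echo hi']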
-
-
-elif sys.platform == 'darwin':
-
-
- # On non-windows, keep the stdlib behavior.
- isabs = os.path.isabs
-
-
- def _find_item_native_case(root_path, item):
- """Gets the native path case of a single item based at root_path.
-
- There is no API to get the native path case of symlinks on OSX. So it
- needs to be done the slow way.
- """
- item = item.lower()
- for element in os.listdir(root_path):
- if element.lower() == item:
- return element
-
-
- def _native_case(p):
- """Gets the native path case. Warning: this function resolves symlinks."""
- logging.debug('native_case(%s)' % p)
- try:
- rel_ref, _ = Carbon.File.FSPathMakeRef(p)
- out = rel_ref.FSRefMakePath()
- if p.endswith(os.path.sep) and not out.endswith(os.path.sep):
- return out + os.path.sep
- return out
- except MacOS.Error, e:
- if e.args[0] in (-43, -120):
- # The path does not exist. Try to recurse and reconstruct the path.
- # -43 means file not found.
- # -120 means directory not found.
- base = os.path.dirname(p)
- rest = os.path.basename(p)
- return os.path.join(_native_case(base), rest)
- raise OSError(
- e.args[0], 'Failed to get native path for %s' % p, p, e.args[1])
-
-
- def _split_at_symlink_native(base_path, rest):
- """Returns the native path for a symlink."""
- base, symlink, rest = split_at_symlink(base_path, rest)
- if symlink:
- if not base_path:
- base_path = base
- else:
- base_path = safe_join(base_path, base)
- symlink = _find_item_native_case(base_path, symlink)
- return base, symlink, rest
-
-
- def get_native_path_case(path):
- """Returns the native path case for an existing file.
-
- Technically, it's only HFS+ on OSX that is case preserving and
- insensitive. It's the default setting on HFS+ but can be changed.
- """
- if not isabs(path):
- raise ValueError(
- 'Can\'t get native path case for a non-absolute path: %s' % path,
- path)
- if path.startswith('/dev'):
- # /dev is not visible from Carbon, causing an exception.
- return path
-
- # Starts assuming there is no symlink along the path.
- resolved = _native_case(path)
- if resolved.lower() == path.lower():
- # This code path is considerably faster.
- return resolved
-
- # There was a symlink, process it.
- base, symlink, rest = _split_at_symlink_native(None, path)
- assert symlink, (path, base, symlink, rest, resolved)
- prev = base
- base = safe_join(_native_case(base), symlink)
- assert len(base) > len(prev)
- while rest:
- prev = base
- relbase, symlink, rest = _split_at_symlink_native(base, rest)
- base = safe_join(base, relbase)
- assert len(base) > len(prev), (prev, base, symlink)
- if symlink:
- base = safe_join(base, symlink)
- assert len(base) > len(prev), (prev, base, symlink)
- # Make sure no symlink was resolved.
- assert base.lower() == path.lower(), (base, path)
- return base
-
-
-else: # OSes other than Windows and OSX.
-
-
- # On non-windows, keep the stdlib behavior.
- isabs = os.path.isabs
-
-
- def get_native_path_case(path):
- """Returns the native path case for an existing file.
-
- On OSes other than OSX and Windows, assume the file system is
- case-sensitive.
-
- TODO(maruel): This is not strictly true. Implement if necessary.
- """
- if not isabs(path):
- raise ValueError(
- 'Can\'t get native path case for a non-absolute path: %s' % path,
- path)
- # Give up on cygwin, as GetLongPathName() can't be called.
- # Linux traces tend not to be normalized so use this occasion to normalize
- # the path. This function already normalizes the path on the other OSes so
- # this needs to be done here to stay coherent across OSes.
- out = os.path.normpath(path)
- if path.endswith(os.path.sep) and not out.endswith(os.path.sep):
- return out + os.path.sep
- return out
-
-
-if sys.platform != 'win32': # All non-Windows OSes.
-
-
- def safe_join(*args):
- """Joins path elements like os.path.join() but doesn't abort on absolute
- path.
-
- os.path.join('foo', '/bar') == '/bar'
- but safe_join('foo', '/bar') == 'foo/bar'.
- """
- out = ''
- for element in args:
- if element.startswith(os.path.sep):
- if out.endswith(os.path.sep):
- out += element[1:]
- else:
- out += element
- else:
- if out.endswith(os.path.sep):
- out += element
- else:
- out += os.path.sep + element
- return out
-
-
- def split_at_symlink(base_dir, relfile):
- """Scans each component of relfile and cut the string at the symlink if
- there is any.
-
- Returns a tuple (base_path, symlink, rest), with symlink == rest == None if
- not symlink was found.
- """
- if base_dir:
- assert relfile
- assert os.path.isabs(base_dir)
- index = 0
- else:
- assert os.path.isabs(relfile)
- index = 1
-
- def at_root(rest):
- if base_dir:
- return safe_join(base_dir, rest)
- return rest
-
- while True:
- try:
- index = relfile.index(os.path.sep, index)
- except ValueError:
- index = len(relfile)
- full = at_root(relfile[:index])
- if os.path.islink(full):
- # A symlink!
- base = os.path.dirname(relfile[:index])
- symlink = os.path.basename(relfile[:index])
- rest = relfile[index:]
- logging.debug(
- 'split_at_symlink(%s, %s) -> (%s, %s, %s)' %
- (base_dir, relfile, base, symlink, rest))
- return base, symlink, rest
- if index == len(relfile):
- break
- index += 1
- return relfile, None, None
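-
- # Illustrative example (hypothetical layout): if /a/sym is a symlink,
- #   split_at_symlink('/a', 'sym/b/c') -> ('', 'sym', '/b/c')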
-
-
-def fix_python_path(cmd):
- """Returns the fixed command line to call the right python executable."""
- out = cmd[:]
- if out[0] == 'python':
- out[0] = sys.executable
- elif out[0].endswith('.py'):
- out.insert(0, sys.executable)
- return out
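-
-# For example, fix_python_path(['python', 'foo.py']) and
-# fix_python_path(['foo.py']) both return [sys.executable, 'foo.py'].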
-
-
-def create_thunk():
- handle, name = tempfile.mkstemp(prefix='trace_inputs_thunk', suffix='.py')
- os.write(
- handle,
- (
- 'import subprocess\n'
- 'import sys\n'
- 'sys.exit(subprocess.call(sys.argv[2:]))\n'
- ))
- os.close(handle)
- return name
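-
-# Sketch of the expected invocation of the thunk (argv[1] is skipped,
-# presumably reserved for tracer bookkeeping):
-#   python <thunk.py> <reserved> <real command and arguments...>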
-
-
-def strace_process_quoted_arguments(text):
- """Extracts quoted arguments on a string and return the arguments as a list.
-
- Implemented as an automaton. Supports incomplete strings in the form
- '"foo"...'.
-
- Example:
- With text = '"foo", "bar"', the function will return ['foo', 'bar']
-
- TODO(maruel): Implement escaping.
- """
- # All the possible states of the DFA.
- ( NEED_QUOTE, # Beginning of a new argument.
- INSIDE_STRING, # Inside an argument.
- ESCAPED, # Found a '\' inside a quote. Treat the next char as-is.
- NEED_COMMA_OR_DOT, # Right after the closing quote of an argument. Could be
- # a series of 3 dots or a comma.
- NEED_SPACE, # Right after a comma.
- NEED_DOT_2, # Found a dot, need a second one.
- NEED_DOT_3, # Found the second dot, need a third one.
- NEED_COMMA, # Found the third dot, need a comma.
- ) = range(8)
-
- state = NEED_QUOTE
- out = []
- for index, char in enumerate(text):
- if char == '"':
- if state == NEED_QUOTE:
- state = INSIDE_STRING
- # A new argument was found.
- out.append('')
- elif state == INSIDE_STRING:
- # The argument is now closed.
- state = NEED_COMMA_OR_DOT
- elif state == ESCAPED:
- out[-1] += char
- state = INSIDE_STRING
- else:
- raise ValueError(
- 'Can\'t process char at column %d for: %r' % (index, text),
- index,
- text)
- elif char == ',':
- if state in (NEED_COMMA_OR_DOT, NEED_COMMA):
- state = NEED_SPACE
- elif state == INSIDE_STRING:
- out[-1] += char
- elif state == ESCAPED:
- out[-1] += char
- state = INSIDE_STRING
- else:
- raise ValueError(
- 'Can\'t process char at column %d for: %r' % (index, text),
- index,
- text)
- elif char == ' ':
- if state == NEED_SPACE:
- state = NEED_QUOTE
- elif state == INSIDE_STRING:
- out[-1] += char
- elif state == ESCAPED:
- out[-1] += char
- state = INSIDE_STRING
- else:
- raise ValueError(
- 'Can\'t process char at column %d for: %r' % (index, text),
- index,
- text)
- elif char == '.':
- if state == NEED_COMMA_OR_DOT:
- # The string is incomplete; this means the strace -s flag should be
- # increased.
- state = NEED_DOT_2
- elif state == NEED_DOT_2:
- state = NEED_DOT_3
- elif state == NEED_DOT_3:
- state = NEED_COMMA
- elif state == INSIDE_STRING:
- out[-1] += char
- elif state == ESCAPED:
- out[-1] += char
- state = INSIDE_STRING
- else:
- raise ValueError(
- 'Can\'t process char at column %d for: %r' % (index, text),
- index,
- text)
- elif char == '\\':
- if state == ESCAPED:
- out[-1] += char
- state = INSIDE_STRING
- elif state == INSIDE_STRING:
- state = ESCAPED
- else:
- raise ValueError(
- 'Can\'t process char at column %d for: %r' % (index, text),
- index,
- text)
- else:
- if state == INSIDE_STRING:
- out[-1] += char
- else:
- raise ValueError(
- 'Can\'t process char at column %d for: %r' % (index, text),
- index,
- text)
- if state not in (NEED_COMMA, NEED_COMMA_OR_DOT):
- raise ValueError(
- 'String is incorrectly terminated: %r' % text,
- text)
- return out
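-
-# Illustrative inputs (hypothetical strace output):
-#   strace_process_quoted_arguments('"python", "-c", "print 42"')
-#   -> ['python', '-c', 'print 42']
-#   strace_process_quoted_arguments('"python", "very long argum"...')
-#   -> ['python', 'very long argum']  (truncated by strace's -s limit)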
-
-
-def read_json(filepath):
- with open(filepath, 'r') as f:
- return json.load(f)
-
-
-def write_json(filepath_or_handle, data, dense):
- """Writes data into filepath or file handle encoded as json.
-
- If dense is True, the json is packed. Otherwise, it is human readable.
- """
- if hasattr(filepath_or_handle, 'write'):
- if dense:
- filepath_or_handle.write(
- json.dumps(data, sort_keys=True, separators=(',',':')))
- else:
- filepath_or_handle.write(json.dumps(data, sort_keys=True, indent=2))
- else:
- with open(filepath_or_handle, 'wb') as f:
- if dense:
- json.dump(data, f, sort_keys=True, separators=(',',':'))
- else:
- json.dump(data, f, sort_keys=True, indent=2)
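-
-# For example, write_json('out.json', {'a': 1}, True) writes '{"a":1}' while
-# dense=False writes an indented, human-readable form.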
-
-
-class Results(object):
- """Results of a trace session."""
-
- class _TouchedObject(object):
- """Something, a file or a directory, that was accessed."""
- def __init__(self, root, path, tainted, size, nb_files):
- logging.debug(
- '%s(%s, %s, %s, %s, %s)' %
- (self.__class__.__name__, root, path, tainted, size, nb_files))
- self.root = root
- self.path = path
- self.tainted = tainted
- self.nb_files = nb_files
- # Can be used as a cache or a default value, depending on context.
- self._size = size
- # These are cache only.
- self._real_path = None
-
- # Check internal consistency.
- assert path, path
- assert tainted or bool(root) != bool(isabs(path)), (root, path)
- assert tainted or (
- not os.path.exists(self.full_path) or
- (self.full_path == get_native_path_case(self.full_path))), (
- tainted, self.full_path, get_native_path_case(self.full_path))
-
- @property
- def existent(self):
- return self.size != -1
-
- @property
- def full_path(self):
- if self.root:
- return os.path.join(self.root, self.path)
- return self.path
-
- @property
- def real_path(self):
- """Returns the path with symlinks resolved."""
- if not self._real_path:
- self._real_path = os.path.realpath(self.full_path)
- return self._real_path
-
- @property
- def size(self):
- """File's size. -1 is not existent."""
- if self._size is None and not self.tainted:
- try:
- self._size = os.stat(self.full_path).st_size
- except OSError:
- self._size = -1
- return self._size
-
- def flatten(self):
- """Returns a dict representing this object.
-
- A 'size' of 0 means the file was only touched and not read.
- """
- return {
- 'path': self.path,
- 'size': self.size,
- }
-
- def replace_variables(self, variables):
- """Replaces the root of this File with one of the variables if it matches.
-
- If a variable replacement occurs, the cloned object becomes tainted.
- """
- for variable, root_path in variables.iteritems():
- if self.path.startswith(root_path):
- return self._clone(
- self.root, variable + self.path[len(root_path):], True)
- # No need to clone, returns ourself.
- return self
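-
- # Sketch: with variables={'<ROOT>': '/src'} (hypothetical values), a File
- # whose path is '/src/a.txt' is cloned as a tainted '<ROOT>/a.txt'.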
-
- def strip_root(self, root):
- """Returns a clone of itself with 'root' stripped off."""
- # Check internal consistency.
- assert self.tainted or (isabs(root) and root.endswith(os.path.sep)), root
- if not self.full_path.startswith(root):
- # Now try to resolve the symlinks to see if it can be reached this way.
- # Only try *after* trying without resolving symlink.
- if not self.real_path.startswith(root):
- return None
- path = self.real_path
- else:
- path = self.full_path
- return self._clone(root, path[len(root):], self.tainted)
-
- def _clone(self, new_root, new_path, tainted):
- raise NotImplementedError(self.__class__.__name__)
-
- class File(_TouchedObject):
- """A file that was accessed. May not be present anymore.
-
- If tainted is True, it means it is not a real path anymore since a variable
- replacement occurred.
-
- If touched_only is True, it means the file was probed for existence and did
- exist, but was never _opened_; in that case the file must have existed.
- """
- def __init__(self, root, path, tainted, size):
- super(Results.File, self).__init__(root, path, tainted, size, 1)
-
- def _clone(self, new_root, new_path, tainted):
- """Clones itself keeping meta-data."""
- # Keep the self.size and self._real_path caches for performance reasons. It
- # is also important when the file becomes tainted (with a variable instead
- # of the real path) since self.path is not an on-disk path anymore so
- # out._size cannot be updated.
- out = self.__class__(new_root, new_path, tainted, self.size)
- out._real_path = self._real_path
- return out
-
- class Directory(_TouchedObject):
- """A directory of files. Must exist."""
- def __init__(self, root, path, tainted, size, nb_files):
- """path='.' is a valid value and must be handled appropriately."""
- assert not path.endswith(os.path.sep), path
- super(Results.Directory, self).__init__(
- root, path + os.path.sep, tainted, size, nb_files)
- # For a Directory instance, self.size is not a cache, it's an actual value
- # that is never modified and represents the total size of the files
- # contained in this directory. It is possible that the directory is empty
- # so that size == 0; this happens if there's only an invalid symlink in
- # it.
-
- def flatten(self):
- out = super(Results.Directory, self).flatten()
- out['nb_files'] = self.nb_files
- return out
-
- def _clone(self, new_root, new_path, tainted):
- """Clones itself keeping meta-data."""
- out = self.__class__(
- new_root,
- new_path.rstrip(os.path.sep),
- tainted,
- self.size,
- self.nb_files)
- out._real_path = self._real_path
- return out
-
- class Process(object):
- """A process that was traced.
-
- Contains references to the files accessed by this process and its children.
- """
- def __init__(self, pid, files, executable, command, initial_cwd, children):
- logging.debug('Process(%s, %d, ...)' % (pid, len(files)))
- self.pid = pid
- self.files = sorted(files, key=lambda x: x.path)
- self.children = children
- self.executable = executable
- self.command = command
- self.initial_cwd = initial_cwd
-
- # Check internal consistency.
- assert len(set(f.path for f in self.files)) == len(self.files), sorted(
- f.path for f in self.files)
- assert isinstance(self.children, list)
- assert isinstance(self.files, list)
-
- @property
- def all(self):
- for child in self.children:
- for i in child.all:
- yield i
- yield self
-
- def flatten(self):
- return {
- 'children': [c.flatten() for c in self.children],
- 'command': self.command,
- 'executable': self.executable,
- 'files': [f.flatten() for f in self.files],
- 'initial_cwd': self.initial_cwd,
- 'pid': self.pid,
- }
-
- def strip_root(self, root):
- assert isabs(root) and root.endswith(os.path.sep), root
- # Load the files afterwards since they are constructed as objects.
- out = self.__class__(
- self.pid,
- filter(None, (f.strip_root(root) for f in self.files)),
- self.executable,
- self.command,
- self.initial_cwd,
- [c.strip_root(root) for c in self.children])
- logging.debug(
- 'strip_root(%s) %d -> %d' % (root, len(self.files), len(out.files)))
- return out
-
- def __init__(self, process):
- self.process = process
- # Cache.
- self._files = None
-
- def flatten(self):
- return {
- 'root': self.process.flatten(),
- }
-
- @property
- def files(self):
- if self._files is None:
- self._files = sorted(
- sum((p.files for p in self.process.all), []),
- key=lambda x: x.path)
- return self._files
-
- @property
- def existent(self):
- return [f for f in self.files if f.existent]
-
- @property
- def non_existent(self):
- return [f for f in self.files if not f.existent]
-
- def strip_root(self, root):
- """Returns a clone with all the files outside the directory |root| removed
- and all the paths converted to be relative to |root|.
- """
- # Resolve any symlink
- root = os.path.realpath(root)
- root = get_native_path_case(root).rstrip(os.path.sep) + os.path.sep
- logging.debug('strip_root(%s)' % root)
- return Results(self.process.strip_root(root))
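-
- # Sketch: strip_root('/src') drops the files outside /src and rewrites e.g.
- # '/src/a/b.txt' into the relative path 'a/b.txt'.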
-
-
-class ApiBase(object):
- """OS-agnostic API to trace a process and its children."""
- class Context(object):
- """Processes one log line at a time and keeps the list of traced processes.
-
- The parsing is complicated by the fact that logs are traced out of order for
- strace but in order for dtrace and logman. In addition, on Windows it is
- very frequent that process ids are reused, so a flat list cannot be used.
- But at the same time, it is impossible to faithfully construct a graph when
- the logs are processed out of order. So both a tree and a flat mapping are
- used: the tree is the real process tree, while the flat mapping stores the
- last valid process for the corresponding process id. For the strace case,
- the tree's head is guessed at the last moment.
- """
- class Process(object):
- """Keeps context for one traced child process.
-
- Logs all the files this process touched. Ignores directories.
- """
- def __init__(self, blacklist, pid, initial_cwd):
- # Check internal consistency.
- assert isinstance(pid, int), repr(pid)
- self.pid = pid
- # children are Process instances.
- self.children = []
- self.initial_cwd = initial_cwd
- self.cwd = None
- self.files = set()
- self.only_touched = set()
- self.executable = None
- self.command = None
- self._blacklist = blacklist
-
- def to_results_process(self):
- """Resolves file case sensitivity and or late-bound strings."""
- # When resolving files, it's normal to get dupe because a file could be
- # opened multiple times with different case. Resolve the deduplication
- # here.
- def fix_path(x):
- """Returns the native file path case.
-
- Converts late-bound strings.
- """
- if not x:
- # Do not convert None instance to 'None'.
- return x
- # TODO(maruel): Do not upconvert to unicode here; on Linux the file
- # path encoding is unknown so paths must be treated as bytes.
- x = unicode(x)
- if os.path.isabs(x):
- # If the path is not absolute, which tends to happen occasionally on
- # Windows, it is not possible to get the native path case, so such a
- # trace is left as-is. It mostly happens for the 'executable' value.
- x = get_native_path_case(x)
- return x
-
- def fix_and_blacklist_path(x):
- x = fix_path(x)
- if not x:
- return
- # The blacklist needs to be reapplied, since path casing could
- # influence blacklisting.
- if self._blacklist(x):
- return
- return x
-
- # Filters out directories. Some may have passed through.
- files = set(f for f in map(fix_and_blacklist_path, self.files) if f)
- only_touched = set(
- f for f in map(fix_and_blacklist_path, self.only_touched) if f)
- only_touched -= files
-
- files = [
- Results.File(None, f, False, None) for f in files
- if not os.path.isdir(f)
- ]
- # Using 0 as size means the file's content is ignored since the file was
- # never opened for I/O.
- files.extend(
- Results.File(None, f, False, 0) for f in only_touched
- if not os.path.isdir(f)
- )
- return Results.Process(
- self.pid,
- files,
- fix_path(self.executable),
- self.command,
- fix_path(self.initial_cwd),
- [c.to_results_process() for c in self.children])
-
- def add_file(self, filepath, touch_only):
- """Adds a file if it passes the blacklist."""
- if self._blacklist(unicode(filepath)):
- return
- logging.debug('add_file(%d, %s, %s)' % (self.pid, filepath, touch_only))
- # Note that filepath, not unicode(filepath), is added. This is because
- # filepath could be something other than a string, like a RelativePath
- # instance for dtrace logs.
- if touch_only:
- self.only_touched.add(filepath)
- else:
- self.files.add(filepath)
-
- def __init__(self, blacklist):
- self.blacklist = blacklist
- # Initial process.
- self.root_process = None
- # dict to accelerate process lookup, to avoid walking the whole graph
- # each time.
- self._process_lookup = {}
-
- class Tracer(object):
- """During it's lifetime, the tracing subsystem is enabled."""
- def __init__(self, logname):
- self._logname = logname
- self._lock = threading.Lock()
- self._traces = []
- self._initialized = True
-
- def trace(self, cmd, cwd, tracename, output):
- """Runs the OS-specific trace program on an executable.
-
- Arguments:
- - cmd: The command (a list) to run.
- - cwd: Current directory to start the child process in.
- - tracename: Name of the trace in the logname file.
- - output: If False, redirects output to PIPEs.
-
- Returns a tuple (resultcode, output) and updates the internal trace
- entries.
- """
- # The implementation adds an item to self._traces.
- raise NotImplementedError(self.__class__.__name__)
-
- def close(self, _timeout=None):
- """Saves the meta-data in the logname file.
-
- For kernel-based tracing, stops the tracing subsystem.
-
- Must not be used manually when using 'with' construct.
- """
- with self._lock:
- assert self._initialized
- try:
- data = {
- 'traces': self._traces,
- }
- write_json(self._logname, data, False)
- finally:
- self._initialized = False
-
- def post_process_log(self):
- """Post-processes the log so it becomes faster to load afterward.
-
- Must not be used manually when using 'with' construct.
- """
- assert not self._initialized, 'Must stop tracing first.'
-
- def __enter__(self):
- """Enables 'with' statement."""
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- """Enables 'with' statement."""
- self.close()
- # If an exception was thrown, do not process logs.
- if not exc_type:
- self.post_process_log()
-
- def get_tracer(self, logname):
- """Returns an ApiBase.Tracer instance.
-
- Initializes the tracing subsystem, which is a requirement for kernel-based
- tracers. Only one tracer instance should be live at a time!
-
- logname is the filepath to the json file that will contain the meta-data
- about the logs.
- """
- return self.Tracer(logname)
-
- @staticmethod
- def clean_trace(logname):
- """Deletes an old log."""
- raise NotImplementedError()
-
- @classmethod
- def parse_log(cls, logname, blacklist):
- """Processes trace logs and returns the files opened and the files that do
- not exist.
-
- It does not track directories.
-
- Most of the time, files that do not exist are temporary test files that
- should be put in /tmp instead. See http://crbug.com/116251.
-
- Returns a list of dict with keys:
- - results: A Results instance.
- - trace: The corresponding tracename parameter provided to
- get_tracer().trace().
- - output: Output gathered during execution, if get_tracer().trace(...,
- output=False) was used.
- """
- raise NotImplementedError(cls.__name__)
-
-
-class Strace(ApiBase):
- """strace implies linux."""
- class Context(ApiBase.Context):
- """Processes a strace log line and keeps the list of existent and non
- existent files accessed.
-
- Ignores directories.
-
- Uses late binding to process the cwd of each process. The problem is that
- strace generates one log file per process it traced but doesn't give any
- information about which process was started when and by whom. So we don't
- even know which process is the initial one. The logs are thus processed
- out of order and late binding with RelativePath is used to deduce the
- initial directory of each process once all the logs are parsed.
- """
- class Process(ApiBase.Context.Process):
- """Represents the state of a process.
-
- Contains all the information retrieved from the pid-specific log.
- """
- # Function names are using ([a-z_0-9]+)
- # This is the most common format. function(args) = result
- RE_HEADER = re.compile(r'^([a-z_0-9]+)\((.+?)\)\s+= (.+)$')
- # An interrupted function call, only grab the minimal header.
- RE_UNFINISHED = re.compile(r'^([^\(]+)(.*) \<unfinished \.\.\.\>$')
- # A resumed function call.
- RE_RESUMED = re.compile(r'^<\.\.\. ([^ ]+) resumed> (.+)$')
- # A process received a signal.
- RE_SIGNAL = re.compile(r'^--- SIG[A-Z]+ .+ ---')
- # A process didn't handle a signal. Ignore any junk appearing before,
- # because the process was forcibly killed so it won't open any new file.
- RE_KILLED = re.compile(
- r'^.*\+\+\+ killed by ([A-Z]+)( \(core dumped\))? \+\+\+$')
- # The process has exited.
- RE_PROCESS_EXITED = re.compile(r'^\+\+\+ exited with (\d+) \+\+\+')
- # A call was canceled. Ignore any prefix.
- RE_UNAVAILABLE = re.compile(r'^.*\)\s*= \? <unavailable>$')
- # Happens when strace fails to even get the function name.
- UNNAMED_FUNCTION = '????'
-
- # Corner-case in python, a class member function decorator must not be
- # @staticmethod.
- def parse_args(regexp, expect_zero): # pylint: disable=E0213
- """Automatically convert the str 'args' into a list of processed
- arguments.
-
- Arguments:
- - regexp is used to parse args.
- - expect_zero: one of True, False or None.
- - True: will check for result.startswith('0') first and will ignore
- the trace line completely otherwise. This is important because for
- many functions, the regexp will not match if the call failed.
- - False: will check for not result.startswith(('?', '-1')) for the
- same reason as with True.
- - None: ignore result.
- """
- def meta_hook(function):
- assert function.__name__.startswith('handle_')
- def hook(self, args, result):
- if expect_zero is True and not result.startswith('0'):
- return
- if expect_zero is False and result.startswith(('?', '-1')):
- return
- match = re.match(regexp, args)
- if not match:
- raise TracingFailure(
- 'Failed to parse %s(%s) = %s' %
- (function.__name__[len('handle_'):], args, result),
- None, None, None)
- return function(self, match.groups(), result)
- return hook
- return meta_hook
-
- class RelativePath(object):
- """A late-bound relative path."""
- def __init__(self, parent, value):
- self.parent = parent
- self.value = value
-
- def render(self):
- """Returns the current directory this instance is representing.
-
- This function is used to return the late-bound value.
- """
- if self.value and self.value.startswith(u'/'):
- # An absolute path.
- return self.value
- parent = self.parent.render() if self.parent else u'<None>'
- if self.value:
- return os.path.normpath(os.path.join(parent, self.value))
- return parent
-
- def __unicode__(self):
- """Acts as a string whenever needed."""
- return unicode(self.render())
-
- def __str__(self):
- """Acts as a string whenever needed."""
- return str(self.render())
-
- def __init__(self, root, pid):
- """Keeps enough information to be able to guess the original process
- root.
-
- strace doesn't store which process was the initial process. So more
- information needs to be kept so the graph can be reconstructed from the
- flat map.
- """
- logging.info('%s(%d)' % (self.__class__.__name__, pid))
- super(Strace.Context.Process, self).__init__(root.blacklist, pid, None)
- assert isinstance(root, ApiBase.Context)
- self._root = weakref.ref(root)
- # The dict key is the function name of the pending call, like 'open'
- # or 'execve'.
- self._pending_calls = {}
- self._line_number = 0
- # Current directory when the process started.
- self.initial_cwd = self.RelativePath(self._root(), None)
- self.parentid = None
-
- def get_cwd(self):
- """Returns the best known value of cwd."""
- return self.cwd or self.initial_cwd
-
- def render(self):
- """Returns the string value of the RelativePath() object.
-
- Used by RelativePath. Returns the initial directory and not the
- current one since the current directory 'cwd' validity is time-limited.
-
- The validity is only guaranteed once all the logs are processed.
- """
- return self.initial_cwd.render()
-
- def on_line(self, line):
- self._line_number += 1
- if self.RE_SIGNAL.match(line):
- # Ignore signals.
- return
-
- try:
- match = self.RE_KILLED.match(line)
- if match:
- # Converts a '+++ killed by Foo +++' trace into an exit_group().
- self.handle_exit_group(match.group(1), None)
- return
-
- match = self.RE_PROCESS_EXITED.match(line)
- if match:
- # Converts a '+++ exited with 1 +++' trace into an exit_group()
- self.handle_exit_group(match.group(1), None)
- return
-
- match = self.RE_UNFINISHED.match(line)
- if match:
- if match.group(1) in self._pending_calls:
- raise TracingFailure(
- 'Found two unfinished calls for the same function',
- None, None, None,
- self._pending_calls)
- self._pending_calls[match.group(1)] = (
- match.group(1) + match.group(2))
- return
-
- match = self.RE_UNAVAILABLE.match(line)
- if match:
- # This usually means a process was killed and a pending call was
- # canceled.
- # TODO(maruel): Look up the last exit_group() trace just above and
- # make sure any self._pending_calls[anything] is properly flushed.
- return
-
- match = self.RE_RESUMED.match(line)
- if match:
- if match.group(1) not in self._pending_calls:
- raise TracingFailure(
- 'Found a resumed call that was not logged as unfinished',
- None, None, None,
- self._pending_calls)
- pending = self._pending_calls.pop(match.group(1))
- # Reconstruct the line.
- line = pending + match.group(2)
-
- match = self.RE_HEADER.match(line)
- if not match:
- raise TracingFailure(
- 'Found an invalid line: %s' % line,
- None, None, None)
- if match.group(1) == self.UNNAMED_FUNCTION:
- return
-
- # It's a valid line, handle it.
- handler = getattr(self, 'handle_%s' % match.group(1), None)
- if not handler:
- self._handle_unknown(match.group(1), match.group(2), match.group(3))
- return handler(match.group(2), match.group(3))
- except TracingFailure, e:
- # Hack in the values since the handler could be a static function.
- e.pid = self.pid
- e.line = line
- e.line_number = self._line_number
- # Re-raise the modified exception.
- raise
- except (KeyError, NotImplementedError, ValueError), e:
- raise TracingFailure(
- 'Trace generated a %s exception: %s' % (
- e.__class__.__name__, str(e)),
- self.pid,
- self._line_number,
- line,
- e)
-
- @parse_args(r'^\"(.+?)\", [FKORWX_|]+$', True)
- def handle_access(self, args, _result):
- self._handle_file(args[0], True)
-
- @parse_args(r'^\"(.+?)\"$', True)
- def handle_chdir(self, args, _result):
- """Updates cwd."""
- self.cwd = self.RelativePath(self, args[0])
- logging.debug('handle_chdir(%d, %s)' % (self.pid, self.cwd))
-
- def handle_clone(self, _args, result):
- """Transfers cwd."""
- if result.startswith(('?', '-1')):
- # The call failed.
- return
- # Update the other process right away.
- childpid = int(result)
- child = self._root().get_or_set_proc(childpid)
- if child.parentid is not None or child in self.children:
- raise TracingFailure(
- 'Found internal inconsistency in process lifetime detection '
- 'during a clone() call',
- None, None, None)
-
- # Copy the cwd object.
- child.initial_cwd = self.get_cwd()
- child.parentid = self.pid
- # It is necessary because the logs are processed out of order.
- self.children.append(child)
-
- def handle_close(self, _args, _result):
- pass
-
- def handle_chmod(self, _args, _result):
- pass
-
- def handle_creat(self, _args, _result):
- # Ignore files created, since they didn't need to exist.
- pass
-
- @parse_args(r'^\"(.+?)\", \[(.+)\], \[\/\* \d+ vars? \*\/\]$', True)
- def handle_execve(self, args, _result):
- # Even though in practice execve() doesn't return when it succeeds, strace
- # still prints '0' as the result.
- filepath = args[0]
- self._handle_file(filepath, False)
- self.executable = self.RelativePath(self.get_cwd(), filepath)
- self.command = strace_process_quoted_arguments(args[1])
-
- def handle_exit_group(self, _args, _result):
- """Removes cwd."""
- self.cwd = None
-
- def handle_fork(self, args, result):
- self._handle_unknown('fork', args, result)
-
- def handle_getcwd(self, _args, _result):
- pass
-
- @parse_args(r'^\"(.+?)\", \"(.+?)\"$', True)
- def handle_link(self, args, _result):
- self._handle_file(args[0], False)
- self._handle_file(args[1], False)
-
- @parse_args(r'\"(.+?)\", \{.+?, \.\.\.\}', True)
- def handle_lstat(self, args, _result):
- self._handle_file(args[0], True)
-
- def handle_mkdir(self, _args, _result):
- pass
-
- @parse_args(r'^\"(.*?)\", ([A-Z\_\|]+)(|, \d+)$', False)
- def handle_open(self, args, _result):
- if 'O_DIRECTORY' in args[1]:
- return
- self._handle_file(args[0], False)
-
- @parse_args(r'^(\d+|AT_FDCWD), \"(.*?)\", ([A-Z\_\|]+)(|, \d+)$', False)
- def handle_openat(self, args, _result):
- if 'O_DIRECTORY' in args[2]:
- return
- if args[0] == 'AT_FDCWD':
- # args[1] is the path, relative to the current working directory.
- self._handle_file(args[1], False)
- else:
- # TODO(maruel): Implement relative open if necessary instead of the
- # AT_FDCWD flag, let's hope not since this means tracking all active
- # directory handles.
- raise Exception('Relative open via openat not implemented.')
-
- @parse_args(r'^\"(.+?)\", \".+?\"(\.\.\.)?, \d+$', False)
- def handle_readlink(self, args, _result):
- self._handle_file(args[0], False)
-
- @parse_args(r'^\"(.+?)\", \"(.+?)\"$', True)
- def handle_rename(self, args, _result):
- self._handle_file(args[0], False)
- self._handle_file(args[1], False)
-
- def handle_rmdir(self, _args, _result):
- pass
-
- def handle_setxattr(self, _args, _result):
- pass
-
- @parse_args(r'\"(.+?)\", \{.+?, \.\.\.\}', True)
- def handle_stat(self, args, _result):
- self._handle_file(args[0], True)
-
- def handle_symlink(self, _args, _result):
- pass
-
- def handle_unlink(self, _args, _result):
- # In theory, the file had to be created anyway.
- pass
-
- def handle_statfs(self, _args, _result):
- pass
-
- def handle_vfork(self, args, result):
- self._handle_unknown('vfork', args, result)
-
- @staticmethod
- def _handle_unknown(function, args, result):
- raise TracingFailure(
- 'Unexpected/unimplemented trace %s(%s)= %s' %
- (function, args, result),
- None, None, None)
-
- def _handle_file(self, filepath, touch_only):
- filepath = self.RelativePath(self.get_cwd(), filepath)
- #assert not touch_only, unicode(filepath)
- self.add_file(filepath, touch_only)
-
- def __init__(self, blacklist, initial_cwd):
- super(Strace.Context, self).__init__(blacklist)
- self.initial_cwd = initial_cwd
-
- def render(self):
- """Returns the string value of the initial cwd of the root process.
-
- Used by RelativePath.
- """
- return self.initial_cwd
-
- def on_line(self, pid, line):
- """Transfers control into the Process.on_line() function."""
- self.get_or_set_proc(pid).on_line(line.strip())
-
- def to_results(self):
- """Finds back the root process and verify consistency."""
- # TODO(maruel): Absolutely unecessary, fix me.
- root = [p for p in self._process_lookup.itervalues() if not p.parentid]
- if len(root) != 1:
- raise TracingFailure(
- 'Found internal inconsistency in process lifetime detection '
- 'while finding the root process',
- None,
- None,
- None,
- sorted(p.pid for p in root))
- self.root_process = root[0]
- process = self.root_process.to_results_process()
- if sorted(self._process_lookup) != sorted(p.pid for p in process.all):
- raise TracingFailure(
- 'Found internal inconsistency in process lifetime detection '
- 'while looking for len(tree) == len(list)',
- None,
- None,
- None,
- sorted(self._process_lookup),
- sorted(p.pid for p in process.all))
- return Results(process)
-
- def get_or_set_proc(self, pid):
- """Returns the Context.Process instance for this pid or creates a new one.
- """
- if not pid or not isinstance(pid, int):
- raise TracingFailure(
- 'Unexpected value for pid: %r' % pid,
- pid,
- None,
- None,
- pid)
- if pid not in self._process_lookup:
- self._process_lookup[pid] = self.Process(self, pid)
- return self._process_lookup[pid]
-
- @classmethod
- def traces(cls):
- """Returns the list of all handled traces to pass this as an argument to
- strace.
- """
- prefix = 'handle_'
- return [i[len(prefix):] for i in dir(cls.Process) if i.startswith(prefix)]
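-
- # E.g. this returns names like ['access', 'chdir', 'clone', ...], which
- # Tracer.trace() below joins into strace's '-e trace=...' argument.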
-
- class Tracer(ApiBase.Tracer):
- MAX_LEN = 256
-
- def trace(self, cmd, cwd, tracename, output):
- """Runs strace on an executable."""
- logging.info('trace(%s, %s, %s, %s)' % (cmd, cwd, tracename, output))
- assert os.path.isabs(cmd[0]), cmd[0]
- assert os.path.isabs(cwd), cwd
- assert os.path.normpath(cwd) == cwd, cwd
- with self._lock:
- if not self._initialized:
- raise TracingFailure(
- 'Called Tracer.trace() on an uninitialized object',
- None, None, None, tracename)
- assert tracename not in (i['trace'] for i in self._traces)
- stdout = stderr = None
- if output:
- stdout = subprocess.PIPE
- stderr = subprocess.STDOUT
- # Ensure all file related APIs are hooked.
- traces = ','.join(Strace.Context.traces() + ['file'])
- trace_cmd = [
- 'strace',
- '-ff',
- '-s', '%d' % self.MAX_LEN,
- '-e', 'trace=%s' % traces,
- '-o', self._logname + '.' + tracename,
- ]
- child = subprocess.Popen(
- trace_cmd + cmd,
- cwd=cwd,
- stdin=subprocess.PIPE,
- stdout=stdout,
- stderr=stderr)
- out = child.communicate()[0]
- # TODO(maruel): Walking the logs to figure out the root process would
- # simplify parsing them a *lot*.
- with self._lock:
- assert tracename not in (i['trace'] for i in self._traces)
- self._traces.append(
- {
- 'cmd': cmd,
- 'cwd': cwd,
- # The pid of strace process, not very useful.
- 'pid': child.pid,
- 'trace': tracename,
- 'output': out,
- })
- return child.returncode, out
-
- @staticmethod
- def clean_trace(logname):
- if os.path.isfile(logname):
- os.remove(logname)
- # Also delete any pid specific file from previous traces.
- for i in glob.iglob(logname + '.*'):
- if i.rsplit('.', 1)[1].isdigit():
- os.remove(i)
-
- @classmethod
- def parse_log(cls, logname, blacklist):
- logging.info('parse_log(%s, %s)' % (logname, blacklist))
- data = read_json(logname)
- out = []
- for item in data['traces']:
- result = {
- 'trace': item['trace'],
- 'output': item['output'],
- }
- try:
- context = cls.Context(blacklist, item['cwd'])
- for pidfile in glob.iglob('%s.%s.*' % (logname, item['trace'])):
- pid = pidfile.rsplit('.', 1)[1]
- if pid.isdigit():
- pid = int(pid)
- # TODO(maruel): Load as utf-8
- for line in open(pidfile, 'rb'):
- context.on_line(pid, line)
- result['results'] = context.to_results()
- except TracingFailure, e:
- result['exception'] = e
- out.append(result)
- return out
-
-
-class Dtrace(ApiBase):
- """Uses DTrace framework through dtrace. Requires root access.
-
- Implies Mac OSX.
-
- dtruss can't be used because it has compatibility issues with python.
-
- Also, the pid->cwd handling needs to be done manually since OSX has no way to
- get the absolute path of the 'cwd' dtrace variable from the probe.
-
- Also, OSX doesn't populate curpsinfo->pr_psargs properly, see
- https://discussions.apple.com/thread/1980539. So resort to handling execve()
- manually.
-
- errno is not printed in the log since this implementation currently only cares
- about files that were successfully opened.
- """
- class Context(ApiBase.Context):
- # Format: index pid function(args)
- RE_HEADER = re.compile(r'^\d+ (\d+) ([a-zA-Z_\-]+)\((.*?)\)$')
-
- # Arguments parsing.
- RE_DTRACE_BEGIN = re.compile(r'^\"(.+?)\"$')
- RE_CHDIR = re.compile(r'^\"(.+?)\"$')
- RE_EXECVE = re.compile(r'^\"(.+?)\", \[(\d+), (.+)\]$')
- RE_OPEN = re.compile(r'^\"(.+?)\", (0x[0-9a-z]+), (0x[0-9a-z]+)$')
- RE_PROC_START = re.compile(r'^(\d+), \"(.+?)\", (\d+)$')
- RE_RENAME = re.compile(r'^\"(.+?)\", \"(.+?)\"$')
-
- O_DIRECTORY = 0x100000
-
- class Process(ApiBase.Context.Process):
- def __init__(self, *args):
- super(Dtrace.Context.Process, self).__init__(*args)
- self.cwd = self.initial_cwd
-
- def __init__(self, blacklist, tracer_pid, initial_cwd):
- logging.info(
- '%s(%d, %s)' % (self.__class__.__name__, tracer_pid, initial_cwd))
- super(Dtrace.Context, self).__init__(blacklist)
- # Process ID of the temporary script created by create_thunk().
- self._tracer_pid = tracer_pid
- self._initial_cwd = initial_cwd
- self._line_number = 0
-
- def on_line(self, line):
- self._line_number += 1
- match = self.RE_HEADER.match(line)
- if not match:
- raise TracingFailure(
- 'Found malformed line: %s' % line,
- None,
- self._line_number,
- line)
- fn = getattr(
- self,
- 'handle_%s' % match.group(2).replace('-', '_'),
- self._handle_ignored)
- # It is guaranteed to succeed because of the regexp. Or at least I thought
- # it would.
- pid = int(match.group(1))
- try:
- return fn(pid, match.group(3))
- except TracingFailure, e:
- # Hack in the values since the handler could be a static function.
- e.pid = pid
- e.line = line
- e.line_number = self._line_number
- # Re-raise the modified exception.
- raise
- except (KeyError, NotImplementedError, ValueError), e:
- raise TracingFailure(
- 'Trace generated a %s exception: %s' % (
- e.__class__.__name__, str(e)),
- pid,
- self._line_number,
- line,
- e)
-
- def to_results(self):
- process = self.root_process.to_results_process()
- # Internal consistency check.
- if sorted(self._process_lookup) != sorted(p.pid for p in process.all):
- raise TracingFailure(
- 'Found internal inconsistency in process lifetime detection '
- 'while looking for len(tree) == len(list)',
- None,
- None,
- None,
- sorted(self._process_lookup),
- sorted(p.pid for p in process.all))
- return Results(process)
-
- def handle_dtrace_BEGIN(self, _pid, args):
- if not self.RE_DTRACE_BEGIN.match(args):
- raise TracingFailure(
- 'Found internal inconsistency in dtrace_BEGIN log line',
- None, None, None)
-
- def handle_proc_start(self, pid, args):
- """Transfers cwd.
-
- The dtrace script already takes care of only tracing the processes that
- are children of the traced processes so there is no need to verify the
- process hierarchy.
- """
- if pid in self._process_lookup:
- raise TracingFailure(
- 'Found internal inconsistency in proc_start: %d started twice' %
- pid,
- None, None, None)
- match = self.RE_PROC_START.match(args)
- if not match:
- raise TracingFailure(
- 'Failed to parse arguments: %s' % args,
- None, None, None)
- ppid = int(match.group(1))
- if ppid == self._tracer_pid and not self.root_process:
- proc = self.root_process = self.Process(
- self.blacklist, pid, self._initial_cwd)
- elif ppid in self._process_lookup:
- proc = self.Process(self.blacklist, pid, self._process_lookup[ppid].cwd)
- self._process_lookup[ppid].children.append(proc)
- else:
- # Another process tree, ignore.
- return
- self._process_lookup[pid] = proc
- logging.debug(
- 'New child: %s -> %d cwd:%s' %
- (ppid, pid, unicode(proc.initial_cwd)))
-
- def handle_proc_exit(self, pid, _args):
- """Removes cwd."""
- if pid in self._process_lookup:
- # self._tracer_pid is not traced itself; other trace runs aren't either.
- self._process_lookup[pid].cwd = None
-
- def handle_execve(self, pid, args):
- """Sets the process' executable.
-
- TODO(maruel): Read command line arguments. See
- https://discussions.apple.com/thread/1980539 for an example.
- https://gist.github.com/1242279
-
- Will have to put the answer at http://stackoverflow.com/questions/7556249.
- :)
- """
- if pid not in self._process_lookup:
- # Another process tree, ignore.
- return
- match = self.RE_EXECVE.match(args)
- if not match:
- raise TracingFailure(
- 'Failed to parse arguments: %r' % args,
- None, None, None)
- proc = self._process_lookup[pid]
- proc.executable = match.group(1)
- proc.command = self.process_escaped_arguments(match.group(3))
- if int(match.group(2)) != len(proc.command):
- raise TracingFailure(
- 'Failed to parse execve() arguments: %s' % args,
- None, None, None)
-
- def handle_chdir(self, pid, args):
- """Updates cwd."""
- if pid not in self._process_lookup:
- # Another process tree, ignore.
- return
- cwd = self.RE_CHDIR.match(args).group(1)
- if not cwd.startswith('/'):
- cwd2 = os.path.join(self._process_lookup[pid].cwd, cwd)
- logging.debug('handle_chdir(%d, %s) -> %s' % (pid, cwd, cwd2))
- else:
- logging.debug('handle_chdir(%d, %s)' % (pid, cwd))
- cwd2 = cwd
- self._process_lookup[pid].cwd = cwd2
-
- def handle_open_nocancel(self, pid, args):
- """Redirects to handle_open()."""
- return self.handle_open(pid, args)
-
- def handle_open(self, pid, args):
- if pid not in self._process_lookup:
- # Another process tree, ignore.
- return
- match = self.RE_OPEN.match(args)
- if not match:
- raise TracingFailure(
- 'Failed to parse arguments: %s' % args,
- None, None, None)
- flag = int(match.group(2), 16)
- if self.O_DIRECTORY & flag == self.O_DIRECTORY:
- # Ignore directories.
- return
- self._handle_file(pid, match.group(1))
-
- def handle_rename(self, pid, args):
- if pid not in self._process_lookup:
- # Another process tree, ignore.
- return
- match = self.RE_RENAME.match(args)
- if not match:
- raise TracingFailure(
- 'Failed to parse arguments: %s' % args,
- None, None, None)
- self._handle_file(pid, match.group(1))
- self._handle_file(pid, match.group(2))
-
- def _handle_file(self, pid, filepath):
- if not filepath.startswith('/'):
- filepath = os.path.join(self._process_lookup[pid].cwd, filepath)
- # We can get '..' in the path.
- filepath = os.path.normpath(filepath)
- # Sadly, still need to filter out directories here;
- # saw open_nocancel(".", 0, 0) = 0 lines.
- if os.path.isdir(filepath):
- return
- self._process_lookup[pid].add_file(filepath, False)
-
- def handle_ftruncate(self, pid, args):
- """Just used as a signal to kill dtrace, ignoring."""
- pass
-
- @staticmethod
- def _handle_ignored(pid, args):
- """Is called for all the event traces that are not handled."""
- raise NotImplementedError('Please implement me')
-
- @staticmethod
- def process_escaped_arguments(text):
- """Extracts escaped arguments on a string and return the arguments as a
- list.
-
- Implemented as an automaton.
-
- Example:
- With text = 'python2.7\\001-c\\001print(\\"hi\\")\\0', the
- function will return ['python2.7', '-c', 'print("hi")']
- """
- if not text.endswith('\\0'):
- raise ValueError('String is not null terminated: %r' % text, text)
- text = text[:-2]
-
- def unescape(x):
- """Replaces '\\' with '\' and '\?' (where ? is anything) with ?."""
- out = []
- escaped = False
- for i in x:
- if i == '\\' and not escaped:
- escaped = True
- continue
- escaped = False
- out.append(i)
- return ''.join(out)
-
- return [unescape(i) for i in text.split('\\001')]
-
- class Tracer(ApiBase.Tracer):
- # pylint: disable=C0301
- #
- # To understand the following code, you'll want to take a look at:
- # http://developers.sun.com/solaris/articles/dtrace_quickref/dtrace_quickref.html
- # https://wikis.oracle.com/display/DTrace/Variables
- # http://docs.oracle.com/cd/E19205-01/820-4221/
- #
- # 0. Dump all the valid probes into a text file. It is important; you
- # want to redirect into a file and you don't want to constantly 'sudo'.
- # $ sudo dtrace -l > probes.txt
- #
- # 1. Count the number of probes:
- # $ wc -l probes.txt
- # 81823 # On OSX 10.7, including 1 header line.
- #
- # 2. List providers, intentionally skipping all the 'syspolicy10925' and the
- # likes and skipping the header with NR>1:
- # $ awk 'NR>1 { print $2 }' probes.txt | sort | uniq | grep -v '[[:digit:]]'
- # dtrace
- # fbt
- # io
- # ip
- # lockstat
- # mach_trap
- # proc
- # profile
- # sched
- # syscall
- # tcp
- # vminfo
- #
- # 3. List of valid probes:
- # $ grep syscall probes.txt | less
- # or use dtrace directly:
- # $ sudo dtrace -l -P syscall | less
- #
- # trackedpid is an associative array where its value can be 0, 1 or 2.
- # 0 is for untracked processes and is the default value for items not
- # in the associative array.
- # 1 is for tracked processes.
- # 2 is for the script created by create_thunk() only. It is not tracked
- # itself but all its descendants are.
- #
- # The script will kill itself only once waiting_to_die == 1 and
- # current_processes == 0, so that both getlogin() was called and that
- # all traced processes exited.
- #
- # TODO(maruel): Use cacheable predicates. See
- # https://wikis.oracle.com/display/DTrace/Performance+Considerations
- D_CODE = """
- dtrace:::BEGIN {
- waiting_to_die = 0;
- current_processes = 0;
- logindex = 0;
- printf("%d %d %s_%s(\\"%s\\")\\n",
- logindex, PID, probeprov, probename, SCRIPT);
- logindex++;
- }
-
- proc:::start /trackedpid[ppid]/ {
- trackedpid[pid] = 1;
- current_processes += 1;
- printf("%d %d %s_%s(%d, \\"%s\\", %d)\\n",
- logindex, pid, probeprov, probename,
- ppid,
- execname,
- current_processes);
- logindex++;
- }
- /* Should use SCRIPT but there is no access to this variable at that
- * point. */
- proc:::start /ppid == PID && execname == "Python"/ {
- trackedpid[pid] = 2;
- current_processes += 1;
- printf("%d %d %s_%s(%d, \\"%s\\", %d)\\n",
- logindex, pid, probeprov, probename,
- ppid,
- execname,
- current_processes);
- logindex++;
- }
- proc:::exit /trackedpid[pid] &&
- current_processes == 1 &&
- waiting_to_die == 1/ {
- trackedpid[pid] = 0;
- current_processes -= 1;
- printf("%d %d %s_%s(%d)\\n",
- logindex, pid, probeprov, probename,
- current_processes);
- logindex++;
- exit(0);
- }
- proc:::exit /trackedpid[pid]/ {
- trackedpid[pid] = 0;
- current_processes -= 1;
- printf("%d %d %s_%s(%d)\\n",
- logindex, pid, probeprov, probename,
- current_processes);
- logindex++;
- }
-
- /* Use an arcane function to detect when we need to die */
- syscall::ftruncate:entry /pid == PID && arg0 == FILE_ID/ {
- waiting_to_die = 1;
- printf("%d %d %s()\\n", logindex, pid, probefunc);
- logindex++;
- }
- syscall::ftruncate:entry /
- pid == PID && arg0 == FILE_ID && current_processes == 0/ {
- exit(0);
- }
-
- syscall::open*:entry /trackedpid[pid] == 1/ {
- self->open_arg0 = arg0;
- self->open_arg1 = arg1;
- self->open_arg2 = arg2;
- }
- syscall::open*:return /trackedpid[pid] == 1 && errno == 0/ {
- this->open_arg0 = copyinstr(self->open_arg0);
- printf("%d %d %s(\\"%s\\", 0x%x, 0x%x)\\n",
- logindex, pid, probefunc,
- this->open_arg0,
- self->open_arg1,
- self->open_arg2);
- logindex++;
- this->open_arg0 = 0;
- }
- syscall::open*:return /trackedpid[pid] == 1/ {
- self->open_arg0 = 0;
- self->open_arg1 = 0;
- self->open_arg2 = 0;
- }
-
- syscall::rename:entry /trackedpid[pid] == 1/ {
- self->rename_arg0 = arg0;
- self->rename_arg1 = arg1;
- }
- syscall::rename:return /trackedpid[pid] == 1 && errno == 0/ {
- this->rename_arg0 = copyinstr(self->rename_arg0);
- this->rename_arg1 = copyinstr(self->rename_arg1);
- printf("%d %d %s(\\"%s\\", \\"%s\\")\\n",
- logindex, pid, probefunc,
- this->rename_arg0,
- this->rename_arg1);
- logindex++;
- this->rename_arg0 = 0;
- this->rename_arg1 = 0;
- }
- syscall::rename:return /trackedpid[pid] == 1/ {
- self->rename_arg0 = 0;
- self->rename_arg1 = 0;
- }
-
- /* Track chdir; it's painful because it only receives relative paths.
- */
- syscall::chdir:entry /trackedpid[pid] == 1/ {
- self->chdir_arg0 = arg0;
- }
- syscall::chdir:return /trackedpid[pid] == 1 && errno == 0/ {
- this->chdir_arg0 = copyinstr(self->chdir_arg0);
- printf("%d %d %s(\\"%s\\")\\n",
- logindex, pid, probefunc,
- this->chdir_arg0);
- logindex++;
- this->chdir_arg0 = 0;
- }
- syscall::chdir:return /trackedpid[pid] == 1/ {
- self->chdir_arg0 = 0;
- }
- """
-
- # execve-specific code, tends to throw a lot of exceptions.
- D_CODE_EXECVE = """
- /* Finally what we care about! */
- syscall::exec*:entry /trackedpid[pid]/ {
- self->exec_arg0 = copyinstr(arg0);
- /* Incrementally probe for a NULL in the argv parameter of execve() to
- * figure out argc. */
- /* TODO(maruel): Skip the remaining copyin() when a NULL pointer was
- * found. */
- self->exec_argc = 0;
- /* Probe for argc==1 */
- this->exec_argv = (user_addr_t*)copyin(
- arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
- self->exec_argc = this->exec_argv[self->exec_argc] ?
- (self->exec_argc + 1) : self->exec_argc;
-
- /* Probe for argc==2 */
- this->exec_argv = (user_addr_t*)copyin(
- arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
- self->exec_argc = this->exec_argv[self->exec_argc] ?
- (self->exec_argc + 1) : self->exec_argc;
-
- /* Probe for argc==3 */
- this->exec_argv = (user_addr_t*)copyin(
- arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
- self->exec_argc = this->exec_argv[self->exec_argc] ?
- (self->exec_argc + 1) : self->exec_argc;
-
- /* Probe for argc==4 */
- this->exec_argv = (user_addr_t*)copyin(
- arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
- self->exec_argc = this->exec_argv[self->exec_argc] ?
- (self->exec_argc + 1) : self->exec_argc;
-
- /* Copy the inputs strings since there is no guarantee they'll be
- * present after the call completed. */
- self->exec_argv0 = (self->exec_argc > 0) ?
- copyinstr(this->exec_argv[0]) : "";
- self->exec_argv1 = (self->exec_argc > 1) ?
- copyinstr(this->exec_argv[1]) : "";
- self->exec_argv2 = (self->exec_argc > 2) ?
- copyinstr(this->exec_argv[2]) : "";
- self->exec_argv3 = (self->exec_argc > 3) ?
- copyinstr(this->exec_argv[3]) : "";
- this->exec_argv = 0;
- }
- syscall::exec*:return /trackedpid[pid] && errno == 0/ {
- /* We need to join strings here, as using multiple printf() would
- * cause tearing when multiple threads/processes are traced.
- * Since it is impossible to escape a string and join it to another one,
- * like sprintf("%s%S", previous, more), use hackery.
- * Each of the elements are split with a \\1. \\0 cannot be used because
- * it is simply ignored. This will conflict with any program putting a
- * \\1 in their execve() string but this should be "rare enough" */
- this->args = "";
- /* Process exec_argv[0] */
- this->args = strjoin(
- this->args, (self->exec_argc > 0) ? self->exec_argv0 : "");
-
- /* Process exec_argv[1] */
- this->args = strjoin(
- this->args, (self->exec_argc > 1) ? "\\1" : "");
- this->args = strjoin(
- this->args, (self->exec_argc > 1) ? self->exec_argv1 : "");
-
- /* Process exec_argv[2] */
- this->args = strjoin(
- this->args, (self->exec_argc > 2) ? "\\1" : "");
- this->args = strjoin(
- this->args, (self->exec_argc > 2) ? self->exec_argv2 : "");
-
- /* Process exec_argv[3] */
- this->args = strjoin(
- this->args, (self->exec_argc > 3) ? "\\1" : "");
- this->args = strjoin(
- this->args, (self->exec_argc > 3) ? self->exec_argv3 : "");
-
-    /* Print self->exec_argc to permit verifying the internal
-     * consistency, since this code is quite fishy. */
- printf("%d %d %s(\\"%s\\", [%d, %S])\\n",
- logindex, pid, probefunc,
- self->exec_arg0,
- self->exec_argc,
- this->args);
- logindex++;
- this->args = 0;
- }
- syscall::exec*:return /trackedpid[pid]/ {
- self->exec_arg0 = 0;
- self->exec_argc = 0;
- self->exec_argv0 = 0;
- self->exec_argv1 = 0;
- self->exec_argv2 = 0;
- self->exec_argv3 = 0;
- }
- """
-
- # Code currently not used.
- D_EXTRANEOUS = """
- /* This is a good learning experience, since it traces a lot of things
- * related to the process and child processes.
- * Warning: it generates a gigantic log. For example, tracing
- * "data/trace_inputs/child1.py --child" generates a 2mb log and takes
- * several minutes to execute.
- */
- /*
- mach_trap::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
- printf("%d %d %s_%s() = %d\\n",
- logindex, pid, probeprov, probefunc, errno);
- logindex++;
- }
- proc::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
- printf("%d %d %s_%s() = %d\\n",
- logindex, pid, probeprov, probefunc, errno);
- logindex++;
- }
- sched::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
- printf("%d %d %s_%s() = %d\\n",
- logindex, pid, probeprov, probefunc, errno);
- logindex++;
- }
- syscall::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
- printf("%d %d %s_%s() = %d\\n",
- logindex, pid, probeprov, probefunc, errno);
- logindex++;
- }
- vminfo::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
- printf("%d %d %s_%s() = %d\\n",
- logindex, pid, probeprov, probefunc, errno);
- logindex++;
- }
- */
- /* TODO(maruel): *stat* functions and friends
- syscall::access:return,
- syscall::chdir:return,
- syscall::chflags:return,
- syscall::chown:return,
- syscall::chroot:return,
- syscall::getattrlist:return,
- syscall::getxattr:return,
- syscall::lchown:return,
- syscall::lstat64:return,
- syscall::lstat:return,
- syscall::mkdir:return,
- syscall::pathconf:return,
- syscall::readlink:return,
- syscall::removexattr:return,
- syscall::setxattr:return,
- syscall::stat64:return,
- syscall::stat:return,
- syscall::truncate:return,
- syscall::unlink:return,
- syscall::utimes:return,
- */
- """
-
- def __init__(self, logname):
- """Starts the log collection with dtrace.
-
- Requires root access or chmod 4555 on dtrace. dtrace is asynchronous so
- this needs to wait for dtrace to be "warmed up".
- """
- super(Dtrace.Tracer, self).__init__(logname)
- self._script = create_thunk()
-    # This unique dummy temp file is used to signal the dtrace script that it
-    # should stop as soon as all the child processes are done. A bit hackish,
-    # but it works well enough.
- self._dummy_file_id, self._dummy_file_name = tempfile.mkstemp(
- prefix='trace_signal_file')
-
- # Note: do not use the -p flag. It's useless if the initial process quits
- # too fast, resulting in missing traces from the grand-children. The D
- # code manages the dtrace lifetime itself.
- trace_cmd = [
- 'sudo',
- 'dtrace',
- # Use a larger buffer if getting 'out of scratch space' errors.
- # Ref: https://wikis.oracle.com/display/DTrace/Options+and+Tunables
- '-b', '10m',
- '-x', 'dynvarsize=10m',
- #'-x', 'dtrace_global_maxsize=1m',
- '-x', 'evaltime=exec',
- '-o', '/dev/stderr',
- '-q',
- '-n', self._get_dtrace_code(),
- ]
- with open(self._logname + '.log', 'wb') as logfile:
- self._dtrace = subprocess.Popen(
- trace_cmd, stdout=logfile, stderr=subprocess.STDOUT)
- logging.debug('Started dtrace pid: %d' % self._dtrace.pid)
-
- # Reads until one line is printed, which signifies dtrace is up and ready.
- with open(self._logname + '.log', 'rb') as logfile:
- while 'dtrace_BEGIN' not in logfile.readline():
- if self._dtrace.poll() is not None:
- # Do a busy wait. :/
- break
- logging.debug('dtrace started')
-
- def _get_dtrace_code(self):
-    """Sets up the D code to implement child process tracking.
-
- Injects the cookie in the script so it knows when to stop.
-
- The script will detect any instance of the script created with
- create_thunk() and will start tracing it.
- """
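-    # The generated D prologue looks like this (values are illustrative):
-    #   inline int PID = 1234;
-    #   inline string SCRIPT = "/tmp/tmpXYZabc.py";
-    #   inline int FILE_ID = 5;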
- return (
- 'inline int PID = %d;\n'
- 'inline string SCRIPT = "%s";\n'
- 'inline int FILE_ID = %d;\n'
- '\n'
- '%s') % (
- os.getpid(),
- self._script,
- self._dummy_file_id,
- self.D_CODE) + self.D_CODE_EXECVE
-
- def trace(self, cmd, cwd, tracename, output):
- """Runs dtrace on an executable.
-
-    dtruss is broken when it starts the process itself or when tracing
-    child processes, so this code starts a wrapper process generated with
-    create_thunk(), which in turn starts the executable to trace.
- """
- logging.info('trace(%s, %s, %s, %s)' % (cmd, cwd, tracename, output))
- assert os.path.isabs(cmd[0]), cmd[0]
- assert os.path.isabs(cwd), cwd
- assert os.path.normpath(cwd) == cwd, cwd
- with self._lock:
- if not self._initialized:
- raise TracingFailure(
-          'Called Tracer.trace() on an uninitialized object',
- None, None, None, tracename)
- assert tracename not in (i['trace'] for i in self._traces)
-
- # Starts the script wrapper to start the child process. This signals the
- # dtrace script that this process is to be traced.
- stdout = stderr = None
- if output:
- stdout = subprocess.PIPE
- stderr = subprocess.STDOUT
- child_cmd = [
- sys.executable,
- self._script,
- tracename,
- ]
- # Call a dummy function so that dtrace knows I'm about to launch a process
- # that needs to be traced.
- # Yummy.
- child = subprocess.Popen(
- child_cmd + fix_python_path(cmd),
- stdin=subprocess.PIPE,
- stdout=stdout,
- stderr=stderr,
- cwd=cwd)
- logging.debug('Started child pid: %d' % child.pid)
-
- out = child.communicate()[0]
-    # This doesn't mean tracing is done; one of the grand-child processes
-    # may still be alive. It will be tracked by the dtrace script.
-
- with self._lock:
- assert tracename not in (i['trace'] for i in self._traces)
- self._traces.append(
- {
- 'cmd': cmd,
- 'cwd': cwd,
-          # The pid of the wrapper process, not very useful.
- 'pid': child.pid,
- 'trace': tracename,
- 'output': out,
- })
- return child.returncode, out
-
- def close(self, timeout=None):
- """Terminates dtrace."""
- logging.debug('close(%s)' % timeout)
- try:
- try:
- super(Dtrace.Tracer, self).close(timeout)
- # Signal dtrace that it should stop now.
- os.ftruncate(self._dummy_file_id, 0)
- if timeout:
- start = time.time()
- # Use polling. :/
- while (self._dtrace.poll() is None and
- (time.time() - start) < timeout):
- time.sleep(0.1)
- self._dtrace.kill()
- self._dtrace.wait()
- finally:
- # Make sure to kill it in any case.
- if self._dtrace.poll() is None:
- try:
- self._dtrace.kill()
- self._dtrace.wait()
- except OSError:
- pass
-
- if self._dtrace.returncode != 0:
- # Warn about any dtrace failure but basically ignore it.
- print 'dtrace failure: %s' % self._dtrace.returncode
- finally:
- os.close(self._dummy_file_id)
- os.remove(self._dummy_file_name)
- os.remove(self._script)
-
- def post_process_log(self):
-    """Sorts the log back into the order in which each call occurred.
-
- dtrace doesn't save the buffer in strict order since it keeps one buffer
- per CPU.
- """
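-    # Each log line starts with a monotonically increasing logindex, e.g.
-    # (illustrative) "2 123 open(...)" must sort before "10 123 chdir(...)",
-    # which is why the sort key below is int() and not the raw string.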
- super(Dtrace.Tracer, self).post_process_log()
- logname = self._logname + '.log'
- with open(logname, 'rb') as logfile:
- lines = [l for l in logfile if l.strip()]
- errors = [l for l in lines if l.startswith('dtrace:')]
- if errors:
- raise TracingFailure(
- 'Found errors in the trace: %s' % '\n'.join(errors),
- None, None, None, logname)
- try:
- lines = sorted(lines, key=lambda l: int(l.split(' ', 1)[0]))
- except ValueError:
- raise TracingFailure(
-          'Found errors in the trace: %s' % '\n'.join(
-              l for l in lines if not l.split(' ', 1)[0].isdigit()),
- None, None, None, logname)
- with open(logname, 'wb') as logfile:
- logfile.write(''.join(lines))
-
- @staticmethod
- def clean_trace(logname):
- for ext in ('', '.log'):
- if os.path.isfile(logname + ext):
- os.remove(logname + ext)
-
- @classmethod
- def parse_log(cls, logname, blacklist):
- logging.info('parse_log(%s, ...)' % logname)
-
- def blacklist_more(filepath):
- # All the HFS metadata is in the form /.vol/...
- return blacklist(filepath) or re.match(r'^\/\.vol\/.+$', filepath)
-
- data = read_json(logname)
- out = []
- for item in data['traces']:
- context = cls.Context(blacklist_more, item['pid'], item['cwd'])
- for line in open(logname + '.log', 'rb'):
- context.on_line(line)
- out.append(
- {
- 'results': context.to_results(),
- 'trace': item['trace'],
- 'output': item['output'],
- })
- return out
-
-
-class LogmanTrace(ApiBase):
- """Uses the native Windows ETW based tracing functionality to trace a child
- process.
-
-  Caveat: this implementation doesn't track cwd or initial_cwd, because the
-  Windows kernel doesn't have a concept of 'current working directory' at
-  all. A Win32 process has a map of current directories, one per drive
-  letter, managed by the user-mode kernel32.dll. In the kernel, a file is
-  always opened relative to another file_object or as an absolute path. All
-  the current working directory logic is done in user mode.
- """
- class Context(ApiBase.Context):
-    """Processes an ETW log line and keeps the list of existing and
-    non-existent files accessed.
-
- Ignores directories.
- """
- # These indexes are for the stripped version in json.
- EVENT_NAME = 0
- TYPE = 1
- PID = 2
- TID = 3
- PROCESSOR_ID = 4
- TIMESTAMP = 5
- USER_DATA = 6
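-    # A stripped event line, as produced by Tracer.post_process_log(), looks
-    # like this (values are illustrative):
-    #   ['Process', 'Start', 1234, '0x0f64', '2', '12899...', ...user data]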
-
- class Process(ApiBase.Context.Process):
- def __init__(self, *args):
- super(LogmanTrace.Context.Process, self).__init__(*args)
-        # Maps file_object -> filepath for opens that succeeded.
- self.file_objects = {}
-
- def __init__(self, blacklist, tracer_pid):
- logging.info('%s(%d)' % (self.__class__.__name__, tracer_pid))
- super(LogmanTrace.Context, self).__init__(blacklist)
- self._drive_map = DosDriveMap()
- # Threads mapping to the corresponding process id.
- self._threads_active = {}
-      # Process ID of the tracer, e.g. trace_inputs.py
- self._tracer_pid = tracer_pid
- self._line_number = 0
-
- def on_line(self, line):
- """Processes a json Event line."""
- self._line_number += 1
- try:
- # By Opcode
- handler = getattr(
- self,
- 'handle_%s_%s' % (line[self.EVENT_NAME], line[self.TYPE]),
- None)
- if not handler:
- raise TracingFailure(
- 'Unexpected event %s_%s' % (
- line[self.EVENT_NAME], line[self.TYPE]),
- None, None, None)
- handler(line)
- except TracingFailure, e:
- # Hack in the values since the handler could be a static function.
- e.pid = line[self.PID]
- e.line = line
- e.line_number = self._line_number
- # Re-raise the modified exception.
- raise
- except (KeyError, NotImplementedError, ValueError), e:
- raise TracingFailure(
- 'Trace generated a %s exception: %s' % (
- e.__class__.__name__, str(e)),
- line[self.PID],
- self._line_number,
- line,
- e)
-
- def to_results(self):
- if not self.root_process:
- raise TracingFailure(
- 'Failed to detect the initial process',
- None, None, None)
- process = self.root_process.to_results_process()
- return Results(process)
-
- def _thread_to_process(self, tid):
- """Finds the process from the thread id."""
- tid = int(tid, 16)
- pid = self._threads_active.get(tid)
- if not pid or not self._process_lookup.get(pid):
- return
- return self._process_lookup[pid]
-
- @classmethod
- def handle_EventTrace_Header(cls, line):
-      """Verifies no event was dropped, e.g. no buffer overrun occurred."""
- BUFFER_SIZE = cls.USER_DATA
- #VERSION = cls.USER_DATA + 1
- #PROVIDER_VERSION = cls.USER_DATA + 2
- #NUMBER_OF_PROCESSORS = cls.USER_DATA + 3
- #END_TIME = cls.USER_DATA + 4
- #TIMER_RESOLUTION = cls.USER_DATA + 5
- #MAX_FILE_SIZE = cls.USER_DATA + 6
- #LOG_FILE_MODE = cls.USER_DATA + 7
- #BUFFERS_WRITTEN = cls.USER_DATA + 8
- #START_BUFFERS = cls.USER_DATA + 9
- #POINTER_SIZE = cls.USER_DATA + 10
- EVENTS_LOST = cls.USER_DATA + 11
- #CPU_SPEED = cls.USER_DATA + 12
- #LOGGER_NAME = cls.USER_DATA + 13
- #LOG_FILE_NAME = cls.USER_DATA + 14
- #BOOT_TIME = cls.USER_DATA + 15
- #PERF_FREQ = cls.USER_DATA + 16
- #START_TIME = cls.USER_DATA + 17
- #RESERVED_FLAGS = cls.USER_DATA + 18
- #BUFFERS_LOST = cls.USER_DATA + 19
- #SESSION_NAME_STRING = cls.USER_DATA + 20
- #LOG_FILE_NAME_STRING = cls.USER_DATA + 21
- if line[EVENTS_LOST] != '0':
- raise TracingFailure(
- ( '%s events were lost during trace, please increase the buffer '
- 'size from %s') % (line[EVENTS_LOST], line[BUFFER_SIZE]),
- None, None, None)
-
- def handle_FileIo_Cleanup(self, line):
- """General wisdom: if a file is closed, it's because it was opened.
-
- Note that FileIo_Close is not used since if a file was opened properly but
- not closed before the process exits, only Cleanup will be logged.
- """
- #IRP = self.USER_DATA
- TTID = self.USER_DATA + 1 # Thread ID, that's what we want.
- FILE_OBJECT = self.USER_DATA + 2
- #FILE_KEY = self.USER_DATA + 3
- proc = self._thread_to_process(line[TTID])
- if not proc:
- # Not a process we care about.
- return
- file_object = line[FILE_OBJECT]
- if file_object in proc.file_objects:
- proc.add_file(proc.file_objects.pop(file_object), False)
-
- def handle_FileIo_Create(self, line):
- """Handles a file open.
-
- All FileIo events are described at
- http://msdn.microsoft.com/library/windows/desktop/aa363884.aspx
- for some value of 'description'.
-
- " (..) process and thread id values of the IO events (..) are not valid "
- http://msdn.microsoft.com/magazine/ee358703.aspx
-
-      The FileIo.Create event doesn't tell whether the CreateFile() call
-      succeeded, so keep track of the file_object and check that it is
-      eventually closed with FileIo_Cleanup.
- """
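-      # Typical flow (illustrative): FileIo_Create registers the file_object
-      # below; handle_FileIo_Cleanup() later pops it and records the path
-      # via proc.add_file().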
- #IRP = self.USER_DATA
- TTID = self.USER_DATA + 1 # Thread ID, that's what we want.
- FILE_OBJECT = self.USER_DATA + 2
- #CREATE_OPTIONS = self.USER_DATA + 3
- #FILE_ATTRIBUTES = self.USER_DATA + 4
-      #SHARE_ACCESS = self.USER_DATA + 5
- OPEN_PATH = self.USER_DATA + 6
-
- proc = self._thread_to_process(line[TTID])
- if not proc:
- # Not a process we care about.
- return
-
- match = re.match(r'^\"(.+)\"$', line[OPEN_PATH])
- raw_path = match.group(1)
-      # Ignore directories and bare drives right away.
- if raw_path.endswith(os.path.sep):
- return
- filepath = self._drive_map.to_win32(raw_path)
-      # Ignore bare drives right away. Some may still fall through with a
-      # format like '\\?\X:'
- if len(filepath) == 2:
- return
- file_object = line[FILE_OBJECT]
- if os.path.isdir(filepath):
-        # There is no O_DIRECTORY equivalent on Windows. The closest is
-        # FILE_FLAG_BACKUP_SEMANTICS but it's not exactly right either. So
-        # simply discard directories as they are found.
- return
- # Override any stale file object
- proc.file_objects[file_object] = filepath
-
- def handle_FileIo_Rename(self, line):
- # TODO(maruel): Handle?
- pass
-
- def handle_Process_End(self, line):
- pid = line[self.PID]
- if self._process_lookup.get(pid):
- logging.info('Terminated: %d' % pid)
- self._process_lookup[pid] = None
- else:
- logging.debug('Terminated: %d' % pid)
-
- def handle_Process_Start(self, line):
- """Handles a new child process started by PID."""
- #UNIQUE_PROCESS_KEY = self.USER_DATA
- PROCESS_ID = self.USER_DATA + 1
- #PARENT_PID = self.USER_DATA + 2
- #SESSION_ID = self.USER_DATA + 3
- #EXIT_STATUS = self.USER_DATA + 4
- #DIRECTORY_TABLE_BASE = self.USER_DATA + 5
- #USER_SID = self.USER_DATA + 6
- IMAGE_FILE_NAME = self.USER_DATA + 7
- COMMAND_LINE = self.USER_DATA + 8
-
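-      # Both IMAGE_FILE_NAME and COMMAND_LINE arrive quoted, e.g.
-      # "python.exe" and "python.exe foo.py" (illustrative); the quotes are
-      # verified and stripped below.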
- ppid = line[self.PID]
- pid = int(line[PROCESS_ID], 16)
- logging.debug(
- 'New process %d->%d (%s) %s' %
- (ppid, pid, line[IMAGE_FILE_NAME], line[COMMAND_LINE]))
-
- if ppid == self._tracer_pid:
- # Need to ignore processes we don't know about because the log is
- # system-wide. self._tracer_pid shall start only one process.
- if self.root_process:
- raise TracingFailure(
- ( 'Parent process is _tracer_pid(%d) but root_process(%d) is '
- 'already set') % (self._tracer_pid, self.root_process.pid),
- None, None, None)
- proc = self.Process(self.blacklist, pid, None)
- self.root_process = proc
- ppid = None
- elif self._process_lookup.get(ppid):
- proc = self.Process(self.blacklist, pid, None)
- self._process_lookup[ppid].children.append(proc)
- else:
- # Ignore
- return
- self._process_lookup[pid] = proc
-
- if (not line[IMAGE_FILE_NAME].startswith('"') or
- not line[IMAGE_FILE_NAME].endswith('"')):
- raise TracingFailure(
-            'Image file name is not properly quoted: %s' % line[IMAGE_FILE_NAME],
- None, None, None)
-
- # TODO(maruel): Process escapes.
- if (not line[COMMAND_LINE].startswith('"') or
- not line[COMMAND_LINE].endswith('"')):
- raise TracingFailure(
- 'Command line is not properly quoted: %s' % line[COMMAND_LINE],
- None, None, None)
- proc.command = CommandLineToArgvW(line[COMMAND_LINE][1:-1])
- proc.executable = line[IMAGE_FILE_NAME][1:-1]
-      # proc.command[0] may be the absolute path of 'executable' but it may
-      # be anything else too. If command[0] happens to end with executable,
-      # use it; otherwise default to the base name.
- cmd0 = proc.command[0].lower()
- if not cmd0.endswith('.exe'):
- # TODO(maruel): That's not strictly true either.
- cmd0 += '.exe'
- if cmd0.endswith(proc.executable) and os.path.isfile(cmd0):
- # Fix the path.
- cmd0 = cmd0.replace('/', os.path.sep)
- cmd0 = os.path.normpath(cmd0)
- proc.executable = get_native_path_case(cmd0)
- logging.info(
- 'New child: %s -> %d %s' % (ppid, pid, proc.executable))
-
- def handle_Thread_End(self, line):
- """Has the same parameters as Thread_Start."""
- tid = int(line[self.TID], 16)
- self._threads_active.pop(tid, None)
-
- def handle_Thread_Start(self, line):
- """Handles a new thread created.
-
- Do not use self.PID here since a process' initial thread is created by
- the parent process.
- """
- PROCESS_ID = self.USER_DATA
- TTHREAD_ID = self.USER_DATA + 1
- #STACK_BASE = self.USER_DATA + 2
- #STACK_LIMIT = self.USER_DATA + 3
- #USER_STACK_BASE = self.USER_DATA + 4
- #USER_STACK_LIMIT = self.USER_DATA + 5
- #AFFINITY = self.USER_DATA + 6
- #WIN32_START_ADDR = self.USER_DATA + 7
- #TEB_BASE = self.USER_DATA + 8
- #SUB_PROCESS_TAG = self.USER_DATA + 9
- #BASE_PRIORITY = self.USER_DATA + 10
- #PAGE_PRIORITY = self.USER_DATA + 11
- #IO_PRIORITY = self.USER_DATA + 12
- #THREAD_FLAGS = self.USER_DATA + 13
- # Do not use self.PID here since a process' initial thread is created by
- # the parent process.
- pid = int(line[PROCESS_ID], 16)
- tid = int(line[TTHREAD_ID], 16)
- logging.debug('New thread pid:%d, tid:%d' % (pid, tid))
- self._threads_active[tid] = pid
-
- @classmethod
- def supported_events(cls):
-      """Returns all the processed events."""
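-      # E.g. handle_FileIo_Create yields ('FileIo', 'Create').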
- out = []
- for member in dir(cls):
- match = re.match(r'^handle_([A-Za-z]+)_([A-Za-z]+)$', member)
- if match:
- out.append(match.groups())
- return out
-
- class Tracer(ApiBase.Tracer):
- # The basic headers.
- EXPECTED_HEADER = [
- u'Event Name',
- u'Type',
- u'Event ID',
- u'Version',
- u'Channel',
- u'Level', # 5
- u'Opcode',
- u'Task',
- u'Keyword',
- u'PID',
- u'TID', # 10
- u'Processor Number',
- u'Instance ID',
- u'Parent Instance ID',
- u'Activity ID',
- u'Related Activity ID', # 15
- u'Clock-Time',
- u'Kernel(ms)', # Both have a resolution of ~15ms which makes them
- u'User(ms)', # pretty much useless.
- u'User Data', # Extra arguments that are event-specific.
- ]
-    # Only the useful headers common to all entries are listed here. Any
-    # column at index 19 or higher depends on the specific event.
- EVENT_NAME = 0
- TYPE = 1
- PID = 9
- TID = 10
- PROCESSOR_ID = 11
- TIMESTAMP = 16
- NULL_GUID = '{00000000-0000-0000-0000-000000000000}'
- USER_DATA = 19
-
- def __init__(self, logname):
- """Starts the log collection.
-
- Requires administrative access. logman.exe is synchronous so no need for a
- "warmup" call. 'Windows Kernel Trace' is *localized* so use its GUID
- instead. The GUID constant name is SystemTraceControlGuid. Lovely.
-
- One can get the list of potentially interesting providers with:
- "logman query providers | findstr /i file"
- """
- super(LogmanTrace.Tracer, self).__init__(logname)
- self._script = create_thunk()
- cmd_start = [
- 'logman.exe',
- 'start',
- 'NT Kernel Logger',
- '-p', '{9e814aad-3204-11d2-9a82-006008a86939}',
- # splitio,fileiocompletion,syscall,file,cswitch,img
- '(process,fileio,thread)',
- '-o', self._logname + '.etl',
- '-ets', # Send directly to kernel
- # Values extracted out of thin air.
- # Event Trace Session buffer size in kb.
- '-bs', '10240',
- # Number of Event Trace Session buffers.
- '-nb', '16', '256',
- ]
- logging.debug('Running: %s' % cmd_start)
- try:
- subprocess.check_call(
- cmd_start,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError, e:
- if e.returncode == -2147024891:
- print >> sys.stderr, 'Please restart with an elevated admin prompt'
- elif e.returncode == -2144337737:
- print >> sys.stderr, (
- 'A kernel trace was already running, stop it and try again')
- raise
-
- def trace(self, cmd, cwd, tracename, output):
- logging.info('trace(%s, %s, %s, %s)' % (cmd, cwd, tracename, output))
- assert os.path.isabs(cmd[0]), cmd[0]
- assert os.path.isabs(cwd), cwd
- assert os.path.normpath(cwd) == cwd, cwd
- with self._lock:
- if not self._initialized:
- raise TracingFailure(
-            'Called Tracer.trace() on an uninitialized object',
- None, None, None, tracename)
- assert tracename not in (i['trace'] for i in self._traces)
-
- # Use "logman -?" for help.
-
- stdout = stderr = None
- if output:
- stdout = subprocess.PIPE
- stderr = subprocess.STDOUT
-
- # Run the child process.
- logging.debug('Running: %s' % cmd)
-      # Use the temporary script generated with create_thunk() so we have a
-      # clear pid owner. Since trace_inputs.py can be used as a library and
-      # could trace multiple processes simultaneously, it would be more
-      # complex if the executable to be traced were executed directly here.
-      # It also sidesteps issues with logman.exe, which needs to be run to
-      # control the kernel trace.
- child_cmd = [
- sys.executable,
- self._script,
- tracename,
- ]
- child = subprocess.Popen(
- child_cmd + fix_python_path(cmd),
- cwd=cwd,
- stdin=subprocess.PIPE,
- stdout=stdout,
- stderr=stderr)
- logging.debug('Started child pid: %d' % child.pid)
- out = child.communicate()[0]
- # This doesn't mean all the grand-children are done. Sadly, we don't have
- # a good way to determine that.
-
- with self._lock:
- assert tracename not in (i['trace'] for i in self._traces)
- self._traces.append({
- 'command': cmd,
- 'cwd': cwd,
- 'pid': child.pid,
- 'trace': tracename,
- 'output': out,
- })
-
- return child.returncode, out
-
- def close(self, _timeout=None):
- """Stops the kernel log collection and converts the traces to text
- representation.
- """
- with self._lock:
- if not self._initialized:
- raise TracingFailure(
-            'Called Tracer.close() on an uninitialized object',
- None, None, None)
- os.remove(self._script)
-      # Save the metadata and add the 'format' key.
- data = {
- 'format': 'csv',
- 'traces': self._traces,
- }
- write_json(self._logname, data, False)
-
- cmd_stop = [
- 'logman.exe',
- 'stop',
- 'NT Kernel Logger',
- '-ets', # Sends the command directly to the kernel.
- ]
- logging.debug('Running: %s' % cmd_stop)
- subprocess.check_call(
- cmd_stop,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- self._initialized = False
-
- def post_process_log(self):
- """Converts the .etl file into .csv then into .json."""
- super(LogmanTrace.Tracer, self).post_process_log()
- logformat = 'csv'
- self._convert_log(logformat)
-
- if logformat == 'csv_utf16':
- def load_file():
- def utf_8_encoder(unicode_csv_data):
- """Encodes the unicode object as utf-8 encoded str instance"""
- for line in unicode_csv_data:
- yield line.encode('utf-8')
-
- def unicode_csv_reader(unicode_csv_data, **kwargs):
- """Encodes temporarily as UTF-8 since csv module doesn't do unicode.
- """
- csv_reader = csv.reader(utf_8_encoder(unicode_csv_data), **kwargs)
- for row in csv_reader:
- # Decode str utf-8 instances back to unicode instances, cell by
- # cell:
- yield [cell.decode('utf-8') for cell in row]
-
-          # The CSV file is UTF-16, so use codecs.open() to load it into
-          # python unicode instances. Then explicitly re-encode as utf-8 str
-          # instances so the csv module can parse them, and finally decode
-          # the utf-8 str cells back into python unicode instances. This
-          # sounds about right.
- for line in unicode_csv_reader(
- codecs.open(self._logname + '.' + logformat, 'r', 'utf-16')):
- # line is a list of unicode objects
- # So much white space!
- yield [i.strip() for i in line]
-
- elif logformat == 'csv':
- def load_file():
- def ansi_csv_reader(ansi_csv_data, **kwargs):
- """Loads an 'ANSI' code page and returns unicode() objects."""
- assert sys.getfilesystemencoding() == 'mbcs'
- encoding = get_current_encoding()
- for row in csv.reader(ansi_csv_data, **kwargs):
- # Decode str 'ansi' instances to unicode instances, cell by cell:
- yield [cell.decode(encoding) for cell in row]
-
-        # The fastest and smallest format but only supports 'ANSI' file
-        # paths, i.e. the filenames are encoded in the 'current' encoding.
- for line in ansi_csv_reader(open(self._logname + '.' + logformat)):
- # line is a list of unicode objects.
- yield [i.strip() for i in line]
-
- supported_events = LogmanTrace.Context.supported_events()
-
- def trim(generator):
- for index, line in enumerate(generator):
- if not index:
- if line != self.EXPECTED_HEADER:
- raise TracingFailure(
- 'Found malformed header: %s' % ' '.join(line),
- None, None, None)
- continue
- # As you can see, the CSV is full of useful non-redundant information:
- if (line[2] != '0' or # Event ID
- line[3] not in ('2', '3') or # Version
- line[4] != '0' or # Channel
- line[5] != '0' or # Level
- line[7] != '0' or # Task
- line[8] != '0x0000000000000000' or # Keyword
- line[12] != '' or # Instance ID
- line[13] != '' or # Parent Instance ID
- line[14] != self.NULL_GUID or # Activity ID
- line[15] != ''): # Related Activity ID
- raise TracingFailure(
- 'Found unexpected values in line: %s' % ' '.join(line),
- None, None, None)
-
- if (line[self.EVENT_NAME], line[self.TYPE]) not in supported_events:
- continue
-
- # Convert the PID in-place from hex.
- line[self.PID] = int(line[self.PID], 16)
-
- yield [
- line[self.EVENT_NAME],
- line[self.TYPE],
- line[self.PID],
- line[self.TID],
- line[self.PROCESSOR_ID],
- line[self.TIMESTAMP],
- ] + line[self.USER_DATA:]
-
- write_json('%s.json' % self._logname, list(trim(load_file())), True)
-
- def _convert_log(self, logformat):
- """Converts the ETL trace to text representation.
-
- Normally, 'csv' is sufficient. If complex scripts are used (like eastern
- languages), use 'csv_utf16'. If localization gets in the way, use 'xml'.
-
- Arguments:
- - logformat: Text format to be generated, csv, csv_utf16 or xml.
-
- Use "tracerpt -?" for help.
- """
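-      # The resulting command is roughly (illustrative):
-      #   tracerpt.exe -l foo.etl -o foo.csv -gmt -y -of CSV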
- LOCALE_INVARIANT = 0x7F
- windll.kernel32.SetThreadLocale(LOCALE_INVARIANT)
- cmd_convert = [
- 'tracerpt.exe',
- '-l', self._logname + '.etl',
- '-o', self._logname + '.' + logformat,
- '-gmt', # Use UTC
- '-y', # No prompt
-        # Use -of XML to get the header of each item after column 19, e.g.
-        # all the actual headers of 'User Data'.
- ]
-
- if logformat == 'csv':
- # tracerpt localizes the 'Type' column, for major brainfuck
- # entertainment. I can't imagine any sane reason to do that.
- cmd_convert.extend(['-of', 'CSV'])
- elif logformat == 'csv_utf16':
- # This causes it to use UTF-16, which doubles the log size but ensures
- # the log is readable for non-ASCII characters.
- cmd_convert.extend(['-of', 'CSV', '-en', 'Unicode'])
- elif logformat == 'xml':
- cmd_convert.extend(['-of', 'XML'])
- else:
- raise ValueError('Unexpected log format \'%s\'' % logformat)
- logging.debug('Running: %s' % cmd_convert)
-      # This can take tens of minutes for large logs.
- # Redirects all output to stderr.
- subprocess.check_call(
- cmd_convert,
- stdin=subprocess.PIPE,
- stdout=sys.stderr,
- stderr=sys.stderr)
-
- @staticmethod
- def clean_trace(logname):
- for ext in ('', '.csv', '.etl', '.json', '.xml'):
- if os.path.isfile(logname + ext):
- os.remove(logname + ext)
-
- @classmethod
- def parse_log(cls, logname, blacklist):
- logging.info('parse_log(%s, %s)' % (logname, blacklist))
-
- def blacklist_more(filepath):
- # All the NTFS metadata is in the form x:\$EXTEND or stuff like that.
- return blacklist(filepath) or re.match(r'[A-Z]\:\\\$EXTEND', filepath)
-
- data = read_json(logname)
- lines = read_json(logname + '.json')
- out = []
- for item in data['traces']:
- context = cls.Context(blacklist_more, item['pid'])
- for line in lines:
- context.on_line(line)
- out.append(
- {
- 'results': context.to_results(),
- 'trace': item['trace'],
- 'output': item['output'],
- })
- return out
-
-
-def get_api():
- """Returns the correct implementation for the current OS."""
- if sys.platform == 'cygwin':
- raise NotImplementedError(
- 'Not implemented for cygwin, start the script from Win32 python')
- flavors = {
- 'win32': LogmanTrace,
- 'darwin': Dtrace,
- 'sunos5': Dtrace,
- 'freebsd7': Dtrace,
- 'freebsd8': Dtrace,
- }
- # Defaults to strace.
- return flavors.get(sys.platform, Strace)()
-
-
-def extract_directories(root_dir, files, blacklist):
-  """Detects if all the files in a directory are in |files| and if so,
-  replaces the individual files with a Results.Directory instance.
-
- Takes a list of Results.File instances and returns a shorter list of
- Results.File and Results.Directory instances.
-
- Arguments:
-  - root_dir: Optional base directory that shouldn't be searched further.
- - files: list of Results.File instances.
- - blacklist: regexp of files to ignore, for example r'.+\.pyc'.
- """
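-  # E.g. (illustrative) if directory 'files1/' contains exactly 'a.txt' and
-  # 'b.txt' and both are in |files|, the two Results.File instances collapse
-  # into a single Results.Directory for 'files1/'.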
- logging.info(
- 'extract_directories(%s, %d files, ...)' % (root_dir, len(files)))
- assert not (root_dir or '').endswith(os.path.sep), root_dir
- assert not root_dir or (get_native_path_case(root_dir) == root_dir)
- assert not any(isinstance(f, Results.Directory) for f in files)
-  # Remove non-existent files.
- files = [f for f in files if f.existent]
- if not files:
- return files
- # All files must share the same root, which can be None.
- assert len(set(f.root for f in files)) == 1, set(f.root for f in files)
-
- # Creates a {directory: {filename: File}} mapping, up to root.
- buckets = {}
- if root_dir:
- buckets[root_dir] = {}
- for fileobj in files:
- path = fileobj.full_path
- directory = os.path.dirname(path)
- assert directory
- # Do not use os.path.basename() so trailing os.path.sep is kept.
- basename = path[len(directory)+1:]
- files_in_directory = buckets.setdefault(directory, {})
- files_in_directory[basename] = fileobj
- # Add all the directories recursively up to root.
- while True:
- old_d = directory
- directory = os.path.dirname(directory)
- if directory + os.path.sep == root_dir or directory == old_d:
- break
- buckets.setdefault(directory, {})
-
- root_prefix = len(root_dir) + 1 if root_dir else 0
- for directory in sorted(buckets, reverse=True):
- actual = set(f for f in os.listdir(directory) if not blacklist(f))
- expected = set(buckets[directory])
- if not (actual - expected):
- parent = os.path.dirname(directory)
- buckets[parent][os.path.basename(directory)] = Results.Directory(
- root_dir,
- directory[root_prefix:],
- False,
- sum(f.size for f in buckets[directory].itervalues()),
- sum(f.nb_files for f in buckets[directory].itervalues()))
- # Remove the whole bucket.
- del buckets[directory]
-
- # Reverse the mapping with what remains. The original instances are returned,
- # so the cached meta data is kept.
- files = sum((x.values() for x in buckets.itervalues()), [])
- return sorted(files, key=lambda x: x.path)
-
-
-def trace(logfile, cmd, cwd, api, output):
- """Traces an executable. Returns (returncode, output) from api.
-
- Arguments:
- - logfile: file to write to.
- - cmd: command to run.
- - cwd: current directory to start the process in.
- - api: a tracing api instance.
-  - output: if True, returns the output; otherwise prints it to the console.
- """
- cmd = fix_python_path(cmd)
- api.clean_trace(logfile)
- with api.get_tracer(logfile) as tracer:
- return tracer.trace(cmd, cwd, 'default', output)
-
-
-def load_trace(logfile, root_dir, api, blacklist):
- """Loads a trace file and returns the Results instance.
-
- Arguments:
- - logfile: File to load.
- - root_dir: Root directory to use to determine if a file is relevant to the
- trace or not.
- - api: A tracing api instance.
- - blacklist: Optional blacklist function to filter out unimportant files.
- """
- data = api.parse_log(logfile, (blacklist or (lambda _: False)))
- assert len(data) == 1, 'More than one trace was detected!'
- if 'exception' in data[0]:
- # It got an exception, raise it.
- raise data[0]['exception']
- results = data[0]['results']
- if root_dir:
- results = results.strip_root(root_dir)
- return results
-
-
-def CMDclean(args):
- """Cleans up traces."""
- parser = OptionParserTraceInputs(command='clean')
- options, args = parser.parse_args(args)
- api = get_api()
- api.clean_trace(options.log)
- return 0
-
-
-def CMDtrace(args):
- """Traces an executable."""
- parser = OptionParserTraceInputs(command='trace')
- parser.allow_interspersed_args = False
- parser.add_option(
- '-q', '--quiet', action='store_true',
- help='Redirects traced executable output to /dev/null')
- options, args = parser.parse_args(args)
-
- if not args:
- parser.error('Please provide a command to run')
-
- if not os.path.isabs(args[0]) and os.access(args[0], os.X_OK):
- args[0] = os.path.abspath(args[0])
-
- api = get_api()
- return trace(options.log, args, os.getcwd(), api, options.quiet)[0]
-
-
-def CMDread(args):
- """Reads the logs and prints the result."""
- parser = OptionParserTraceInputs(command='read')
- parser.add_option(
- '-V', '--variable',
- nargs=2,
- action='append',
- dest='variables',
- metavar='VAR_NAME directory',
- default=[],
- help=('Variables to replace relative directories against. Example: '
-            '"-V \'$HOME\' \'/home/%s\'" will replace all occurrences of '
-            'your home dir with $HOME') % getpass.getuser())
- parser.add_option(
- '--root-dir',
-      help='Root directory to base everything off of. Anything outside of '
-           'this directory will not be reported')
- parser.add_option(
- '-j', '--json', action='store_true',
- help='Outputs raw result data as json')
- parser.add_option(
- '-b', '--blacklist', action='append', default=[],
- help='List of regexp to use as blacklist filter')
- options, args = parser.parse_args(args)
-
- if options.root_dir:
- options.root_dir = os.path.abspath(options.root_dir)
-
- variables = dict(options.variables)
- api = get_api()
- def blacklist(f):
- return any(re.match(b, f) for b in options.blacklist)
- data = api.parse_log(options.log, blacklist)
- # Process each trace.
- output_as_json = []
- for item in data:
- if 'exception' in item:
- print >> sys.stderr, (
- 'Trace %s: Got an exception: %s' % (item['trace'], item['exception']))
- continue
- results = item['results']
- if options.root_dir:
- results = results.strip_root(options.root_dir)
-
- if options.json:
- output_as_json.append(results.flatten())
- else:
- simplified = extract_directories(
- options.root_dir, results.files, blacklist)
- simplified = [f.replace_variables(variables) for f in simplified]
- if len(data) > 1:
- print('Trace: %s' % item['trace'])
- print('Total: %d' % len(results.files))
- print('Non existent: %d' % len(results.non_existent))
- for f in results.non_existent:
- print(' %s' % f.path)
- print(
- 'Interesting: %d reduced to %d' % (
- len(results.existent), len(simplified)))
- for f in simplified:
- print(' %s' % f.path)
-
- if options.json:
- write_json(sys.stdout, output_as_json, False)
- return 0
-
-
-class OptionParserWithLogging(optparse.OptionParser):
- """Adds --verbose option."""
- def __init__(self, verbose=0, **kwargs):
- optparse.OptionParser.__init__(self, **kwargs)
- self.add_option(
- '-v', '--verbose',
- action='count',
- default=verbose,
- help='Use multiple times to increase verbosity')
-
- def parse_args(self, *args, **kwargs):
- options, args = optparse.OptionParser.parse_args(self, *args, **kwargs)
- levels = [logging.ERROR, logging.INFO, logging.DEBUG]
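-    # I.e. no -v selects ERROR, -v selects INFO, -vv or more selects DEBUG.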
- logging.basicConfig(
- level=levels[min(len(levels)-1, options.verbose)],
- format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
- return options, args
-
-
-class OptionParserWithNiceDescription(OptionParserWithLogging):
- """Generates the description with the command's docstring."""
- def __init__(self, **kwargs):
- """Sets 'description' and 'usage' if not already specified."""
- command = kwargs.pop('command', 'help')
- kwargs.setdefault(
- 'description',
- re.sub('[\r\n ]{2,}', ' ', get_command_handler(command).__doc__))
- kwargs.setdefault('usage', '%%prog %s [options]' % command)
- OptionParserWithLogging.__init__(self, **kwargs)
-
-
-class OptionParserTraceInputs(OptionParserWithNiceDescription):
- """Adds automatic --log handling."""
- def __init__(self, **kwargs):
- OptionParserWithNiceDescription.__init__(self, **kwargs)
- self.add_option(
- '-l', '--log', help='Log file to generate or read, required')
-
- def parse_args(self, *args, **kwargs):
- """Makes sure the paths make sense.
-
- On Windows, / and \ are often mixed together in a path.
- """
- options, args = OptionParserWithNiceDescription.parse_args(
- self, *args, **kwargs)
- if not options.log:
- self.error('Must supply a log file with -l')
- options.log = os.path.abspath(options.log)
- return options, args
-
-
-def extract_documentation():
- """Returns a dict {command: description} for each of documented command."""
- commands = (
- fn[3:]
- for fn in dir(sys.modules['__main__'])
- if fn.startswith('CMD') and get_command_handler(fn[3:]).__doc__)
- return dict((fn, get_command_handler(fn).__doc__) for fn in commands)
-
-
-def CMDhelp(args):
- """Prints list of commands or help for a specific command."""
- doc = extract_documentation()
- # Calculates the optimal offset.
- offset = max(len(cmd) for cmd in doc)
- format_str = ' %-' + str(offset + 2) + 's %s'
-  # Generate one-line documentation for each command.
- commands_description = '\n'.join(
- format_str % (cmd, doc[cmd].split('\n')[0]) for cmd in sorted(doc))
-
- parser = OptionParserWithNiceDescription(
- usage='%prog <command> [options]',
- description='Commands are:\n%s\n' % commands_description)
- parser.format_description = lambda _: parser.description
-
- # Strip out any -h or --help argument.
- _, args = parser.parse_args([i for i in args if not i in ('-h', '--help')])
- if len(args) == 1:
- if not get_command_handler(args[0]):
- parser.error('Unknown command %s' % args[0])
-    # The command was "%prog help command"; replace ourselves with
-    # "%prog command --help" so the help is correctly printed out.
- return main(args + ['--help'])
- elif args:
- parser.error('Unknown argument "%s"' % ' '.join(args))
- parser.print_help()
- return 0
-
-
-def get_command_handler(name):
- """Returns the command handler or CMDhelp if it doesn't exist."""
- return getattr(sys.modules['__main__'], 'CMD%s' % name, None)
-
-
-def main_impl(argv):
- command = get_command_handler(argv[0] if argv else 'help')
- if not command:
- return CMDhelp(argv)
- return command(argv[1:])
-
-
-def main(argv):
-  try:
-    return main_impl(argv)
- except TracingFailure, e:
- sys.stderr.write('\nError: ')
- sys.stderr.write(str(e))
- sys.stderr.write('\n')
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/tools/isolate/trace_test_cases.py b/tools/isolate/trace_test_cases.py
deleted file mode 100755
index 1cd7b38..0000000
--- a/tools/isolate/trace_test_cases.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Traces each test case of a google-test executable individually.
-
-Gives detailed information about each test case. The logs can be read afterward
-with ./trace_inputs.py read -l /path/to/executable.logs
-"""
-
-import logging
-import multiprocessing
-import os
-import sys
-import time
-
-import run_test_cases
-import trace_inputs
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
-
-
-class Tracer(object):
- def __init__(self, tracer, cmd, cwd_dir, progress):
- # Constants
- self.tracer = tracer
- self.cmd = cmd[:]
- self.cwd_dir = cwd_dir
- self.progress = progress
-
- def map(self, test_case):
- """Traces a single test case and returns its output."""
- cmd = self.cmd[:]
- cmd.append('--gtest_filter=%s' % test_case)
- tracename = test_case.replace('/', '-')
-
- out = []
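-    # Retry a few times in case the trace comes out invalid; note that
-    # 'valid' is currently hardcoded to True (see the TODO below), so in
-    # practice a single pass happens.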
- for retry in range(5):
- start = time.time()
- returncode, output = self.tracer.trace(
- cmd, self.cwd_dir, tracename, True)
- duration = time.time() - start
- # TODO(maruel): Define a way to detect if an strace log is valid.
- valid = True
- out.append(
- {
- 'test_case': test_case,
- 'returncode': returncode,
- 'duration': duration,
- 'valid': valid,
- 'output': output,
- })
- logging.debug(
- 'Tracing %s done: %d, %.1fs' % (test_case, returncode, duration))
- if retry:
- self.progress.update_item(
- '%s - %d' % (test_case, retry), True, not valid)
- else:
- self.progress.update_item(test_case, True, not valid)
- if valid:
- break
- return out
-
-
-def trace_test_cases(cmd, cwd_dir, test_cases, jobs, logname):
- """Traces test cases one by one."""
- assert os.path.isabs(cwd_dir) and os.path.isdir(cwd_dir)
-
- if not test_cases:
- return 0
-
- # Resolve any symlink.
- cwd_dir = os.path.realpath(cwd_dir)
- assert os.path.isdir(cwd_dir)
-
- progress = run_test_cases.Progress(len(test_cases))
- with run_test_cases.ThreadPool(jobs or multiprocessing.cpu_count()) as pool:
- api = trace_inputs.get_api()
- api.clean_trace(logname)
- with api.get_tracer(logname) as tracer:
- function = Tracer(tracer, cmd, cwd_dir, progress).map
- for test_case in test_cases:
- pool.add_task(function, test_case)
-
- pool.join(progress, 0.1)
- print('')
- return 0
-
-
-def main():
- """CLI frontend to validate arguments."""
- parser = run_test_cases.OptionParserTestCases(
- usage='%prog <options> [gtest]',
- description=sys.modules['__main__'].__doc__)
- parser.format_description = lambda *_: parser.description
- parser.add_option(
- '-o', '--out',
- help='output file, defaults to <executable>.test_cases')
- options, args = parser.parse_args()
-
- if not args:
- parser.error(
-        'Please provide the executable line to run. If you need fancy things '
-        'like xvfb, start this script from *inside* xvfb; it\'ll be much '
-        'faster.')
-
- cmd = run_test_cases.fix_python_path(args)
-
- if not options.out:
- options.out = '%s.test_cases' % cmd[-1]
-
- test_cases = parser.process_gtest_options(cmd, options)
-
- # Then run them.
- return trace_test_cases(
- cmd,
- os.getcwd(),
- test_cases,
- options.jobs,
- # TODO(maruel): options.timeout,
- options.out)
-
-
-if __name__ == '__main__':
- sys.exit(main())