author     jochen@chromium.org <jochen@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-06-06 18:27:38 +0000
committer  jochen@chromium.org <jochen@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-06-06 18:27:38 +0000
commit     a095e91aee6e0459c74217742d19b560547f99b0 (patch)
tree       e1d12af06b25c7787280a5388df7ea7ac953eadf /tools/checkdeps
parent     c81cccf1f719cbdd8553ba394f4a03741cf7b23b (diff)
Revert of Remove now obsolete checkdeps copy (https://codereview.chromium.org/307333003/)
Reason for revert:
Still referenced from blink

Original issue's description:
> Remove now obsolete checkdeps copy
>
> BUG=none
> R=brettw@chromium.org
>
> Committed: https://src.chromium.org/viewvc/chrome?view=rev&revision=275419

TBR=brettw@chromium.org
NOTREECHECKS=true
NOTRY=true
BUG=none

Review URL: https://codereview.chromium.org/320043002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@275482 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'tools/checkdeps')
-rw-r--r--  tools/checkdeps/DEPS  3
-rw-r--r--  tools/checkdeps/OWNERS  1
-rw-r--r--  tools/checkdeps/PRESUBMIT.py  25
-rwxr-xr-x  tools/checkdeps/builddeps.py  375
-rwxr-xr-x  tools/checkdeps/checkdeps.py  237
-rwxr-xr-x  tools/checkdeps/checkdeps_test.py  177
-rw-r--r--  tools/checkdeps/cpp_checker.py  113
-rwxr-xr-x  tools/checkdeps/graphdeps.py  407
-rw-r--r--  tools/checkdeps/java_checker.py  107
-rw-r--r--  tools/checkdeps/results.py  178
-rw-r--r--  tools/checkdeps/rules.py  178
-rw-r--r--  tools/checkdeps/testdata/DEPS  8
-rw-r--r--  tools/checkdeps/testdata/allowed/DEPS  12
-rw-r--r--  tools/checkdeps/testdata/allowed/foo_unittest.cc  5
-rw-r--r--  tools/checkdeps/testdata/allowed/not_a_test.cc  5
-rw-r--r--  tools/checkdeps/testdata/allowed/test.h  11
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/DEPS  5
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/allowed/DEPS  11
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc  5
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc  5
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/allowed/test.h  11
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS  3
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h  5
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h  11
-rw-r--r--  tools/checkdeps/testdata/checkdeps_test/disallowed/test.h  12
-rw-r--r--  tools/checkdeps/testdata/disallowed/allowed/DEPS  3
-rw-r--r--  tools/checkdeps/testdata/disallowed/allowed/skipped/test.h  5
-rw-r--r--  tools/checkdeps/testdata/disallowed/allowed/test.h  11
-rw-r--r--  tools/checkdeps/testdata/disallowed/foo_unittest.cc  10
-rw-r--r--  tools/checkdeps/testdata/disallowed/test.h  12
30 files changed, 1951 insertions, 0 deletions
diff --git a/tools/checkdeps/DEPS b/tools/checkdeps/DEPS
new file mode 100644
index 0000000..7a57b0b
--- /dev/null
+++ b/tools/checkdeps/DEPS
@@ -0,0 +1,3 @@
+skip_child_includes = [
+ "testdata",
+]
diff --git a/tools/checkdeps/OWNERS b/tools/checkdeps/OWNERS
new file mode 100644
index 0000000..06fefbf
--- /dev/null
+++ b/tools/checkdeps/OWNERS
@@ -0,0 +1 @@
+brettw@chromium.org
diff --git a/tools/checkdeps/PRESUBMIT.py b/tools/checkdeps/PRESUBMIT.py
new file mode 100644
index 0000000..5880d26
--- /dev/null
+++ b/tools/checkdeps/PRESUBMIT.py
@@ -0,0 +1,25 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for checkdeps tool.
+"""
+
+
+def CheckChange(input_api, output_api):
+ results = []
+ results.extend(input_api.canned_checks.RunUnitTests(
+ input_api, output_api,
+ [input_api.os_path.join(input_api.PresubmitLocalPath(),
+ 'checkdeps_test.py')]))
+ return results
+
+
+# Mandatory entrypoint.
+def CheckChangeOnUpload(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+
+# Mandatory entrypoint.
+def CheckChangeOnCommit(input_api, output_api):
+ return CheckChange(input_api, output_api)
diff --git a/tools/checkdeps/builddeps.py b/tools/checkdeps/builddeps.py
new file mode 100755
index 0000000..16dd7f3
--- /dev/null
+++ b/tools/checkdeps/builddeps.py
@@ -0,0 +1,375 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Traverses the source tree, parses all found DEPS files, and constructs
+a dependency rule table to be used by subclasses.
+
+The format of the deps file:
+
+First you have the normal module-level deps. These are the ones used by
+gclient. An example would be:
+
+ deps = {
+ "base":"http://foo.bar/trunk/base"
+ }
+
+DEPS files not in the top-level of a module won't need this. Then you
+have any additional include rules. You can add (using "+") or subtract
+(using "-") from the previously specified rules (including
+module-level deps). You can also specify a path that is allowed for
+now but that we intend to remove, using "!"; this is treated the same
+as "+" when check_deps is run by our bots, but a presubmit step will
+show a warning if you add a new include of a file that is only allowed
+by "!".
+
+Note that for .java files, there is currently no difference between
+"+" and "!", even in the presubmit step.
+
+ include_rules = [
+ # Code should be able to use base (it's specified in the module-level
+ # deps above), but nothing in "base/evil" because it's evil.
+ "-base/evil",
+
+ # But this one subdirectory of evil is OK.
+ "+base/evil/not",
+
+ # And it can include files from this other directory even though there is
+ # no deps rule for it.
+ "+tools/crime_fighter",
+
+ # This dependency is allowed for now but work is ongoing to remove it,
+ # so you shouldn't add further dependencies on it.
+ "!base/evil/ok_for_now.h",
+ ]
+
+If you have certain include rules that should only be applied for some
+files within this directory and subdirectories, you can write a
+section named specific_include_rules that is a hash map of regular
+expressions to the list of rules that should apply to files matching
+them. Note that such rules will always be applied before the rules
+from 'include_rules' have been applied, but the order in which rules
+associated with different regular expressions is applied is arbitrary.
+
+ specific_include_rules = {
+ ".*_(unit|browser|api)test\.cc": [
+ "+libraries/testsupport",
+ ],
+ }
+
+DEPS files may be placed anywhere in the tree. Each one applies to all
+subdirectories, where there may be more DEPS files that provide additions or
+subtractions for their own sub-trees.
+
+There is an implicit rule for the current directory (where the DEPS file lives)
+and all of its subdirectories. This prevents you from having to explicitly
+allow the current directory everywhere. This implicit rule is applied first,
+so you can modify or remove it using the normal include rules.
+
+The rules are processed in order. This means you can explicitly allow a higher
+directory and then take away permissions from sub-parts, or the reverse.
+
+Note that all directory separators must be slashes (Unix-style) and not
+backslashes. All directories should be relative to the source root and use
+only lowercase.
+"""
+
+import copy
+import os.path
+import posixpath
+import subprocess
+
+from rules import Rule, Rules
+
+
+# Variable name used in the DEPS file to add or subtract include files from
+# the module-level deps.
+INCLUDE_RULES_VAR_NAME = 'include_rules'
+
+# Variable name used in the DEPS file to add or subtract include files
+# from module-level deps specific to files whose basename (last
+# component of path) matches a given regular expression.
+SPECIFIC_INCLUDE_RULES_VAR_NAME = 'specific_include_rules'
+
+# Optionally present in the DEPS file to list subdirectories which should not
+# be checked. This allows us to skip third party code, for example.
+SKIP_SUBDIRS_VAR_NAME = 'skip_child_includes'
+
+
+def NormalizePath(path):
+ """Returns a path normalized to how we write DEPS rules and compare paths."""
+ return os.path.normcase(path).replace(os.path.sep, posixpath.sep)
+
+
+def _GitSourceDirectories(base_directory):
+ """Returns set of normalized paths to subdirectories containing sources
+ managed by git."""
+ if not os.path.exists(os.path.join(base_directory, '.git')):
+ return set()
+
+ base_dir_norm = NormalizePath(base_directory)
+ git_source_directories = set([base_dir_norm])
+
+ git_ls_files_cmd = ['git', 'ls-files']
+ # FIXME: Use a context manager in Python 3.2+
+ popen = subprocess.Popen(git_ls_files_cmd,
+ stdout=subprocess.PIPE,
+ bufsize=1, # line buffering, since read by line
+ cwd=base_directory)
+ try:
+ try:
+ for line in popen.stdout:
+ dir_path = os.path.join(base_directory, os.path.dirname(line))
+ dir_path_norm = NormalizePath(dir_path)
+ # Add the directory as well as all the parent directories,
+ # stopping once we reach an already-listed directory.
+ while dir_path_norm not in git_source_directories:
+ git_source_directories.add(dir_path_norm)
+ dir_path_norm = posixpath.dirname(dir_path_norm)
+ finally:
+ popen.stdout.close()
+ finally:
+ popen.wait()
+
+ return git_source_directories
+
+
+class DepsBuilder(object):
+ """Parses include_rules from DEPS files."""
+
+ def __init__(self,
+ base_directory=None,
+ verbose=False,
+ being_tested=False,
+ ignore_temp_rules=False,
+ ignore_specific_rules=False):
+ """Creates a new DepsBuilder.
+
+ Args:
+ base_directory: local path to root of checkout, e.g. C:\chr\src.
+ verbose: Set to True for debug output.
+ being_tested: Set to True to ignore the DEPS file at tools/checkdeps/DEPS.
+ ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
+ """
+ base_directory = (base_directory or
+ os.path.join(os.path.dirname(__file__),
+ os.path.pardir, os.path.pardir))
+ self.base_directory = os.path.abspath(base_directory) # Local absolute path
+ self.verbose = verbose
+ self._under_test = being_tested
+ self._ignore_temp_rules = ignore_temp_rules
+ self._ignore_specific_rules = ignore_specific_rules
+
+ # Set of normalized paths
+ self.git_source_directories = _GitSourceDirectories(self.base_directory)
+
+ # Map of normalized directory paths to rules to use for those
+ # directories, or None for directories that should be skipped.
+ # Normalized is: absolute, lowercase, / for separator.
+ self.directory_rules = {}
+ self._ApplyDirectoryRulesAndSkipSubdirs(Rules(), self.base_directory)
+
+ def _ApplyRules(self, existing_rules, includes, specific_includes,
+ cur_dir_norm):
+ """Applies the given include rules, returning the new rules.
+
+ Args:
+ existing_rules: A set of existing rules that will be combined.
+      includes: The list of rules from the "include_rules" section of DEPS.
+ specific_includes: E.g. {'.*_unittest\.cc': ['+foo', '-blat']} rules
+ from the "specific_include_rules" section of DEPS.
+ cur_dir_norm: The current directory, normalized path. We will create an
+ implicit rule that allows inclusion from this directory.
+
+ Returns: A new set of rules combining the existing_rules with the other
+ arguments.
+ """
+ rules = copy.deepcopy(existing_rules)
+
+ # First apply the implicit "allow" rule for the current directory.
+ base_dir_norm = NormalizePath(self.base_directory)
+ if not cur_dir_norm.startswith(base_dir_norm):
+ raise Exception(
+ 'Internal error: base directory is not at the beginning for\n'
+ ' %s and base dir\n'
+ ' %s' % (cur_dir_norm, base_dir_norm))
+ relative_dir = posixpath.relpath(cur_dir_norm, base_dir_norm)
+
+ # Make the help string a little more meaningful.
+ source = relative_dir or 'top level'
+ rules.AddRule('+' + relative_dir,
+ relative_dir,
+ 'Default rule for ' + source)
+
+ def ApplyOneRule(rule_str, dependee_regexp=None):
+ """Deduces a sensible description for the rule being added, and
+ adds the rule with its description to |rules|.
+
+ If we are ignoring temporary rules, this function does nothing
+ for rules beginning with the Rule.TEMP_ALLOW character.
+ """
+ if self._ignore_temp_rules and rule_str.startswith(Rule.TEMP_ALLOW):
+ return
+
+ rule_block_name = 'include_rules'
+ if dependee_regexp:
+ rule_block_name = 'specific_include_rules'
+ if relative_dir:
+ rule_description = relative_dir + "'s %s" % rule_block_name
+ else:
+ rule_description = 'the top level %s' % rule_block_name
+ rules.AddRule(rule_str, relative_dir, rule_description, dependee_regexp)
+
+ # Apply the additional explicit rules.
+ for rule_str in includes:
+ ApplyOneRule(rule_str)
+
+ # Finally, apply the specific rules.
+ if self._ignore_specific_rules:
+ return rules
+
+ for regexp, specific_rules in specific_includes.iteritems():
+ for rule_str in specific_rules:
+ ApplyOneRule(rule_str, regexp)
+
+ return rules
+
+ def _ApplyDirectoryRules(self, existing_rules, dir_path_local_abs):
+ """Combines rules from the existing rules and the new directory.
+
+ Any directory can contain a DEPS file. Top-level DEPS files can contain
+ module dependencies which are used by gclient. We use these, along with
+ additional include rules and implicit rules for the given directory, to
+ come up with a combined set of rules to apply for the directory.
+
+ Args:
+ existing_rules: The rules for the parent directory. We'll add-on to these.
+ dir_path_local_abs: The directory path that the DEPS file may live in (if
+ it exists). This will also be used to generate the
+ implicit rules. This is a local path.
+
+ Returns: A 2-tuple of:
+ (1) the combined set of rules to apply to the sub-tree,
+ (2) a list of all subdirectories that should NOT be checked, as specified
+ in the DEPS file (if any).
+ Subdirectories are single words, hence no OS dependence.
+ """
+ dir_path_norm = NormalizePath(dir_path_local_abs)
+
+ # Check for a .svn directory in this directory or that this directory is
+ # contained in git source directories. This will tell us if it's a source
+ # directory and should be checked.
+ if not (os.path.exists(os.path.join(dir_path_local_abs, '.svn')) or
+ dir_path_norm in self.git_source_directories):
+ return None, []
+
+ # Check the DEPS file in this directory.
+ if self.verbose:
+ print 'Applying rules from', dir_path_local_abs
+ def FromImpl(*_):
+ pass # NOP function so "From" doesn't fail.
+
+ def FileImpl(_):
+ pass # NOP function so "File" doesn't fail.
+
+ class _VarImpl:
+ def __init__(self, local_scope):
+ self._local_scope = local_scope
+
+ def Lookup(self, var_name):
+ """Implements the Var syntax."""
+ try:
+ return self._local_scope['vars'][var_name]
+ except KeyError:
+ raise Exception('Var is not defined: %s' % var_name)
+
+ local_scope = {}
+ global_scope = {
+ 'File': FileImpl,
+ 'From': FromImpl,
+ 'Var': _VarImpl(local_scope).Lookup,
+ }
+ deps_file_path = os.path.join(dir_path_local_abs, 'DEPS')
+
+ # The second conditional here is to disregard the
+ # tools/checkdeps/DEPS file while running tests. This DEPS file
+ # has a skip_child_includes for 'testdata' which is necessary for
+ # running production tests, since there are intentional DEPS
+ # violations under the testdata directory. On the other hand when
+ # running tests, we absolutely need to verify the contents of that
+ # directory to trigger those intended violations and see that they
+ # are handled correctly.
+ if os.path.isfile(deps_file_path) and not (
+ self._under_test and
+ os.path.basename(dir_path_local_abs) == 'checkdeps'):
+ execfile(deps_file_path, global_scope, local_scope)
+ elif self.verbose:
+ print ' No deps file found in', dir_path_local_abs
+
+ # Even if a DEPS file does not exist we still invoke ApplyRules
+ # to apply the implicit "allow" rule for the current directory
+ include_rules = local_scope.get(INCLUDE_RULES_VAR_NAME, [])
+ specific_include_rules = local_scope.get(SPECIFIC_INCLUDE_RULES_VAR_NAME,
+ {})
+ skip_subdirs = local_scope.get(SKIP_SUBDIRS_VAR_NAME, [])
+
+ return (self._ApplyRules(existing_rules, include_rules,
+ specific_include_rules, dir_path_norm),
+ skip_subdirs)
+
+ def _ApplyDirectoryRulesAndSkipSubdirs(self, parent_rules,
+ dir_path_local_abs):
+ """Given |parent_rules| and a subdirectory |dir_path_local_abs| of the
+ directory that owns the |parent_rules|, add |dir_path_local_abs|'s rules to
+ |self.directory_rules|, and add None entries for any of its
+ subdirectories that should be skipped.
+ """
+ directory_rules, excluded_subdirs = self._ApplyDirectoryRules(
+ parent_rules, dir_path_local_abs)
+ dir_path_norm = NormalizePath(dir_path_local_abs)
+ self.directory_rules[dir_path_norm] = directory_rules
+ for subdir in excluded_subdirs:
+ subdir_path_norm = posixpath.join(dir_path_norm, subdir)
+ self.directory_rules[subdir_path_norm] = None
+
+ def GetDirectoryRules(self, dir_path_local):
+ """Returns a Rules object to use for the given directory, or None
+ if the given directory should be skipped.
+
+ Also modifies |self.directory_rules| to store the Rules.
+ This takes care of first building rules for parent directories (up to
+ |self.base_directory|) if needed, which may add rules for skipped
+ subdirectories.
+
+ Args:
+ dir_path_local: A local path to the directory you want rules for.
+ Can be relative and unnormalized.
+ """
+ if os.path.isabs(dir_path_local):
+ dir_path_local_abs = dir_path_local
+ else:
+ dir_path_local_abs = os.path.join(self.base_directory, dir_path_local)
+ dir_path_norm = NormalizePath(dir_path_local_abs)
+
+ if dir_path_norm in self.directory_rules:
+ return self.directory_rules[dir_path_norm]
+
+ parent_dir_local_abs = os.path.dirname(dir_path_local_abs)
+ parent_rules = self.GetDirectoryRules(parent_dir_local_abs)
+ # We need to check for an entry for our dir_path again, since
+ # GetDirectoryRules can modify entries for subdirectories, namely setting
+ # to None if they should be skipped, via _ApplyDirectoryRulesAndSkipSubdirs.
+ # For example, if dir_path == 'A/B/C' and A/B/DEPS specifies that the C
+ # subdirectory be skipped, GetDirectoryRules('A/B') will fill in the entry
+ # for 'A/B/C' as None.
+ if dir_path_norm in self.directory_rules:
+ return self.directory_rules[dir_path_norm]
+
+ if parent_rules:
+ self._ApplyDirectoryRulesAndSkipSubdirs(parent_rules, dir_path_local_abs)
+ else:
+ # If the parent directory should be skipped, then the current
+ # directory should also be skipped.
+ self.directory_rules[dir_path_norm] = None
+ return self.directory_rules[dir_path_norm]
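
The builddeps.py docstring above describes three variables a DEPS file may define: include_rules, specific_include_rules, and skip_child_includes. A minimal illustrative DEPS file combining all three, with hypothetical paths drawn from that docstring's examples, might look like:

# Hypothetical DEPS file; the variable names are the ones builddeps.py reads.
include_rules = [
  "-base/evil",               # disallow this directory
  "+base/evil/not",           # but re-allow one subdirectory
  "!base/evil/ok_for_now.h",  # allowed for now; presubmit warns on new uses
]

specific_include_rules = {
  # Extra allowance only for files whose basename matches this regexp.
  ".*_unittest\.cc": [
    "+libraries/testsupport",
  ],
}

skip_child_includes = [
  # Subdirectories that should not be checked at all.
  "testdata",
]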
diff --git a/tools/checkdeps/checkdeps.py b/tools/checkdeps/checkdeps.py
new file mode 100755
index 0000000..83f59ae
--- /dev/null
+++ b/tools/checkdeps/checkdeps.py
@@ -0,0 +1,237 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Makes sure that files include headers from allowed directories.
+
+Checks DEPS files in the source tree for rules, and applies those rules to
+"#include" and "import" directives in the .cpp and .java source files.
+Any source file including something not permitted by the DEPS files will fail.
+
+See builddeps.py for a detailed description of the DEPS format.
+"""
+
+import os
+import optparse
+import re
+import sys
+
+import cpp_checker
+import java_checker
+import results
+
+from builddeps import DepsBuilder
+from rules import Rule, Rules
+
+
+def _IsTestFile(filename):
+ """Does a rudimentary check to try to skip test files; this could be
+ improved but is good enough for now.
+ """
+ return re.match('(test|mock|dummy)_.*|.*_[a-z]*test\.(cc|mm|java)', filename)
+
+
+class DepsChecker(DepsBuilder):
+ """Parses include_rules from DEPS files and verifies files in the
+ source tree against them.
+ """
+
+ def __init__(self,
+ base_directory=None,
+ verbose=False,
+ being_tested=False,
+ ignore_temp_rules=False,
+ skip_tests=False):
+ """Creates a new DepsChecker.
+
+ Args:
+ base_directory: OS-compatible path to root of checkout, e.g. C:\chr\src.
+ verbose: Set to true for debug output.
+ being_tested: Set to true to ignore the DEPS file at tools/checkdeps/DEPS.
+ ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
+ """
+ DepsBuilder.__init__(
+ self, base_directory, verbose, being_tested, ignore_temp_rules)
+
+ self._skip_tests = skip_tests
+ self.results_formatter = results.NormalResultsFormatter(verbose)
+
+ def Report(self):
+ """Prints a report of results, and returns an exit code for the process."""
+ if self.results_formatter.GetResults():
+ self.results_formatter.PrintResults()
+ return 1
+ print '\nSUCCESS\n'
+ return 0
+
+ def CheckDirectory(self, start_dir):
+ """Checks all relevant source files in the specified directory and
+ its subdirectories for compliance with DEPS rules throughout the
+ tree (starting at |self.base_directory|). |start_dir| must be a
+ subdirectory of |self.base_directory|.
+
+ On completion, self.results_formatter has the results of
+ processing, and calling Report() will print a report of results.
+ """
+ java = java_checker.JavaChecker(self.base_directory, self.verbose)
+ cpp = cpp_checker.CppChecker(self.verbose)
+ checkers = dict(
+ (extension, checker)
+ for checker in [java, cpp] for extension in checker.EXTENSIONS)
+ self._CheckDirectoryImpl(checkers, start_dir)
+
+ def _CheckDirectoryImpl(self, checkers, dir_name):
+ rules = self.GetDirectoryRules(dir_name)
+ if rules is None:
+ return
+
+ # Collect a list of all files and directories to check.
+ files_to_check = []
+ dirs_to_check = []
+ contents = sorted(os.listdir(dir_name))
+ for cur in contents:
+ full_name = os.path.join(dir_name, cur)
+ if os.path.isdir(full_name):
+ dirs_to_check.append(full_name)
+ elif os.path.splitext(full_name)[1] in checkers:
+ if not self._skip_tests or not _IsTestFile(cur):
+ files_to_check.append(full_name)
+
+ # First check all files in this directory.
+ for cur in files_to_check:
+ checker = checkers[os.path.splitext(cur)[1]]
+ file_status = checker.CheckFile(rules, cur)
+ if file_status.HasViolations():
+ self.results_formatter.AddError(file_status)
+
+ # Next recurse into the subdirectories.
+ for cur in dirs_to_check:
+ self._CheckDirectoryImpl(checkers, cur)
+
+ def CheckAddedCppIncludes(self, added_includes):
+ """This is used from PRESUBMIT.py to check new #include statements added in
+ the change being presubmit checked.
+
+ Args:
+ added_includes: ((file_path, (include_line, include_line, ...), ...)
+
+ Return:
+ A list of tuples, (bad_file_path, rule_type, rule_description)
+ where rule_type is one of Rule.DISALLOW or Rule.TEMP_ALLOW and
+ rule_description is human-readable. Empty if no problems.
+ """
+ cpp = cpp_checker.CppChecker(self.verbose)
+ problems = []
+ for file_path, include_lines in added_includes:
+ if not cpp.IsCppFile(file_path):
+ continue
+ rules_for_file = self.GetDirectoryRules(os.path.dirname(file_path))
+ if not rules_for_file:
+ continue
+ for line in include_lines:
+ is_include, violation = cpp.CheckLine(
+ rules_for_file, line, file_path, True)
+ if not violation:
+ continue
+ rule_type = violation.violated_rule.allow
+ if rule_type == Rule.ALLOW:
+ continue
+ violation_text = results.NormalResultsFormatter.FormatViolation(
+ violation, self.verbose)
+ problems.append((file_path, rule_type, violation_text))
+ return problems
+
+
+def PrintUsage():
+ print """Usage: python checkdeps.py [--root <root>] [tocheck]
+
+ --root ROOT Specifies the repository root. This defaults to "../../.."
+ relative to the script file. This will be correct given the
+ normal location of the script in "<root>/tools/checkdeps".
+
+ --(others) There are a few lesser-used options; run with --help to show them.
+
+ tocheck Specifies the directory, relative to root, to check. This defaults
+ to "." so it checks everything.
+
+Examples:
+ python checkdeps.py
+ python checkdeps.py --root c:\\source chrome"""
+
+
+def main():
+ option_parser = optparse.OptionParser()
+ option_parser.add_option(
+ '', '--root',
+ default='', dest='base_directory',
+ help='Specifies the repository root. This defaults '
+ 'to "../../.." relative to the script file, which '
+ 'will normally be the repository root.')
+ option_parser.add_option(
+ '', '--ignore-temp-rules',
+ action='store_true', dest='ignore_temp_rules', default=False,
+ help='Ignore !-prefixed (temporary) rules.')
+ option_parser.add_option(
+ '', '--generate-temp-rules',
+ action='store_true', dest='generate_temp_rules', default=False,
+ help='Print rules to temporarily allow files that fail '
+ 'dependency checking.')
+ option_parser.add_option(
+ '', '--count-violations',
+ action='store_true', dest='count_violations', default=False,
+ help='Count #includes in violation of intended rules.')
+ option_parser.add_option(
+ '', '--skip-tests',
+ action='store_true', dest='skip_tests', default=False,
+ help='Skip checking test files (best effort).')
+ option_parser.add_option(
+ '-v', '--verbose',
+ action='store_true', default=False,
+ help='Print debug logging')
+ option_parser.add_option(
+ '', '--json',
+ help='Path to JSON output file')
+ options, args = option_parser.parse_args()
+
+ deps_checker = DepsChecker(options.base_directory,
+ verbose=options.verbose,
+ ignore_temp_rules=options.ignore_temp_rules,
+ skip_tests=options.skip_tests)
+ base_directory = deps_checker.base_directory # Default if needed, normalized
+
+ # Figure out which directory we have to check.
+ start_dir = base_directory
+ if len(args) == 1:
+ # Directory specified. Start here. It's supposed to be relative to the
+ # base directory.
+ start_dir = os.path.abspath(os.path.join(base_directory, args[0]))
+ elif len(args) >= 2 or (options.generate_temp_rules and
+ options.count_violations):
+ # More than one argument, or incompatible flags, we don't handle this.
+ PrintUsage()
+ return 1
+
+ if not start_dir.startswith(deps_checker.base_directory):
+ print 'Directory to check must be a subdirectory of the base directory,'
+ print 'but %s is not a subdirectory of %s' % (start_dir, base_directory)
+ return 1
+
+ print 'Using base directory:', base_directory
+ print 'Checking:', start_dir
+
+ if options.generate_temp_rules:
+ deps_checker.results_formatter = results.TemporaryRulesFormatter()
+ elif options.count_violations:
+ deps_checker.results_formatter = results.CountViolationsFormatter()
+
+ if options.json:
+ deps_checker.results_formatter = results.JSONResultsFormatter(
+ options.json, deps_checker.results_formatter)
+
+ deps_checker.CheckDirectory(start_dir)
+ return deps_checker.Report()
+
+
+if '__main__' == __name__:
+ sys.exit(main())
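
The CheckAddedCppIncludes docstring above defines its input as (file_path, include_lines) pairs and its output as (path, rule_type, description) tuples. A minimal sketch of calling it programmatically, as a PRESUBMIT check might; the checkout root, file path, and include line are illustrative:

# Sketch only; assumes tools/checkdeps is importable (e.g. on sys.path).
from checkdeps import DepsChecker

checker = DepsChecker(base_directory='/path/to/src')  # hypothetical checkout root
problems = checker.CheckAddedCppIncludes([
    ('chrome/browser/foo.cc',              # dependee file (illustrative)
     ['#include "base/evil/bad.h"']),      # newly added include lines
])
for bad_path, rule_type, description in problems:
  print bad_path, rule_type, description   # Python 2 print, matching the tool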
diff --git a/tools/checkdeps/checkdeps_test.py b/tools/checkdeps/checkdeps_test.py
new file mode 100755
index 0000000..1f93db5
--- /dev/null
+++ b/tools/checkdeps/checkdeps_test.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for checkdeps.
+"""
+
+import os
+import unittest
+
+
+import checkdeps
+import results
+
+
+class CheckDepsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.deps_checker = checkdeps.DepsChecker(being_tested=True)
+
+ def ImplTestRegularCheckDepsRun(self, ignore_temp_rules, skip_tests):
+ self.deps_checker._ignore_temp_rules = ignore_temp_rules
+ self.deps_checker._skip_tests = skip_tests
+ self.deps_checker.CheckDirectory(
+ os.path.join(self.deps_checker.base_directory,
+ 'tools/checkdeps/testdata'))
+
+ problems = self.deps_checker.results_formatter.GetResults()
+ if skip_tests:
+ self.failUnlessEqual(3, len(problems))
+ else:
+ self.failUnlessEqual(4, len(problems))
+
+ def VerifySubstringsInProblems(key_path, substrings_in_sequence):
+ """Finds the problem in |problems| that contains |key_path|,
+ then verifies that each of |substrings_in_sequence| occurs in
+ that problem, in the order they appear in
+ |substrings_in_sequence|.
+ """
+ found = False
+ key_path = os.path.normpath(key_path)
+ for problem in problems:
+ index = problem.find(key_path)
+ if index != -1:
+ for substring in substrings_in_sequence:
+ index = problem.find(substring, index + 1)
+ self.failUnless(index != -1, '%s in %s' % (substring, problem))
+ found = True
+ break
+ if not found:
+ self.fail('Found no problem for file %s' % key_path)
+
+ if ignore_temp_rules:
+ VerifySubstringsInProblems('testdata/allowed/test.h',
+ ['-tools/checkdeps/testdata/disallowed',
+ 'temporarily_allowed.h',
+ '-third_party/explicitly_disallowed',
+ 'Because of no rule applying'])
+ else:
+ VerifySubstringsInProblems('testdata/allowed/test.h',
+ ['-tools/checkdeps/testdata/disallowed',
+ '-third_party/explicitly_disallowed',
+ 'Because of no rule applying'])
+
+ VerifySubstringsInProblems('testdata/disallowed/test.h',
+ ['-third_party/explicitly_disallowed',
+ 'Because of no rule applying',
+ 'Because of no rule applying'])
+ VerifySubstringsInProblems('disallowed/allowed/test.h',
+ ['-third_party/explicitly_disallowed',
+ 'Because of no rule applying',
+ 'Because of no rule applying'])
+
+ if not skip_tests:
+ VerifySubstringsInProblems('allowed/not_a_test.cc',
+ ['-tools/checkdeps/testdata/disallowed'])
+
+ def testRegularCheckDepsRun(self):
+ self.ImplTestRegularCheckDepsRun(False, False)
+
+ def testRegularCheckDepsRunIgnoringTempRules(self):
+ self.ImplTestRegularCheckDepsRun(True, False)
+
+ def testRegularCheckDepsRunSkipTests(self):
+ self.ImplTestRegularCheckDepsRun(False, True)
+
+ def testRegularCheckDepsRunIgnoringTempRulesSkipTests(self):
+ self.ImplTestRegularCheckDepsRun(True, True)
+
+ def CountViolations(self, ignore_temp_rules):
+ self.deps_checker._ignore_temp_rules = ignore_temp_rules
+ self.deps_checker.results_formatter = results.CountViolationsFormatter()
+ self.deps_checker.CheckDirectory(
+ os.path.join(self.deps_checker.base_directory,
+ 'tools/checkdeps/testdata'))
+ return self.deps_checker.results_formatter.GetResults()
+
+ def testCountViolations(self):
+ self.failUnlessEqual('10', self.CountViolations(False))
+
+ def testCountViolationsIgnoringTempRules(self):
+ self.failUnlessEqual('11', self.CountViolations(True))
+
+ def testTempRulesGenerator(self):
+ self.deps_checker.results_formatter = results.TemporaryRulesFormatter()
+ self.deps_checker.CheckDirectory(
+ os.path.join(self.deps_checker.base_directory,
+ 'tools/checkdeps/testdata/allowed'))
+ temp_rules = self.deps_checker.results_formatter.GetResults()
+ expected = [u' "!third_party/explicitly_disallowed/bad.h",',
+ u' "!third_party/no_rule/bad.h",',
+ u' "!tools/checkdeps/testdata/disallowed/bad.h",',
+ u' "!tools/checkdeps/testdata/disallowed/teststuff/bad.h",']
+ self.failUnlessEqual(expected, temp_rules)
+
+ def testCheckAddedIncludesAllGood(self):
+ problems = self.deps_checker.CheckAddedCppIncludes(
+ [['tools/checkdeps/testdata/allowed/test.cc',
+ ['#include "tools/checkdeps/testdata/allowed/good.h"',
+ '#include "tools/checkdeps/testdata/disallowed/allowed/good.h"']
+ ]])
+ self.failIf(problems)
+
+ def testCheckAddedIncludesManyGarbageLines(self):
+ garbage_lines = ["My name is Sam%d\n" % num for num in range(50)]
+ problems = self.deps_checker.CheckAddedCppIncludes(
+ [['tools/checkdeps/testdata/allowed/test.cc', garbage_lines]])
+ self.failIf(problems)
+
+ def testCheckAddedIncludesNoRule(self):
+ problems = self.deps_checker.CheckAddedCppIncludes(
+ [['tools/checkdeps/testdata/allowed/test.cc',
+ ['#include "no_rule_for_this/nogood.h"']
+ ]])
+ self.failUnless(problems)
+
+ def testCheckAddedIncludesSkippedDirectory(self):
+ problems = self.deps_checker.CheckAddedCppIncludes(
+ [['tools/checkdeps/testdata/disallowed/allowed/skipped/test.cc',
+ ['#include "whatever/whocares.h"']
+ ]])
+ self.failIf(problems)
+
+ def testCheckAddedIncludesTempAllowed(self):
+ problems = self.deps_checker.CheckAddedCppIncludes(
+ [['tools/checkdeps/testdata/allowed/test.cc',
+ ['#include "tools/checkdeps/testdata/disallowed/temporarily_allowed.h"']
+ ]])
+ self.failUnless(problems)
+
+ def testCopyIsDeep(self):
+ # Regression test for a bug where we were making shallow copies of
+ # Rules objects and therefore all Rules objects shared the same
+ # dictionary for specific rules.
+ #
+ # The first pair should bring in a rule from testdata/allowed/DEPS
+ # into that global dictionary that allows the
+ # temp_allowed_for_tests.h file to be included in files ending
+ # with _unittest.cc, and the second pair should completely fail
+ # once the bug is fixed, but succeed (with a temporary allowance)
+ # if the bug is in place.
+ problems = self.deps_checker.CheckAddedCppIncludes(
+ [['tools/checkdeps/testdata/allowed/test.cc',
+ ['#include "tools/checkdeps/testdata/disallowed/temporarily_allowed.h"']
+ ],
+ ['tools/checkdeps/testdata/disallowed/foo_unittest.cc',
+ ['#include "tools/checkdeps/testdata/bongo/temp_allowed_for_tests.h"']
+ ]])
+ # With the bug in place, there would be two problems reported, and
+ # the second would be for foo_unittest.cc.
+ self.failUnless(len(problems) == 1)
+ self.failUnless(problems[0][0].endswith('/test.cc'))
+
+
+if __name__ == '__main__':
+ unittest.main()

diff --git a/tools/checkdeps/cpp_checker.py b/tools/checkdeps/cpp_checker.py
new file mode 100644
index 0000000..ca28e4d
--- /dev/null
+++ b/tools/checkdeps/cpp_checker.py
@@ -0,0 +1,113 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Checks C++ and Objective-C files for illegal includes."""
+
+import codecs
+import os
+import re
+
+import results
+from rules import Rule, MessageRule
+
+
+class CppChecker(object):
+
+ EXTENSIONS = [
+ '.h',
+ '.cc',
+ '.cpp',
+ '.m',
+ '.mm',
+ ]
+
+ # The maximum number of non-include lines we can see before giving up.
+ _MAX_UNINTERESTING_LINES = 50
+
+ # The maximum line length, this is to be efficient in the case of very long
+ # lines (which can't be #includes).
+ _MAX_LINE_LENGTH = 128
+
+ # This regular expression will be used to extract filenames from include
+ # statements.
+ _EXTRACT_INCLUDE_PATH = re.compile(
+ '[ \t]*#[ \t]*(?:include|import)[ \t]+"(.*)"')
+
+ def __init__(self, verbose):
+ self._verbose = verbose
+
+ def CheckLine(self, rules, line, dependee_path, fail_on_temp_allow=False):
+ """Checks the given line with the given rule set.
+
+ Returns a tuple (is_include, dependency_violation) where
+ is_include is True only if the line is an #include or #import
+ statement, and dependency_violation is an instance of
+ results.DependencyViolation if the line violates a rule, or None
+ if it does not.
+ """
+ found_item = self._EXTRACT_INCLUDE_PATH.match(line)
+ if not found_item:
+ return False, None # Not a match
+
+ include_path = found_item.group(1)
+
+ if '\\' in include_path:
+ return True, results.DependencyViolation(
+ include_path,
+ MessageRule('Include paths may not include backslashes.'),
+ rules)
+
+ if '/' not in include_path:
+ # Don't fail when no directory is specified. We may want to be more
+ # strict about this in the future.
+ if self._verbose:
+ print ' WARNING: include specified with no directory: ' + include_path
+ return True, None
+
+ rule = rules.RuleApplyingTo(include_path, dependee_path)
+ if (rule.allow == Rule.DISALLOW or
+ (fail_on_temp_allow and rule.allow == Rule.TEMP_ALLOW)):
+ return True, results.DependencyViolation(include_path, rule, rules)
+ return True, None
+
+ def CheckFile(self, rules, filepath):
+ if self._verbose:
+ print 'Checking: ' + filepath
+
+ dependee_status = results.DependeeStatus(filepath)
+ ret_val = '' # We'll collect the error messages in here
+ last_include = 0
+ with codecs.open(filepath, encoding='utf-8') as f:
+ in_if0 = 0
+ for line_num, line in enumerate(f):
+ if line_num - last_include > self._MAX_UNINTERESTING_LINES:
+ break
+
+ line = line.strip()
+
+ # Check to see if we're at / inside an #if 0 block
+ if line.startswith('#if 0'):
+ in_if0 += 1
+ continue
+ if in_if0 > 0:
+ if line.startswith('#if'):
+ in_if0 += 1
+ elif line.startswith('#endif'):
+ in_if0 -= 1
+ continue
+
+ is_include, violation = self.CheckLine(rules, line, filepath)
+ if is_include:
+ last_include = line_num
+ if violation:
+ dependee_status.AddViolation(violation)
+
+ return dependee_status
+
+ @staticmethod
+ def IsCppFile(file_path):
+ """Returns True iff the given path ends in one of the extensions
+ handled by this checker.
+ """
+ return os.path.splitext(file_path)[1] in CppChecker.EXTENSIONS
diff --git a/tools/checkdeps/graphdeps.py b/tools/checkdeps/graphdeps.py
new file mode 100755
index 0000000..e6dee8e
--- /dev/null
+++ b/tools/checkdeps/graphdeps.py
@@ -0,0 +1,407 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dumps a graph of allowed and disallowed inter-module dependencies described
+by the DEPS files in the source tree. Supports DOT and PNG as the output format.
+
+Enables filtering and differential highlighting of parts of the graph based on
+the specified criteria. This allows for a much easier visual analysis of the
+dependencies, including answering questions such as "if a new source must
+depend on modules A, B, and C, what valid options among the existing modules
+are there to put it in."
+
+See builddeps.py for a detailed description of the DEPS format.
+"""
+
+import os
+import optparse
+import pipes
+import re
+import sys
+
+from builddeps import DepsBuilder
+from rules import Rule
+
+
+class DepsGrapher(DepsBuilder):
+ """Parses include_rules from DEPS files and outputs a DOT graph of the
+ allowed and disallowed dependencies between directories and specific file
+ regexps. Can generate only a subgraph of the whole dependency graph
+ corresponding to the provided inclusion and exclusion regexp filters.
+ Also can highlight fanins and/or fanouts of certain nodes matching the
+ provided regexp patterns.
+ """
+
+ def __init__(self,
+ base_directory,
+ verbose,
+ being_tested,
+ ignore_temp_rules,
+ ignore_specific_rules,
+ hide_disallowed_deps,
+ out_file,
+ out_format,
+ layout_engine,
+ unflatten_graph,
+ incl,
+ excl,
+ hilite_fanins,
+ hilite_fanouts):
+ """Creates a new DepsGrapher.
+
+ Args:
+ base_directory: OS-compatible path to root of checkout, e.g. C:\chr\src.
+ verbose: Set to true for debug output.
+ being_tested: Set to true to ignore the DEPS file at tools/graphdeps/DEPS.
+ ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
+ ignore_specific_rules: Ignore rules from specific_include_rules sections.
+ hide_disallowed_deps: Hide disallowed dependencies from the output graph.
+ out_file: Output file name.
+ out_format: Output format (anything GraphViz dot's -T option supports).
+ layout_engine: Layout engine for formats other than 'dot'
+ (anything that GraphViz dot's -K option supports).
+ unflatten_graph: Try to reformat the output graph so it is narrower and
+ taller. Helps fight overly flat and wide graphs, but
+ sometimes produces a worse result.
+ incl: Include only nodes matching this regexp; such nodes' fanin/fanout
+ is also included.
+ excl: Exclude nodes matching this regexp; such nodes' fanin/fanout is
+ processed independently.
+ hilite_fanins: Highlight fanins of nodes matching this regexp with a
+ different edge and node color.
+ hilite_fanouts: Highlight fanouts of nodes matching this regexp with a
+ different edge and node color.
+ """
+ DepsBuilder.__init__(
+ self,
+ base_directory,
+ verbose,
+ being_tested,
+ ignore_temp_rules,
+ ignore_specific_rules)
+
+ self.ignore_temp_rules = ignore_temp_rules
+ self.ignore_specific_rules = ignore_specific_rules
+ self.hide_disallowed_deps = hide_disallowed_deps
+ self.out_file = out_file
+ self.out_format = out_format
+ self.layout_engine = layout_engine
+ self.unflatten_graph = unflatten_graph
+ self.incl = incl
+ self.excl = excl
+ self.hilite_fanins = hilite_fanins
+ self.hilite_fanouts = hilite_fanouts
+
+ self.deps = set()
+
+ def DumpDependencies(self):
+ """ Builds a dependency rule table and dumps the corresponding dependency
+ graph to all requested formats."""
+ self._BuildDepsGraph(self.base_directory)
+ self._DumpDependencies()
+
+ def _BuildDepsGraph(self, full_path):
+ """Recursively traverses the source tree starting at the specified directory
+ and builds a dependency graph representation in self.deps."""
+ rel_path = os.path.relpath(full_path, self.base_directory)
+ #if re.search(self.incl, rel_path) and not re.search(self.excl, rel_path):
+ rules = self.GetDirectoryRules(full_path)
+ if rules:
+ deps = rules.AsDependencyTuples(
+ include_general_rules=True,
+ include_specific_rules=not self.ignore_specific_rules)
+ self.deps.update(deps)
+
+ for item in sorted(os.listdir(full_path)):
+ next_full_path = os.path.join(full_path, item)
+ if os.path.isdir(next_full_path):
+ self._BuildDepsGraph(next_full_path)
+
+ def _DumpDependencies(self):
+ """Dumps the built dependency graph to the specified file with specified
+ format."""
+ if self.out_format == 'dot' and not self.layout_engine:
+ if self.unflatten_graph:
+ pipe = pipes.Template()
+ pipe.append('unflatten -l 2 -c 3', '--')
+ out = pipe.open(self.out_file, 'w')
+ else:
+ out = open(self.out_file, 'w')
+ else:
+ pipe = pipes.Template()
+ if self.unflatten_graph:
+ pipe.append('unflatten -l 2 -c 3', '--')
+ dot_cmd = 'dot -T' + self.out_format
+ if self.layout_engine:
+ dot_cmd += ' -K' + self.layout_engine
+ pipe.append(dot_cmd, '--')
+ out = pipe.open(self.out_file, 'w')
+
+ self._DumpDependenciesImpl(self.deps, out)
+ out.close()
+
+ def _DumpDependenciesImpl(self, deps, out):
+ """Computes nodes' and edges' properties for the dependency graph |deps| and
+ carries out the actual dumping to a file/pipe |out|."""
+ deps_graph = dict()
+ deps_srcs = set()
+
+ # Pre-initialize the graph with src->(dst, allow) pairs.
+ for (allow, src, dst) in deps:
+ if allow == Rule.TEMP_ALLOW and self.ignore_temp_rules:
+ continue
+
+ deps_srcs.add(src)
+ if src not in deps_graph:
+ deps_graph[src] = []
+ deps_graph[src].append((dst, allow))
+
+ # Add all hierarchical parents too, in case some of them don't have their
+ # own DEPS, and therefore are missing from the list of rules. Those will
+ # be recursively populated with their parents' rules in the next block.
+ parent_src = os.path.dirname(src)
+ while parent_src:
+ if parent_src not in deps_graph:
+ deps_graph[parent_src] = []
+ parent_src = os.path.dirname(parent_src)
+
+ # For every node, propagate its rules down to all its children.
+ deps_srcs = list(deps_srcs)
+ deps_srcs.sort()
+ for src in deps_srcs:
+ parent_src = os.path.dirname(src)
+ if parent_src:
+ # We presort the list, so parents are guaranteed to precede children.
+ assert parent_src in deps_graph,\
+ "src: %s, parent_src: %s" % (src, parent_src)
+ for (dst, allow) in deps_graph[parent_src]:
+ # Check that this node does not explicitly override a rule from the
+ # parent that we're about to add.
+ if ((dst, Rule.ALLOW) not in deps_graph[src]) and \
+ ((dst, Rule.TEMP_ALLOW) not in deps_graph[src]) and \
+ ((dst, Rule.DISALLOW) not in deps_graph[src]):
+ deps_graph[src].append((dst, allow))
+
+ node_props = {}
+ edges = []
+
+ # 1) Populate a list of edge specifications in DOT format;
+ # 2) Populate a list of computed raw node attributes to be output as node
+ # specifications in DOT format later on.
+ # Edges and nodes are emphasized with color and line/border weight depending
+ # on how many of incl/excl/hilite_fanins/hilite_fanouts filters they hit,
+ # and in what way.
+ for src in deps_graph.keys():
+ for (dst, allow) in deps_graph[src]:
+ if allow == Rule.DISALLOW and self.hide_disallowed_deps:
+ continue
+
+ if allow == Rule.ALLOW and src == dst:
+ continue
+
+ edge_spec = "%s->%s" % (src, dst)
+ if not re.search(self.incl, edge_spec) or \
+ re.search(self.excl, edge_spec):
+ continue
+
+ if src not in node_props:
+ node_props[src] = {'hilite': None, 'degree': 0}
+ if dst not in node_props:
+ node_props[dst] = {'hilite': None, 'degree': 0}
+
+ edge_weight = 1
+
+ if self.hilite_fanouts and re.search(self.hilite_fanouts, src):
+ node_props[src]['hilite'] = 'lightgreen'
+ node_props[dst]['hilite'] = 'lightblue'
+ node_props[dst]['degree'] += 1
+ edge_weight += 1
+
+ if self.hilite_fanins and re.search(self.hilite_fanins, dst):
+ node_props[src]['hilite'] = 'lightblue'
+ node_props[dst]['hilite'] = 'lightgreen'
+ node_props[src]['degree'] += 1
+ edge_weight += 1
+
+ if allow == Rule.ALLOW:
+ edge_color = (edge_weight > 1) and 'blue' or 'green'
+ edge_style = 'solid'
+ elif allow == Rule.TEMP_ALLOW:
+ edge_color = (edge_weight > 1) and 'blue' or 'green'
+ edge_style = 'dashed'
+ else:
+ edge_color = 'red'
+ edge_style = 'dashed'
+ edges.append(' "%s" -> "%s" [style=%s,color=%s,penwidth=%d];' % \
+ (src, dst, edge_style, edge_color, edge_weight))
+
+ # Reformat the computed raw node attributes into a final DOT representation.
+ nodes = []
+ for (node, attrs) in node_props.iteritems():
+ attr_strs = []
+ if attrs['hilite']:
+ attr_strs.append('style=filled,fillcolor=%s' % attrs['hilite'])
+ attr_strs.append('penwidth=%d' % (attrs['degree'] or 1))
+ nodes.append(' "%s" [%s];' % (node, ','.join(attr_strs)))
+
+ # Output nodes and edges to |out| (can be a file or a pipe).
+ edges.sort()
+ nodes.sort()
+ out.write('digraph DEPS {\n'
+ ' fontsize=8;\n')
+ out.write('\n'.join(nodes))
+ out.write('\n\n')
+ out.write('\n'.join(edges))
+ out.write('\n}\n')
+ out.close()
+
+
+def PrintUsage():
+ print """Usage: python graphdeps.py [--root <root>]
+
+ --root ROOT Specifies the repository root. This defaults to "../../.."
+ relative to the script file. This will be correct given the
+ normal location of the script in "<root>/tools/graphdeps".
+
+ --(others) There are a few lesser-used options; run with --help to show them.
+
+Examples:
+ Dump the whole dependency graph:
+ graphdeps.py
+ Find a suitable place for a new source that must depend on /apps and
+ /content/browser/renderer_host. Limit potential candidates to /apps,
+ /chrome/browser and content/browser, and descendants of those three.
+ Generate both DOT and PNG output. The output will highlight the fanins
+ of /apps and /content/browser/renderer_host. Overlapping nodes in both fanins
+ will be emphasized by a thicker border. Those nodes are the ones that are
+ allowed to depend on both targets, therefore they are all legal candidates
+ to place the new source in:
+ graphdeps.py \
+ --root=./src \
+ --out=./DEPS.svg \
+ --format=svg \
+ --incl='^(apps|chrome/browser|content/browser)->.*' \
+ --excl='.*->third_party' \
+ --fanin='^(apps|content/browser/renderer_host)$' \
+ --ignore-specific-rules \
+ --ignore-temp-rules"""
+
+
+def main():
+ option_parser = optparse.OptionParser()
+ option_parser.add_option(
+ "", "--root",
+ default="", dest="base_directory",
+ help="Specifies the repository root. This defaults "
+ "to '../../..' relative to the script file, which "
+ "will normally be the repository root.")
+ option_parser.add_option(
+ "-f", "--format",
+ dest="out_format", default="dot",
+ help="Output file format. "
+ "Can be anything that GraphViz dot's -T option supports. "
+ "The most useful ones are: dot (text), svg (image), pdf (image)."
+ "NOTES: dotty has a known problem with fonts when displaying DOT "
+ "files on Ubuntu - if labels are unreadable, try other formats.")
+ option_parser.add_option(
+ "-o", "--out",
+ dest="out_file", default="DEPS",
+ help="Output file name. If the name does not end in an extension "
+ "matching the output format, that extension is automatically "
+ "appended.")
+ option_parser.add_option(
+ "-l", "--layout-engine",
+ dest="layout_engine", default="",
+ help="Layout rendering engine. "
+ "Can be anything that GraphViz dot's -K option supports. "
+ "The most useful are in decreasing order: dot, fdp, circo, osage. "
+ "NOTE: '-f dot' and '-f dot -l dot' are different: the former "
+ "will dump a raw DOT graph and stop; the latter will further "
+ "filter it through 'dot -Tdot -Kdot' layout engine.")
+ option_parser.add_option(
+ "-i", "--incl",
+ default="^.*$", dest="incl",
+ help="Include only edges of the graph that match the specified regexp. "
+ "The regexp is applied to edges of the graph formatted as "
+           "'source_node->target_node', where the '->' part is verbatim. "
+ "Therefore, a reliable regexp should look like "
+ "'^(chrome|chrome/browser|chrome/common)->content/public/browser$' "
+ "or similar, with both source and target node regexps present, "
+ "explicit ^ and $, and otherwise being as specific as possible.")
+ option_parser.add_option(
+ "-e", "--excl",
+ default="^$", dest="excl",
+ help="Exclude dependent nodes that match the specified regexp. "
+ "See --incl for details on the format.")
+ option_parser.add_option(
+ "", "--fanin",
+ default="", dest="hilite_fanins",
+ help="Highlight fanins of nodes matching the specified regexp.")
+ option_parser.add_option(
+ "", "--fanout",
+ default="", dest="hilite_fanouts",
+ help="Highlight fanouts of nodes matching the specified regexp.")
+ option_parser.add_option(
+ "", "--ignore-temp-rules",
+ action="store_true", dest="ignore_temp_rules", default=False,
+ help="Ignore !-prefixed (temporary) rules in DEPS files.")
+ option_parser.add_option(
+ "", "--ignore-specific-rules",
+ action="store_true", dest="ignore_specific_rules", default=False,
+ help="Ignore specific_include_rules section of DEPS files.")
+ option_parser.add_option(
+ "", "--hide-disallowed-deps",
+ action="store_true", dest="hide_disallowed_deps", default=False,
+ help="Hide disallowed dependencies in the output graph.")
+ option_parser.add_option(
+ "", "--unflatten",
+ action="store_true", dest="unflatten_graph", default=False,
+ help="Try to reformat the output graph so it is narrower and taller. "
+ "Helps fight overly flat and wide graphs, but sometimes produces "
+ "inferior results.")
+ option_parser.add_option(
+ "-v", "--verbose",
+ action="store_true", default=False,
+ help="Print debug logging")
+ options, args = option_parser.parse_args()
+
+ if not options.out_file.endswith(options.out_format):
+ options.out_file += '.' + options.out_format
+
+ deps_grapher = DepsGrapher(
+ base_directory=options.base_directory,
+ verbose=options.verbose,
+ being_tested=False,
+
+ ignore_temp_rules=options.ignore_temp_rules,
+ ignore_specific_rules=options.ignore_specific_rules,
+ hide_disallowed_deps=options.hide_disallowed_deps,
+
+ out_file=options.out_file,
+ out_format=options.out_format,
+ layout_engine=options.layout_engine,
+ unflatten_graph=options.unflatten_graph,
+
+ incl=options.incl,
+ excl=options.excl,
+ hilite_fanins=options.hilite_fanins,
+ hilite_fanouts=options.hilite_fanouts)
+
+ if len(args) > 0:
+ PrintUsage()
+ return 1
+
+ print 'Using base directory: ', deps_grapher.base_directory
+ print 'include nodes : ', options.incl
+ print 'exclude nodes : ', options.excl
+ print 'highlight fanins of : ', options.hilite_fanins
+ print 'highlight fanouts of: ', options.hilite_fanouts
+
+ deps_grapher.DumpDependencies()
+ return 0
+
+
+if '__main__' == __name__:
+ sys.exit(main())
diff --git a/tools/checkdeps/java_checker.py b/tools/checkdeps/java_checker.py
new file mode 100644
index 0000000..1d5cecf
--- /dev/null
+++ b/tools/checkdeps/java_checker.py
@@ -0,0 +1,107 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Checks Java files for illegal imports."""
+
+import codecs
+import os
+import re
+
+import results
+from rules import Rule
+
+
+class JavaChecker(object):
+ """Import checker for Java files.
+
+ The CheckFile method uses real filesystem paths, but Java imports work in
+ terms of package names. To deal with this, we have an extra "prescan" pass
+ that reads all the .java files and builds a mapping of class name -> filepath.
+ In CheckFile, we convert each import statement into a real filepath, and check
+ that against the rules in the DEPS files.
+
+ Note that in Java you can always use classes in the same directory without an
+ explicit import statement, so these imports can't be blocked with DEPS files.
+ But that shouldn't be a problem, because same-package imports are pretty much
+ always correct by definition. (If we find a case where this is *not* correct,
+ it probably means the package is too big and needs to be split up.)
+
+ Properties:
+ _classmap: dict of fully-qualified Java class name -> filepath
+ """
+
+ EXTENSIONS = ['.java']
+
+ def __init__(self, base_directory, verbose):
+ self._base_directory = base_directory
+ self._verbose = verbose
+ self._classmap = {}
+ self._PrescanFiles()
+
+ def _PrescanFiles(self):
+ for root, dirs, files in os.walk(self._base_directory):
+ # Skip unwanted subdirectories. TODO(husky): it would be better to do
+ # this via the skip_child_includes flag in DEPS files. Maybe hoist this
+ # prescan logic into checkdeps.py itself?
+ for d in dirs:
+ # Skip hidden directories.
+ if d.startswith('.'):
+ dirs.remove(d)
+ # Skip the "out" directory, as dealing with generated files is awkward.
+ # We don't want paths like "out/Release/lib.java" in our DEPS files.
+ # TODO(husky): We need some way of determining the "real" path to
+ # a generated file -- i.e., where it would be in source control if
+ # it weren't generated.
+ if d == 'out':
+ dirs.remove(d)
+ # Skip third-party directories.
+ if d in ('third_party', 'ThirdParty'):
+ dirs.remove(d)
+ for f in files:
+ if f.endswith('.java'):
+ self._PrescanFile(os.path.join(root, f))
+
+ def _PrescanFile(self, filepath):
+ if self._verbose:
+ print 'Prescanning: ' + filepath
+ with codecs.open(filepath, encoding='utf-8') as f:
+ short_class_name, _ = os.path.splitext(os.path.basename(filepath))
+ for line in f:
+ for package in re.findall('^package\s+([\w\.]+);', line):
+ full_class_name = package + '.' + short_class_name
+ if full_class_name in self._classmap:
+ print 'WARNING: multiple definitions of %s:' % full_class_name
+ print ' ' + filepath
+ print ' ' + self._classmap[full_class_name]
+ print
+ else:
+ self._classmap[full_class_name] = filepath
+ return
+ print 'WARNING: no package definition found in %s' % filepath
+
+ def CheckFile(self, rules, filepath):
+ if self._verbose:
+ print 'Checking: ' + filepath
+
+ dependee_status = results.DependeeStatus(filepath)
+ with codecs.open(filepath, encoding='utf-8') as f:
+ for line in f:
+ for clazz in re.findall('^import\s+(?:static\s+)?([\w\.]+)\s*;', line):
+ if clazz not in self._classmap:
+ # Importing a class from outside the Chromium tree. That's fine --
+ # it's probably a Java or Android system class.
+ continue
+ include_path = os.path.relpath(
+ self._classmap[clazz], self._base_directory)
+ # Convert Windows paths to Unix style, as used in DEPS files.
+ include_path = include_path.replace(os.path.sep, '/')
+ rule = rules.RuleApplyingTo(include_path, filepath)
+ if rule.allow == Rule.DISALLOW:
+ dependee_status.AddViolation(
+ results.DependencyViolation(include_path, rule, rules))
+ if '{' in line:
+ # This is code, so we're finished reading imports for this file.
+ break
+
+ return dependee_status
diff --git a/tools/checkdeps/results.py b/tools/checkdeps/results.py
new file mode 100644
index 0000000..6f69514
--- /dev/null
+++ b/tools/checkdeps/results.py
@@ -0,0 +1,178 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Results object and results formatters for checkdeps tool."""
+
+
+import json
+
+
+class DependencyViolation(object):
+ """A single dependency violation."""
+
+ def __init__(self, include_path, violated_rule, rules):
+ # The include or import path that is in violation of a rule.
+ self.include_path = include_path
+
+ # The violated rule.
+ self.violated_rule = violated_rule
+
+ # The set of rules containing self.violated_rule.
+ self.rules = rules
+
+
+class DependeeStatus(object):
+ """Results object for a dependee file."""
+
+ def __init__(self, dependee_path):
+ # Path of the file whose nonconforming dependencies are listed in
+ # self.violations.
+ self.dependee_path = dependee_path
+
+ # List of DependencyViolation objects that apply to the dependee
+ # file. May be empty.
+ self.violations = []
+
+ def AddViolation(self, violation):
+ """Adds a violation."""
+ self.violations.append(violation)
+
+ def HasViolations(self):
+ """Returns True if this dependee is violating one or more rules."""
+ return not not self.violations
+
+
+class ResultsFormatter(object):
+ """Base class for results formatters."""
+
+ def AddError(self, dependee_status):
+ """Add a formatted result to |self.results| for |dependee_status|,
+ which is guaranteed to return True for
+ |dependee_status.HasViolations|.
+ """
+ raise NotImplementedError()
+
+ def GetResults(self):
+ """Returns the results. May be overridden e.g. to process the
+ results that have been accumulated.
+ """
+ raise NotImplementedError()
+
+ def PrintResults(self):
+ """Prints the results to stdout."""
+ raise NotImplementedError()
+
+
+class NormalResultsFormatter(ResultsFormatter):
+ """A results formatting object that produces the classical,
+ detailed, human-readable output of the checkdeps tool.
+ """
+
+ def __init__(self, verbose):
+ self.results = []
+ self.verbose = verbose
+
+ def AddError(self, dependee_status):
+ lines = []
+ lines.append('\nERROR in %s' % dependee_status.dependee_path)
+ for violation in dependee_status.violations:
+ lines.append(self.FormatViolation(violation, self.verbose))
+ self.results.append('\n'.join(lines))
+
+ @staticmethod
+ def FormatViolation(violation, verbose=False):
+ lines = []
+ if verbose:
+ lines.append(' For %s' % violation.rules)
+ lines.append(
+ ' Illegal include: "%s"\n Because of %s' %
+ (violation.include_path, str(violation.violated_rule)))
+ return '\n'.join(lines)
+
+ def GetResults(self):
+ return self.results
+
+ def PrintResults(self):
+ for result in self.results:
+ print result
+ if self.results:
+ print '\nFAILED\n'
+
+
+class JSONResultsFormatter(ResultsFormatter):
+ """A results formatter that outputs results to a file as JSON."""
+
+ def __init__(self, output_path, wrapped_formatter=None):
+ self.output_path = output_path
+ self.wrapped_formatter = wrapped_formatter
+
+ self.results = []
+
+ def AddError(self, dependee_status):
+ self.results.append({
+ 'dependee_path': dependee_status.dependee_path,
+ 'violations': [{
+ 'include_path': violation.include_path,
+ 'violated_rule': violation.violated_rule.AsDependencyTuple(),
+ } for violation in dependee_status.violations]
+ })
+
+ if self.wrapped_formatter:
+ self.wrapped_formatter.AddError(dependee_status)
+
+ def GetResults(self):
+ with open(self.output_path, 'w') as f:
+ f.write(json.dumps(self.results))
+
+ return self.results
+
+ def PrintResults(self):
+ if self.wrapped_formatter:
+ self.wrapped_formatter.PrintResults()
+ return
+
+ print self.results
+
+
+class TemporaryRulesFormatter(ResultsFormatter):
+ """A results formatter that produces a single line per nonconforming
+ include. The combined output is suitable for directly pasting into a
+ DEPS file as a list of temporary-allow rules.
+ """
+
+ def __init__(self):
+ self.violations = set()
+
+ def AddError(self, dependee_status):
+ for violation in dependee_status.violations:
+ self.violations.add(violation.include_path)
+
+ def GetResults(self):
+ return [' "!%s",' % path for path in sorted(self.violations)]
+
+ def PrintResults(self):
+ for result in self.GetResults():
+ print result
+
+
+class CountViolationsFormatter(ResultsFormatter):
+ """A results formatter that produces a number, the count of #include
+ statements that are in violation of the dependency rules.
+
+ Note that you normally want to instantiate DepsChecker with
+ ignore_temp_rules=True when you use this formatter.
+ """
+
+ def __init__(self):
+ self.count = 0
+
+ def AddError(self, dependee_status):
+ self.count += len(dependee_status.violations)
+
+ def GetResults(self):
+ return '%d' % self.count
+
+ def PrintResults(self):
+ print self.count
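
These formatters are driven by the checker itself; the following is only a rough sketch of how the pieces in results.py compose, using one hand-built violation (the paths and the Rule constructed here are illustrative, not taken from a real run), assuming results.py and rules.py are importable:

  import results
  import rules

  # Build one fake violation by hand (illustrative paths only).
  rule = rules.Rule(rules.Rule.DISALLOW,
                    'tools/checkdeps/testdata/disallowed',
                    'tools/checkdeps/testdata', 'testdata/DEPS')
  status = results.DependeeStatus('tools/checkdeps/testdata/allowed/test.h')
  status.AddViolation(results.DependencyViolation(
      'tools/checkdeps/testdata/disallowed/bad.h', rule, None))

  # JSONResultsFormatter can wrap another formatter, so a single run can
  # produce both the JSON file and the human-readable report.
  formatter = results.JSONResultsFormatter(
      'violations.json', results.NormalResultsFormatter(verbose=False))
  if status.HasViolations():
    formatter.AddError(status)
  formatter.GetResults()    # also writes violations.json as a side effect
  formatter.PrintResults()  # delegates to the wrapped formatter
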
diff --git a/tools/checkdeps/rules.py b/tools/checkdeps/rules.py
new file mode 100644
index 0000000..9dfdc4a
--- /dev/null
+++ b/tools/checkdeps/rules.py
@@ -0,0 +1,178 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base classes to represent dependency rules, used by checkdeps.py"""
+
+
+import os
+import re
+
+
+class Rule(object):
+ """Specifies a single rule for an include, which can be one of
+ ALLOW, DISALLOW and TEMP_ALLOW.
+ """
+
+ # These are the prefixes used to indicate each type of rule. These
+ # are also used as values for self.allow to indicate which type of
+ # rule this is.
+ ALLOW = '+'
+ DISALLOW = '-'
+ TEMP_ALLOW = '!'
+
+ def __init__(self, allow, directory, dependent_directory, source):
+ self.allow = allow
+ self._dir = directory
+ self._dependent_dir = dependent_directory
+ self._source = source
+
+ def __str__(self):
+ return '"%s%s" from %s.' % (self.allow, self._dir, self._source)
+
+ def AsDependencyTuple(self):
+ """Returns a tuple (allow, dependent dir, dependee dir) for this rule,
+ which is fully self-sufficient to answer the question whether the dependent
+ is allowed to depend on the dependee, without knowing the external
+ context."""
+ return self.allow, self._dependent_dir or '.', self._dir or '.'
+
+ def ParentOrMatch(self, other):
+ """Returns true if the input string is an exact match or is a parent
+ of the current rule. For example, the input "foo" would match "foo/bar"."""
+ return self._dir == other or self._dir.startswith(other + '/')
+
+ def ChildOrMatch(self, other):
+ """Returns true if the input string would be covered by this rule. For
+ example, the input "foo/bar" would match the rule "foo"."""
+ return self._dir == other or other.startswith(self._dir + '/')
+
+
+class MessageRule(Rule):
+ """A rule that has a simple message as the reason for failing,
+ unrelated to directory or source.
+ """
+
+ def __init__(self, reason):
+ super(MessageRule, self).__init__(Rule.DISALLOW, '', '', '')
+ self._reason = reason
+
+ def __str__(self):
+ return self._reason
+
+
+def ParseRuleString(rule_string, source):
+ """Returns a tuple of a character indicating what type of rule this
+ is, and a string holding the path the rule applies to.
+ """
+ if not rule_string:
+ raise Exception('The rule string "%s" is empty\nin %s' %
+ (rule_string, source))
+
+ if not rule_string[0] in [Rule.ALLOW, Rule.DISALLOW, Rule.TEMP_ALLOW]:
+ raise Exception(
+ 'The rule string "%s" does not begin with a "+", "-" or "!".' %
+ rule_string)
+
+ return rule_string[0], rule_string[1:]
+
+
+class Rules(object):
+ """Sets of rules for files in a directory.
+
+ By default, rules are added to the set of rules applicable to all
+ dependee files in the directory. Rules may also be added that apply
+ only to dependee files whose filename (last component of their path)
+ matches a given regular expression; hence there is one additional
+ set of rules per unique regular expression.
+ """
+
+ def __init__(self):
+ """Initializes the current rules with an empty rule list for all
+ files.
+ """
+ # We keep the general rules out of the specific rules dictionary,
+ # as we need to always process them last.
+ self._general_rules = []
+
+ # Keys are regular expression strings, values are arrays of rules
+ # that apply to dependee files whose basename matches the regular
+ # expression. These are applied before the general rules, but
+ # their internal order is arbitrary.
+ self._specific_rules = {}
+
+ def __str__(self):
+ result = ['Rules = {\n (apply to all files): [\n%s\n ],' % '\n'.join(
+ ' %s' % x for x in self._general_rules)]
+ for regexp, rules in self._specific_rules.iteritems():
+ result.append(' (limited to files matching %s): [\n%s\n ]' % (
+ regexp, '\n'.join(' %s' % x for x in rules)))
+ result.append(' }')
+ return '\n'.join(result)
+
+ def AsDependencyTuples(self, include_general_rules, include_specific_rules):
+ """Returns a list of tuples (allow, dependent dir, dependee dir) for the
+ specified rules (general/specific). Currently only general rules are
+ supported."""
+ def AddDependencyTuplesImpl(deps, rules, extra_dependent_suffix=""):
+ for rule in rules:
+ (allow, dependent, dependee) = rule.AsDependencyTuple()
+ tup = (allow, dependent + extra_dependent_suffix, dependee)
+ deps.add(tup)
+
+ deps = set()
+ if include_general_rules:
+ AddDependencyTuplesImpl(deps, self._general_rules)
+ if include_specific_rules:
+ for regexp, rules in self._specific_rules.iteritems():
+ AddDependencyTuplesImpl(deps, rules, "/" + regexp)
+ return deps
+
+ def AddRule(self, rule_string, dependent_dir, source, dependee_regexp=None):
+ """Adds a rule for the given rule string.
+
+ Args:
+ rule_string: The include_rule string read from the DEPS file to apply.
+      dependent_dir: The directory to which this rule applies.
+      source: A string representing the location of that string (filename,
+              etc.) so that we can give meaningful errors.
+ dependee_regexp: The rule will only be applied to dependee files
+ whose filename (last component of their path)
+ matches the expression. None to match all
+ dependee files.
+ """
+ rule_type, rule_dir = ParseRuleString(rule_string, source)
+
+ if not dependee_regexp:
+ rules_to_update = self._general_rules
+ else:
+ if dependee_regexp in self._specific_rules:
+ rules_to_update = self._specific_rules[dependee_regexp]
+ else:
+ rules_to_update = []
+
+ # Remove any existing rules or sub-rules that apply. For example, if we're
+ # passed "foo", we should remove "foo", "foo/bar", but not "foobar".
+ rules_to_update = [x for x in rules_to_update
+ if not x.ParentOrMatch(rule_dir)]
+ rules_to_update.insert(0, Rule(rule_type, rule_dir, dependent_dir, source))
+
+ if not dependee_regexp:
+ self._general_rules = rules_to_update
+ else:
+ self._specific_rules[dependee_regexp] = rules_to_update
+
+ def RuleApplyingTo(self, include_path, dependee_path):
+ """Returns the rule that applies to |include_path| for a dependee
+ file located at |dependee_path|.
+ """
+ dependee_filename = os.path.basename(dependee_path)
+ for regexp, specific_rules in self._specific_rules.iteritems():
+ if re.match(regexp, dependee_filename):
+ for rule in specific_rules:
+ if rule.ChildOrMatch(include_path):
+ return rule
+ for rule in self._general_rules:
+ if rule.ChildOrMatch(include_path):
+ return rule
+ return MessageRule('no rule applying.')
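
The interplay between general and specific rules is easiest to see directly on the Rules API. A short sketch, with paths mirroring the testdata below; in the real tool the rules are built from DEPS files rather than added by hand:

  import rules

  r = rules.Rules()
  r.AddRule('-tools/checkdeps/testdata/disallowed',
            'tools/checkdeps/testdata', 'testdata/DEPS')
  r.AddRule('+tools/checkdeps/testdata/allowed',
            'tools/checkdeps/testdata', 'testdata/DEPS')
  r.AddRule('+tools/checkdeps/testdata/disallowed/teststuff',
            'tools/checkdeps/testdata/allowed', 'allowed/DEPS',
            dependee_regexp=r'.*_unittest\.cc')

  # The general "-disallowed" rule applies to an ordinary source file...
  print(r.RuleApplyingTo('tools/checkdeps/testdata/disallowed/bad.h',
                         'allowed/not_a_test.cc').allow)    # '-'
  # ...but the specific rule wins for dependees matching the regexp.
  print(r.RuleApplyingTo(
      'tools/checkdeps/testdata/disallowed/teststuff/good.h',
      'allowed/foo_unittest.cc').allow)                     # '+'
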
diff --git a/tools/checkdeps/testdata/DEPS b/tools/checkdeps/testdata/DEPS
new file mode 100644
index 0000000..f0657f5
--- /dev/null
+++ b/tools/checkdeps/testdata/DEPS
@@ -0,0 +1,8 @@
+include_rules = [
+ "-tools/checkdeps/testdata/disallowed",
+ "+tools/checkdeps/testdata/allowed",
+ "-third_party/explicitly_disallowed",
+]
+skip_child_includes = [
+ "checkdeps_test",
+]
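
Each include_rules entry is an action prefix followed by the path it governs, exactly as ParseRuleString in rules.py splits it, while skip_child_includes tells the checker not to recurse into checkdeps_test when scanning this directory. A small check of how the three entries above parse (paths copied from the DEPS file):

  import rules

  for entry in ('-tools/checkdeps/testdata/disallowed',
                '+tools/checkdeps/testdata/allowed',
                '-third_party/explicitly_disallowed'):
    action, path = rules.ParseRuleString(entry, 'testdata/DEPS')
    # '-' disallows the path, '+' allows it, '!' (not used here) is a
    # temporary allowance.
    print(action + ' ' + path)
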
diff --git a/tools/checkdeps/testdata/allowed/DEPS b/tools/checkdeps/testdata/allowed/DEPS
new file mode 100644
index 0000000..8fb0905
--- /dev/null
+++ b/tools/checkdeps/testdata/allowed/DEPS
@@ -0,0 +1,12 @@
+include_rules = [
+ "+tools/checkdeps/testdata/disallowed/allowed",
+ "!tools/checkdeps/testdata/disallowed/temporarily_allowed.h",
+ "+third_party/allowed_may_use",
+]
+
+specific_include_rules = {
+ ".*_unittest\.cc": [
+ "+tools/checkdeps/testdata/disallowed/teststuff",
+ "!tools/checkdeps/testdata/bongo/temp_allowed_for_tests.h",
+ ]
+}
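
The keys of specific_include_rules are regular expressions matched against a dependee's basename (via re.match in Rules.RuleApplyingTo), so the extra allowances here apply only to unit-test files. A quick check of which files in this directory the key above would cover; note the pattern is anchored only at the start:

  import re

  names = ('foo_unittest.cc', 'not_a_test.cc', 'test.h')
  matching = [n for n in names if re.match(r'.*_unittest\.cc', n)]
  print(matching)   # ['foo_unittest.cc']
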
diff --git a/tools/checkdeps/testdata/allowed/foo_unittest.cc b/tools/checkdeps/testdata/allowed/foo_unittest.cc
new file mode 100644
index 0000000..027adf8
--- /dev/null
+++ b/tools/checkdeps/testdata/allowed/foo_unittest.cc
@@ -0,0 +1,5 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/checkdeps/testdata/disallowed/teststuff/good.h"
diff --git a/tools/checkdeps/testdata/allowed/not_a_test.cc b/tools/checkdeps/testdata/allowed/not_a_test.cc
new file mode 100644
index 0000000..57fa942
--- /dev/null
+++ b/tools/checkdeps/testdata/allowed/not_a_test.cc
@@ -0,0 +1,5 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/checkdeps/testdata/disallowed/teststuff/bad.h"
diff --git a/tools/checkdeps/testdata/allowed/test.h b/tools/checkdeps/testdata/allowed/test.h
new file mode 100644
index 0000000..b78bb2d
--- /dev/null
+++ b/tools/checkdeps/testdata/allowed/test.h
@@ -0,0 +1,11 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/checkdeps/testdata/allowed/good.h"
+#include "tools/checkdeps/testdata/disallowed/bad.h"
+#include "tools/checkdeps/testdata/disallowed/allowed/good.h"
+#include "tools/checkdeps/testdata/disallowed/temporarily_allowed.h"
+#include "third_party/explicitly_disallowed/bad.h"
+#include "third_party/allowed_may_use/good.h"
+#include "third_party/no_rule/bad.h"
diff --git a/tools/checkdeps/testdata/checkdeps_test/DEPS b/tools/checkdeps/testdata/checkdeps_test/DEPS
new file mode 100644
index 0000000..91a9b99
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "-disallowed",
+ "+allowed",
+ "-third_party/explicitly_disallowed",
+]
diff --git a/tools/checkdeps/testdata/checkdeps_test/allowed/DEPS b/tools/checkdeps/testdata/checkdeps_test/allowed/DEPS
new file mode 100644
index 0000000..14aa4d4
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/allowed/DEPS
@@ -0,0 +1,11 @@
+include_rules = [
+ "+disallowed/allowed",
+ "!disallowed/temporarily_allowed.h",
+ "+third_party/allowed_may_use",
+]
+
+specific_include_rules = {
+ ".*_unittest\.cc": [
+ "+disallowed/teststuff",
+ ]
+}
diff --git a/tools/checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc b/tools/checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc
new file mode 100644
index 0000000..68beabf
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/allowed/foo_unittest.cc
@@ -0,0 +1,5 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "disallowed/teststuff/good.h"
diff --git a/tools/checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc b/tools/checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc
new file mode 100644
index 0000000..9e5e0cf
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/allowed/not_a_test.cc
@@ -0,0 +1,5 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "disallowed/teststuff/bad.h"
diff --git a/tools/checkdeps/testdata/checkdeps_test/allowed/test.h b/tools/checkdeps/testdata/checkdeps_test/allowed/test.h
new file mode 100644
index 0000000..f8e4e65
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/allowed/test.h
@@ -0,0 +1,11 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "allowed/good.h"
+#include "disallowed/bad.h"
+#include "disallowed/allowed/good.h"
+#include "disallowed/temporarily_allowed.h"
+#include "third_party/explicitly_disallowed/bad.h"
+#include "third_party/allowed_may_use/good.h"
+#include "third_party/no_rule/bad.h"
diff --git a/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS b/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS
new file mode 100644
index 0000000..2be72b8
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/DEPS
@@ -0,0 +1,3 @@
+skip_child_includes = [
+ "skipped",
+]
diff --git a/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h b/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h
new file mode 100644
index 0000000..96fde19
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/skipped/test.h
@@ -0,0 +1,5 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "whatever/whocares/ok.h"
diff --git a/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h b/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h
new file mode 100644
index 0000000..54f08c3
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/disallowed/allowed/test.h
@@ -0,0 +1,11 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "allowed/good.h"
+// Always allowed to include self and parents.
+#include "disallowed/good.h"
+#include "disallowed/allowed/good.h"
+#include "third_party/explicitly_disallowed/bad.h"
+#include "third_party/allowed_may_use/bad.h"
+#include "third_party/no_rule/bad.h"
diff --git a/tools/checkdeps/testdata/checkdeps_test/disallowed/test.h b/tools/checkdeps/testdata/checkdeps_test/disallowed/test.h
new file mode 100644
index 0000000..15c4d5e
--- /dev/null
+++ b/tools/checkdeps/testdata/checkdeps_test/disallowed/test.h
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "allowed/good.h"
+// Always allowed to include self.
+#include "disallowed/good.h"
+#include "disallowed/allowed/good.h"
+#include "third_party/explicitly_disallowed/bad.h"
+// Only allowed for code under allowed/.
+#include "third_party/allowed_may_use/bad.h"
+#include "third_party/no_rule/bad.h"
diff --git a/tools/checkdeps/testdata/disallowed/allowed/DEPS b/tools/checkdeps/testdata/disallowed/allowed/DEPS
new file mode 100644
index 0000000..2be72b8
--- /dev/null
+++ b/tools/checkdeps/testdata/disallowed/allowed/DEPS
@@ -0,0 +1,3 @@
+skip_child_includes = [
+ "skipped",
+]
diff --git a/tools/checkdeps/testdata/disallowed/allowed/skipped/test.h b/tools/checkdeps/testdata/disallowed/allowed/skipped/test.h
new file mode 100644
index 0000000..96fde19
--- /dev/null
+++ b/tools/checkdeps/testdata/disallowed/allowed/skipped/test.h
@@ -0,0 +1,5 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "whatever/whocares/ok.h"
diff --git a/tools/checkdeps/testdata/disallowed/allowed/test.h b/tools/checkdeps/testdata/disallowed/allowed/test.h
new file mode 100644
index 0000000..3d46a5e
--- /dev/null
+++ b/tools/checkdeps/testdata/disallowed/allowed/test.h
@@ -0,0 +1,11 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/checkdeps/testdata/allowed/good.h"
+// Always allowed to include self and parents.
+#include "tools/checkdeps/testdata/disallowed/good.h"
+#include "tools/checkdeps/testdata/disallowed/allowed/good.h"
+#include "third_party/explicitly_disallowed/bad.h"
+#include "third_party/allowed_may_use/bad.h"
+#include "third_party/no_rule/bad.h"
diff --git a/tools/checkdeps/testdata/disallowed/foo_unittest.cc b/tools/checkdeps/testdata/disallowed/foo_unittest.cc
new file mode 100644
index 0000000..1186ccfe
--- /dev/null
+++ b/tools/checkdeps/testdata/disallowed/foo_unittest.cc
@@ -0,0 +1,10 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Not allowed for code under disallowed/ but temporarily allowed
+// specifically for test code under allowed/. This is a regression
+// test for a bug where we were taking shallow copies of rules when
+// generating rules for subdirectories, so all rule objects were
+// getting the same dictionary for specific rules.
+#include "tools/checkdeps/testdata/disallowed/temp_allowed_for_tests.h"
diff --git a/tools/checkdeps/testdata/disallowed/test.h b/tools/checkdeps/testdata/disallowed/test.h
new file mode 100644
index 0000000..59d7121
--- /dev/null
+++ b/tools/checkdeps/testdata/disallowed/test.h
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/checkdeps/testdata/allowed/good.h"
+// Always allowed to include self.
+#include "tools/checkdeps/testdata/disallowed/good.h"
+#include "tools/checkdeps/testdata/disallowed/allowed/good.h"
+#include "third_party/explicitly_disallowed/bad.h"
+// Only allowed for code under allowed/.
+#include "third_party/allowed_may_use/bad.h"
+#include "third_party/no_rule/bad.h"