author    stgao <stgao@chromium.org>              2014-08-26 14:53:20 -0700
committer Commit bot <commit-bot@chromium.org>    2014-08-26 21:54:30 +0000
commit    6b6cd502e50d93232afee8f993dc47a563f74c84 (patch)
tree      59bdc17fa1b65da8ea18ad90587acde53881472f /tools/findit
parent    54c3719d6bd03e7f2a644b8ce611e68a67bc341f (diff)
[Findit] Improve output format and cherry-pick bug fixes.
1. For DEPS file parsing, switching from svn to git.
2. For output of testcase https://cluster-fuzz.appspot.com/testcase?key=5097237064450048, now it is:

Lines 1103-1112 of file ThreadState.cpp which potentially caused crash are changed in this cl (frame #2, function "blink::ThreadState::performPendingSweep()").

Lines 1120-1136, 1170-1174 of file Heap.cpp which potentially caused crash are changed in this cl (frame #0, function "blink::ThreadHeap<blink::FinalizedHeapObjectHeader>::sweepNormalPages(blink::HeapStats*)"; frame #1, function "blink::ThreadHeap<blink::FinalizedHeapObjectHeader>::sweep(blink::HeapStats*)").

Minimum distance from crashed line to changed line: 0. (File: ThreadState.cpp, Crashed on: 1100, Changed: 1100).

NOTRY=true

Review URL: https://codereview.chromium.org/504443004

Cr-Commit-Position: refs/heads/master@{#291987}
Diffstat (limited to 'tools/findit')
-rw-r--r--  tools/findit/chromium_deps.py             | 88
-rw-r--r--  tools/findit/chromium_deps_unittest.py    | 57
-rw-r--r--  tools/findit/common/http_client_local.py  | 17
-rw-r--r--  tools/findit/common/utils.py              | 37
-rw-r--r--  tools/findit/crash_utils.py               | 35
-rw-r--r--  tools/findit/deps_config.json             | 31
-rw-r--r--  tools/findit/findit_for_crash.py          | 19
-rw-r--r--  tools/findit/git_repository_parser.py     | 16
8 files changed, 199 insertions(+), 101 deletions(-)
diff --git a/tools/findit/chromium_deps.py b/tools/findit/chromium_deps.py
index e2b0b7c..97d31cf 100644
--- a/tools/findit/chromium_deps.py
+++ b/tools/findit/chromium_deps.py
@@ -66,20 +66,29 @@ def _GetComponentName(path, host_dirs):
return '_'.join(path.split('/'))
-def _GetContentOfDEPS(url, retries=5, sleep_time=0.1):
+def _GetContentOfDEPS(revision, retries=5, sleep_time=0.1):
+ chromium_git_file_url_template = CONFIG['chromium_git_file_url']
+
+ deps_file_name = '.DEPS.git'
count = 0
while True:
count += 1
- try:
- _, content = utils.GetHttpClient().Get(url, timeout=60)
- return content
-
- # TODO(jeun): Handle HTTP Errors, such as 404.
- except urllib2.HTTPError:
- if count < retries:
- time.sleep(sleep_time)
- else:
- break
+
+ url = chromium_git_file_url_template % (revision, deps_file_name)
+ http_status_code, content = utils.GetHttpClient().Get(url, timeout=60)
+
+ if http_status_code == 404 and deps_file_name != 'DEPS':
+ deps_file_name = 'DEPS'
+ count = 0
+ continue
+ elif http_status_code == 200:
+ # Googlesource git returns text file encoded in base64, so decode it.
+ return base64.b64decode(content)
+
+ if count < retries:
+ time.sleep(sleep_time)
+ else:
+ break
return ''
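
The new `_GetContentOfDEPS` fetches the DEPS file from gitiles, which serves file contents base64-encoded when `?format=text` is requested, and falls back from `.DEPS.git` to `DEPS` on a 404. Below is a minimal standalone sketch of that fetch-and-decode loop (Python 2, using `urllib2` directly instead of the tool's `utils.GetHttpClient()` wrapper; the URL template mirrors `chromium_git_file_url` in deps_config.json):

```python
import base64
import time
import urllib2

GIT_FILE_URL = 'https://chromium.googlesource.com/chromium/src/+/%s/%s?format=text'


def fetch_deps_content(revision, retries=5, sleep_time=0.1):
  """Returns the DEPS content for a revision, or '' after exhausting retries."""
  deps_file_name = '.DEPS.git'
  count = 0
  while True:
    count += 1
    try:
      response = urllib2.urlopen(GIT_FILE_URL % (revision, deps_file_name),
                                 timeout=60)
      # Gitiles base64-encodes the file body for ?format=text requests.
      return base64.b64decode(response.read())
    except urllib2.HTTPError as e:
      if e.code == 404 and deps_file_name != 'DEPS':
        # Some revisions only have DEPS, not .DEPS.git; retry with DEPS.
        deps_file_name = 'DEPS'
        count = 0
        continue
    if count < retries:
      time.sleep(sleep_time)
    else:
      return ''
```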
@@ -90,7 +99,7 @@ def GetChromiumComponents(chromium_revision,
"""Return a list of components used by Chrome of the given revision.
Args:
- chromium_revision: The revision of the Chrome build.
+ chromium_revision: Revision of the Chrome build: svn revision, or git hash.
os_platform: The target platform of the Chrome build, eg. win, mac, etc.
deps_file_downloader: A function that takes the chromium_revision as input,
and returns the content of the DEPS file. The returned
@@ -104,23 +113,21 @@ def GetChromiumComponents(chromium_revision,
if os_platform.lower() == 'linux':
os_platform = 'unix'
- git_base_url = CONFIG['git_base_url']
- git_deps_path = CONFIG['git_deps_path']
- svn_base_url = CONFIG['svn_base_url']
- svn_deps_path = CONFIG['svn_deps_path']
- svn_src_chromium_url = CONFIG['svn_src_chromium_url']
- is_git_hash = utils.IsGitHash(chromium_revision)
- if is_git_hash:
- url = git_base_url + (git_deps_path % chromium_revision)
- else:
- url = svn_base_url + (svn_deps_path % chromium_revision)
+ chromium_git_base_url = CONFIG['chromium_git_base_url']
- # Download the content of DEPS file in chromium.
- deps_content = deps_file_downloader(url)
+ if not utils.IsGitHash(chromium_revision):
+ # Convert svn revision or commit position to Git hash.
+ cr_rev_url_template = CONFIG['cr_rev_url']
+ url = cr_rev_url_template % chromium_revision
+ # TODO(stgao): Add retry in HttpClient.
+ _, content = utils.GetHttpClient().Get(url, timeout=60)
+ cr_rev_data = json.loads(content)
+ if 'git_sha' not in cr_rev_data:
+ raise Exception('Failed to convert svn revision to git hash')
+ chromium_revision = cr_rev_data['git_sha']
- # Googlesource git returns text file encoded in base64, so decode it.
- if is_git_hash:
- deps_content = base64.b64decode(deps_content)
+ # Download the content of DEPS file in chromium.
+ deps_content = deps_file_downloader(chromium_revision)
all_deps = {}
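
The svn-revision-to-git-hash step above goes through the crrev redirect API, which returns JSON containing a `git_sha` field for a numeric commit position. A hedged sketch of just that conversion, with the project's HTTP client swapped for `urllib2` (the URL matches `cr_rev_url` in deps_config.json; the revision and hash are the pair used in the unit tests):

```python
import json
import urllib2

CR_REV_URL = 'https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/%s'


def to_git_hash(chromium_revision):
  """Maps an svn revision / commit position such as '283296' to a git hash."""
  content = urllib2.urlopen(CR_REV_URL % chromium_revision, timeout=60).read()
  cr_rev_data = json.loads(content)
  if 'git_sha' not in cr_rev_data:
    raise Exception('Failed to convert svn revision to git hash')
  return cr_rev_data['git_sha']


print to_git_hash('283296')  # e.g. b041fda2e8493dcb26aac08deb493943df240cbb
```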
@@ -133,16 +140,14 @@ def GetChromiumComponents(chromium_revision,
# Figure out components based on the dependencies.
components = {}
host_dirs = CONFIG['host_directories']
- for component_path in all_deps:
+ for component_path, component_repo_url in all_deps.iteritems():
+ if component_repo_url is None:
+      # For some platforms, e.g. iOS, some components are ignored.
+ continue
+
name = _GetComponentName(component_path, host_dirs)
- repository, revision = all_deps[component_path].split('@')
+ repository, revision = component_repo_url.split('@')
is_git_hash = utils.IsGitHash(revision)
- if repository.startswith('/'):
- # In DEPS file, if a path starts with /, it is a relative path to the
- # https://src.chromium.org/chrome. Strip /trunk at the end of the base
- # url and add it to the base url.
- # TODO(stgao): Use git repo after chromium moves to git.
- repository = svn_src_chromium_url + repository
if is_git_hash:
repository_type = 'git'
else:
@@ -157,19 +162,12 @@ def GetChromiumComponents(chromium_revision,
'revision': revision
}
- # Add chromium as a component, depending on the repository type.
- if is_git_hash:
- repository = git_base_url
- repository_type = 'git'
- else:
- repository = svn_base_url
- repository_type = 'svn'
-
+ # Add chromium as a component.
components['src/'] = {
'path': 'src/',
'name': 'chromium',
- 'repository': repository,
- 'repository_type': repository_type,
+ 'repository': chromium_git_base_url,
+ 'repository_type': 'git',
'revision': chromium_revision
}
diff --git a/tools/findit/chromium_deps_unittest.py b/tools/findit/chromium_deps_unittest.py
index a26ac9c..7f64a12 100644
--- a/tools/findit/chromium_deps_unittest.py
+++ b/tools/findit/chromium_deps_unittest.py
@@ -5,6 +5,7 @@
import unittest
import chromium_deps
+from common import utils
class ChromiumDEPSTest(unittest.TestCase):
@@ -38,12 +39,13 @@ deps_os = {
def testGetChromiumComponents(self):
chromium_revision = '283296'
+ chromium_revision_git_hash = 'b041fda2e8493dcb26aac08deb493943df240cbb'
webkit_revision = '178200'
breakpad_revision = '1345'
liblouis_commit_hashcode = '3c2daee56250162e5a75830871601d74328d39f5'
def _GetContentOfDEPS(chromium_revision_tmp):
- self.assertEqual(chromium_revision_tmp, chromium_revision)
+ self.assertEqual(chromium_revision_tmp, chromium_revision_git_hash)
return self.DEPS_TEMPLATE % (webkit_revision, breakpad_revision,
liblouis_commit_hashcode)
@@ -65,10 +67,10 @@ deps_os = {
},
'src/': {
'path': 'src/',
- 'repository_type': 'svn',
+ 'repository_type': 'git',
'name': 'chromium',
- 'repository': 'https://src.chromium.org/chrome/trunk',
- 'revision': chromium_revision
+ 'repository': 'https://chromium.googlesource.com/chromium/src/',
+ 'revision': chromium_revision_git_hash
},
'src/third_party/WebKit/': {
'path': 'src/third_party/WebKit/',
@@ -85,22 +87,24 @@ deps_os = {
def testGetChromiumComponentRange(self):
chromium_revision1 = '283200'
+ chromium_revision_git_hash1 = 'c53c387f46a2ff0cf7c072222b826cff0817a80f'
webkit_revision1 = '178084'
breakpad_revision1 = '1345'
liblouis_commit_hashcode1 = '3c2daee56250162e5a75830871601d74328d39f5'
chromium_revision2 = '283296'
+ chromium_revision_git_hash2 = 'b041fda2e8493dcb26aac08deb493943df240cbb'
webkit_revision2 = '178200'
breakpad_revision2 = '1345'
liblouis_commit_hashcode2 = '3c2daee56250162e5a75830871601d74328d39f5'
def _GetContentOfDEPS(chromium_revision):
chromium_revision = str(chromium_revision)
- if chromium_revision == chromium_revision1:
+ if chromium_revision == chromium_revision_git_hash1:
return self.DEPS_TEMPLATE % (webkit_revision1, breakpad_revision1,
liblouis_commit_hashcode1)
else:
- self.assertEqual(chromium_revision2, chromium_revision)
+ self.assertEqual(chromium_revision, chromium_revision_git_hash2)
return self.DEPS_TEMPLATE % (webkit_revision2, breakpad_revision2,
liblouis_commit_hashcode2)
@@ -125,13 +129,13 @@ deps_os = {
'repository_type': 'git'
},
'src/': {
- 'old_revision': chromium_revision1,
+ 'old_revision': chromium_revision_git_hash1,
'name': 'chromium',
- 'repository': 'https://src.chromium.org/chrome/trunk',
+ 'repository': 'https://chromium.googlesource.com/chromium/src/',
'rolled': True,
- 'new_revision': chromium_revision2,
+ 'new_revision': chromium_revision_git_hash2,
'path': 'src/',
- 'repository_type': 'svn'
+ 'repository_type': 'git'
},
'src/third_party/WebKit/': {
'old_revision': webkit_revision1,
@@ -149,6 +153,37 @@ deps_os = {
deps_file_downloader=_GetContentOfDEPS)
self.assertEqual(expected_results, components)
+ def _VerifyGitHashForAllComponents(self, deps):
+ self.assertTrue(deps)
+ self.assertTrue(isinstance(deps, dict))
+ for component in deps.values():
+ for key in ['revision', 'old_revision', 'new_revision']:
+ if key in component:
+ self.assertTrue(utils.IsGitHash(component[key]))
+
+ def testComponentRangeCrossGitMigrationPoint(self):
+ # The old revision is from svn.
+ # The new revision is from git.
+ deps = chromium_deps.GetChromiumComponentRange(
+ '291440',
+ '744746cc51ef81c8f8d727fafa46b14d1c03fe44')
+ self._VerifyGitHashForAllComponents(deps)
+
def testGetSvnRevision(self):
+    # For this case, the svn revision needs converting to a git hash, and both
+    # .DEPS.git and DEPS exist.
deps = chromium_deps.GetChromiumComponents(284750)
- self.assertTrue(isinstance(deps, dict))
+ self._VerifyGitHashForAllComponents(deps)
+
+ def testGetGitRevisionWithoutDEPS_dot_GIT(self):
+ # For this case, there is only DEPS, not .DEPS.git.
+ deps = chromium_deps.GetChromiumComponents(
+ 'f8b3fe9660d8dda318800f55d5e29799bbfd43f7')
+ self._VerifyGitHashForAllComponents(deps)
+
+
+ def testGetGitRevisionWithDEPS_dot_GIT(self):
+ # For this case, there will be .DEPS.git.
+ deps = chromium_deps.GetChromiumComponents(
+ '8ae88241aa9f224e8ce97250f32469d616e437aa')
+ self._VerifyGitHashForAllComponents(deps)
diff --git a/tools/findit/common/http_client_local.py b/tools/findit/common/http_client_local.py
index bf386f9..fccb0b6 100644
--- a/tools/findit/common/http_client_local.py
+++ b/tools/findit/common/http_client_local.py
@@ -205,11 +205,20 @@ def _SendRequest(url, timeout=None):
urllib2.HTTPCookieProcessor(cookielib.MozillaCookieJar(cookie_file)))
url_opener = urllib2.build_opener(*handlers)
- if timeout is not None:
+
+ status_code = None
+ content = None
+
+ try:
response = url_opener.open(url, timeout=timeout)
- else:
- response = url_opener.open(url)
- return response.code, response.read()
+
+ status_code = response.code
+ content = response.read()
+ except urllib2.HTTPError as e:
+ status_code = e.code
+ content = None
+
+ return status_code, content
class HttpClientLocal(http_client.HttpClient):
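
With this change `_SendRequest` reports HTTP errors as a status code instead of letting `urllib2.HTTPError` propagate, so callers such as `_GetContentOfDEPS` can branch on 404 versus 200. Roughly the same pattern in isolation (a sketch, not the class itself):

```python
import urllib2


def get_status_and_content(url, timeout=60):
  """Returns (status_code, content); content is None when the server errors."""
  try:
    response = urllib2.urlopen(url, timeout=timeout)
    return response.code, response.read()
  except urllib2.HTTPError as e:
    # A 404 (e.g. a missing .DEPS.git) now surfaces as a code, not an exception.
    return e.code, None
```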
diff --git a/tools/findit/common/utils.py b/tools/findit/common/utils.py
index 363048c..5011e76 100644
--- a/tools/findit/common/utils.py
+++ b/tools/findit/common/utils.py
@@ -29,3 +29,40 @@ def IsGitHash(revision):
def GetHttpClient():
# TODO(stgao): return implementation for appengine when running on appengine.
return HttpClientLocal
+
+
+def JoinLineNumbers(line_numbers, accepted_gap=1):
+ """Join line numbers into line blocks.
+
+ Args:
+    line_numbers: A list of line numbers.
+    accepted_gap: If two line numbers are within the given gap,
+                  they are combined into one block.
+                  Eg: for (1, 2, 3, 6, 7, 8, 12), if |accepted_gap| = 1, result
+                  would be 1-3, 6-8, 12; if |accepted_gap| = 3, result would be
+                  1-8, 12; if |accepted_gap| = 4, result would be 1-12.
+ """
+ if not line_numbers:
+ return ''
+
+ line_numbers = map(int, line_numbers)
+ line_numbers.sort()
+
+ block = []
+ start_line_number = line_numbers[0]
+ last_line_number = start_line_number
+ for current_line_number in line_numbers[1:]:
+ if last_line_number + accepted_gap < current_line_number:
+ if start_line_number == last_line_number:
+ block.append('%d' % start_line_number)
+ else:
+ block.append('%d-%d' % (start_line_number, last_line_number))
+ start_line_number = current_line_number
+ last_line_number = current_line_number
+  else:
+    if start_line_number == last_line_number:
+      block.append('%d' % start_line_number)
+    else:
+      block.append('%d-%d' % (start_line_number, last_line_number))
+
+ return ', '.join(block)
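
The docstring's own example, spelled out as a quick usage check (assumes the module is importable as `common.utils`, as in the unit tests):

```python
from common.utils import JoinLineNumbers

lines = [1, 2, 3, 6, 7, 8, 12]
print JoinLineNumbers(lines)                  # 1-3, 6-8, 12
print JoinLineNumbers(lines, accepted_gap=3)  # 1-8, 12
print JoinLineNumbers(lines, accepted_gap=4)  # 1-12
```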
diff --git a/tools/findit/crash_utils.py b/tools/findit/crash_utils.py
index 65ff729..da3c081 100644
--- a/tools/findit/crash_utils.py
+++ b/tools/findit/crash_utils.py
@@ -108,7 +108,7 @@ def NormalizePath(path, parsed_deps):
repository is not supported, i.e from googlecode.
"""
# First normalize the path by retreiving the normalized path.
- normalized_path = os.path.normpath(path.replace('\\', '/'))
+ normalized_path = os.path.normpath(path).replace('\\', '/')
# Iterate through all component paths in the parsed DEPS, in the decreasing
# order of the length of the file path.
@@ -230,14 +230,19 @@ def GetDataFromURL(url, retries=10, sleep_time=0.1, timeout=5):
count += 1
# Retrieves data from URL.
try:
- _, data = utils.GetHttpClient().Get(url, timeout=timeout)
- return data
+ status_code, data = utils.GetHttpClient().Get(url, timeout=timeout)
except IOError:
- if count < retries:
- # If retrieval fails, try after sleep_time second.
- time.sleep(sleep_time)
- else:
- break
+ status_code = -1
+ data = None
+
+ if status_code == 200:
+ return data
+
+ if count < retries:
+      # If retrieval fails, try again after |sleep_time| seconds.
+ time.sleep(sleep_time)
+ else:
+ break
# Return None if it fails to read data from URL 'retries' times.
return None
@@ -349,16 +354,24 @@ def AddHyperlink(text, link):
return '<a href="%s">%s</a>' % (sanitized_link, sanitized_text)
-def PrettifyList(l):
+def PrettifyList(items):
"""Returns a string representation of a list.
It adds comma in between the elements and removes the brackets.
Args:
- l: A list to prettify.
+ items: A list to prettify.
Returns:
A string representation of the list.
"""
- return str(l)[1:-1]
+ return ', '.join(map(str, items))
+
+
+def PrettifyFrameInfo(frame_indices, functions):
+ """Return a string to represent the frames with functions."""
+ frames = []
+ for frame_index, function in zip(frame_indices, functions):
+ frames.append('frame #%s, function "%s"' % (frame_index, function))
+ return '; '.join(frames)
def PrettifyFiles(file_list):
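
What the two reworked helpers produce, per the implementations above (illustrative inputs; run from tools/findit so crash_utils imports resolve):

```python
import crash_utils

print crash_utils.PrettifyList([1100, 1103, 1110])
# 1100, 1103, 1110  (comma-separated, no brackets)

print crash_utils.PrettifyFrameInfo(
    [2], ['blink::ThreadState::performPendingSweep()'])
# frame #2, function "blink::ThreadState::performPendingSweep()"
```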
diff --git a/tools/findit/deps_config.json b/tools/findit/deps_config.json
index 203202b..57b41f9 100644
--- a/tools/findit/deps_config.json
+++ b/tools/findit/deps_config.json
@@ -1,16 +1,17 @@
{
-"svn_base_url": "https://src.chromium.org/chrome/trunk/",
-"svn_deps_path": "src/DEPS?p=%s",
-"git_base_url": "https://chromium.googlesource.com/chromium/src/",
-"git_deps_path": "+/%s/DEPS?format=text",
-"svn_src_chromium_url": "https://src.chromium.org/chrome",
-"host_directories": ["src/chrome/browser/resources/",
- "src/chrome/test/data/layout_tests/",
- "src/media/",
- "src/sdch/",
- "src/testing/",
- "src/third_party/WebKit/",
- "src/third_party/",
- "src/tools/",
- "src/"]
-}
\ No newline at end of file
+ "cr_rev_url": "https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/%s",
+ "chromium_git_base_url": "https://chromium.googlesource.com/chromium/src/",
+ "chromium_git_file_url":
+ "https://chromium.googlesource.com/chromium/src/+/%s/%s?format=text",
+ "host_directories": [
+ "src/chrome/browser/resources/",
+ "src/chrome/test/data/layout_tests/",
+ "src/media/",
+ "src/sdch/",
+ "src/testing/",
+ "src/third_party/WebKit/",
+ "src/third_party/",
+ "src/tools/",
+ "src/"
+ ]
+}
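
How the two new URL templates in this config are expected to expand (a sketch; the revision values are the ones that appear in the unit tests above):

```python
import json

with open('tools/findit/deps_config.json') as f:
  config = json.load(f)

# A numeric commit position / svn revision is first resolved through cr-rev...
print config['cr_rev_url'] % '283296'
# https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/283296

# ...and the DEPS file is then fetched from gitiles by git hash.
print config['chromium_git_file_url'] % (
    'b041fda2e8493dcb26aac08deb493943df240cbb', '.DEPS.git')
# https://chromium.googlesource.com/chromium/src/+/b041fda2e8493dcb26aac08deb493943df240cbb/.DEPS.git?format=text
```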
diff --git a/tools/findit/findit_for_crash.py b/tools/findit/findit_for_crash.py
index c12d6b8..50f12d8 100644
--- a/tools/findit/findit_for_crash.py
+++ b/tools/findit/findit_for_crash.py
@@ -456,13 +456,11 @@ def GenerateReasonForMatches(matches):
for lines in crashed_line_numbers
for crashed_line_number in lines]
reason.append(
- 'Line %s of file %s which potentially caused the crash '
- 'according to the stacktrace, is changed in this cl'
- ' (From stack frame %s, function %s).' %
- (crash_utils.PrettifyList(crashed_line_numbers),
+ 'Lines %s of file %s which potentially caused crash '
+ 'are changed in this cl (%s).\n' %
+ (utils.JoinLineNumbers(crashed_line_numbers, accepted_gap=4),
file_name,
- crash_utils.PrettifyList(stack_frame_indices),
- crash_utils.PrettifyList(function_list)))
+ crash_utils.PrettifyFrameInfo(stack_frame_indices, function_list)))
else:
# Get all the files that are not line change.
@@ -479,9 +477,8 @@ def GenerateReasonForMatches(matches):
pretty_file_names = crash_utils.PrettifyList(file_names)
# Add the reason, break because we took care of the rest of the files.
- file_string += ('(From stack frames %s, functions %s)' %
- (crash_utils.PrettifyList(stack_frame_indices),
- crash_utils.PrettifyList(function_list)))
+ file_string += '(%s)' % crash_utils.PrettifyFrameInfo(
+ stack_frame_indices, function_list)
reason.append(file_string % pretty_file_names)
break
@@ -525,8 +522,8 @@ def CombineMatches(matches):
if match.min_distance_info:
file_name, min_crashed_line, min_changed_line = match.min_distance_info
match.reason += \
- ('\nMininimum distance from crashed line to changed line: %d. '
- '(File: %s, Crashed on: %d, Changed: %d).' %
+ ('Minimum distance from crashed line to changed line: %d. '
+ '(File: %s, Crashed on: %d, Changed: %d).\n' %
(match.min_distance, file_name, min_crashed_line, min_changed_line))
return combined_matches
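
Composed together, the reworked helpers yield reason lines in the format quoted in the commit message. A rough reconstruction of one such line (the crashed-line numbers are hypothetical; module names are as used in findit_for_crash.py):

```python
import crash_utils
from common import utils

crashed_line_numbers = [1103, 1104, 1105, 1110, 1111, 1112]  # hypothetical
reason = ('Lines %s of file %s which potentially caused crash '
          'are changed in this cl (%s).\n' %
          (utils.JoinLineNumbers(crashed_line_numbers, accepted_gap=4),
           'ThreadState.cpp',
           crash_utils.PrettifyFrameInfo(
               [2], ['blink::ThreadState::performPendingSweep()'])))
print reason
# Lines 1103-1105, 1110-1112 of file ThreadState.cpp which potentially caused
# crash are changed in this cl (frame #2, function
# "blink::ThreadState::performPendingSweep()").
```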
diff --git a/tools/findit/git_repository_parser.py b/tools/findit/git_repository_parser.py
index d4564f8..d875c26 100644
--- a/tools/findit/git_repository_parser.py
+++ b/tools/findit/git_repository_parser.py
@@ -11,11 +11,18 @@ from repository_parser_interface import ParserInterface
FILE_CHANGE_TYPE_MAP = {
'add': 'A',
+ 'copy': 'C',
'delete': 'D',
- 'modify': 'M'
+ 'modify': 'M',
+ 'rename': 'R'
}
+def _ConvertToFileChangeType(file_action):
+ # TODO(stgao): verify impact on code that checks the file change type.
+ return file_action[0].upper()
+
+
class GitParser(ParserInterface):
"""Parser for Git repository in googlesource.
@@ -103,7 +110,7 @@ class GitParser(ParserInterface):
0].getAttribute('class')
# Normalize file action so that it is same as SVN parser.
- file_change_type = FILE_CHANGE_TYPE_MAP[file_change_type]
+ file_change_type = _ConvertToFileChangeType(file_change_type)
# Add the changed file to the map.
if file_path not in file_to_revision_map:
@@ -185,7 +192,7 @@ class GitParser(ParserInterface):
file_change_type = diff['type']
# Normalize file action so that it fits with svn_repository_parser.
- file_change_type = FILE_CHANGE_TYPE_MAP[file_change_type]
+ file_change_type = _ConvertToFileChangeType(file_change_type)
# Add the file to the map.
if file_path not in file_to_revision_map:
@@ -204,7 +211,8 @@ class GitParser(ParserInterface):
backup_url = (base_url + self.url_parts_map['revision_url']) % githash
# If the file is added (not modified), treat it as if it is not changed.
- if file_change_type == 'A':
+ if file_change_type in ('A', 'C', 'R'):
+ # TODO(stgao): Maybe return whole file change for Add, Rename, and Copy?
return (backup_url, changed_line_numbers, changed_line_contents)
# Retrieves the diff data from URL, and if it fails, return emptry lines.
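
The new `_ConvertToFileChangeType` collapses the lookup table into "first letter, upper-cased", which covers every action listed in FILE_CHANGE_TYPE_MAP. A quick check of the mapping:

```python
def _ConvertToFileChangeType(file_action):
  # First letter, upper-cased: add->A, copy->C, delete->D, modify->M, rename->R.
  return file_action[0].upper()

for action in ('add', 'copy', 'delete', 'modify', 'rename'):
  print '%s -> %s' % (action, _ConvertToFileChangeType(action))
```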