summaryrefslogtreecommitdiffstats
path: root/tools/valgrind
diff options
context:
space:
mode:
authordkegel@google.com <dkegel@google.com@0039d316-1c4b-4281-b951-d872f2087c98>2009-05-04 17:53:41 +0000
committerdkegel@google.com <dkegel@google.com@0039d316-1c4b-4281-b951-d872f2087c98>2009-05-04 17:53:41 +0000
commit0bc189ec6ddffa79c1ce804d90a50fd550571093 (patch)
tree933826db24ffdc031714c8dd644bc0f0779aed3a /tools/valgrind
parent360a8b838101747d7550129ba107d0b49aaa96e5 (diff)
downloadchromium_src-0bc189ec6ddffa79c1ce804d90a50fd550571093.zip
chromium_src-0bc189ec6ddffa79c1ce804d90a50fd550571093.tar.gz
chromium_src-0bc189ec6ddffa79c1ce804d90a50fd550571093.tar.bz2
Don't analyze incomplete valgrind log files (as the parser crashes),
make timeout global instead of per-file, shorten it. Review URL: http://codereview.chromium.org/100340 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@15224 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'tools/valgrind')
-rwxr-xr-xtools/valgrind/valgrind_analyze.py31
1 file changed, 20 insertions, 11 deletions
diff --git a/tools/valgrind/valgrind_analyze.py b/tools/valgrind/valgrind_analyze.py
index a8e2012..e4eb187 100755
--- a/tools/valgrind/valgrind_analyze.py
+++ b/tools/valgrind/valgrind_analyze.py
@@ -189,24 +189,33 @@ class ValgrindAnalyze:
'''
self._errors = set()
+ badfiles = set()
+ start = time.time()
for file in files:
- # Wait up to ten minutes for valgrind to finish writing.
+ # Wait up to three minutes for valgrind to finish writing all files,
+ # but after that, just skip incomplete files and warn.
f = open(file, "r")
- ntries = 600
- for tries in range(0, ntries):
+ found = False
+ firstrun = True
+ while (firstrun or ((time.time() - start) < 180.0)):
+ firstrun = False
f.seek(0)
if sum((1 for line in f if '</valgrindoutput>' in line)) > 0:
+ found = True
break
time.sleep(1)
f.close()
- if tries == ntries-1:
- logging.error("valgrind never finished?")
- raw_errors = parse(file).getElementsByTagName("error")
- for raw_error in raw_errors:
- # Ignore "possible" leaks for now by default.
- if (show_all_leaks or
- getTextOf(raw_error, "kind") != "Leak_PossiblyLost"):
- self._errors.add(ValgrindError(source_dir, raw_error))
+ if not found:
+ badfiles.add(file)
+ else:
+ raw_errors = parse(file).getElementsByTagName("error")
+ for raw_error in raw_errors:
+ # Ignore "possible" leaks for now by default.
+ if (show_all_leaks or
+ getTextOf(raw_error, "kind") != "Leak_PossiblyLost"):
+ self._errors.add(ValgrindError(source_dir, raw_error))
+ if len(badfiles) > 0:
+ logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles))
def Report(self):
if self._errors: