author     maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2012-08-29 02:30:47 +0000
committer  maruel@chromium.org <maruel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2012-08-29 02:30:47 +0000
commit     112182a539acd0ede2e60bae5e138f5818d3dd5f (patch)
tree       65838bcfc9e92794b16df844de8e82dfc816b5f8 /tools
parent     8f2249b2430f3a4822889f16e664b7110425708f (diff)
Do not flatten entries with value set to None.
This will fix run_test_from_archive.py, which always expects valid values. By not accepting None values, this makes the format more deterministic. Make the output dense and sorted to be as deterministic as possible.

TBR=cmp@chromium.org,csharp@chromium.org
NOTRY=true
BUG=
Review URL: https://chromiumcodereview.appspot.com/10886027

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@153829 0039d316-1c4b-4281-b951-d872f2087c98
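For illustration, here is a minimal, self-contained sketch of the behavior described above (not the actual isolate.py code; the Example class and its constructor are made up for this note): members whose value is None are skipped when flattening, and the JSON is emitted dense and key-sorted so two runs over the same inputs produce identical bytes.

import json


class Example(object):
  # Illustrative stand-in for a Flattenable subclass such as Result.
  MEMBERS = ('command', 'files', 'read_only', 'relative_cwd')

  def __init__(self, **kwargs):
    for member in self.MEMBERS:
      setattr(self, member, kwargs.get(member))

  def flatten(self):
    """Returns a json-serializable dict of itself, skipping None entries."""
    items = ((member, getattr(self, member)) for member in self.MEMBERS)
    return dict((member, value) for member, value in items if value is not None)


data = Example(command=['python'], files={}, read_only=None).flatten()
# Dense, key-sorted output; 'read_only' is absent rather than serialized as null.
print(json.dumps(data, sort_keys=True, separators=(',', ':')))
# -> {"command":["python"],"files":{}}

Because null values never appear, a consumer such as run_test_from_archive.py can treat every key that is present as valid, and the sorted compact form keeps the saved result and state files stable across runs.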
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/isolate/isolate.py                | 12
-rwxr-xr-x  tools/isolate/isolate_smoke_test.py     |  3
-rwxr-xr-x  tools/isolate/isolate_test.py           |  6
-rwxr-xr-x  tools/isolate/run_test_from_archive.py  | 13
-rwxr-xr-x  tools/isolate/trace_inputs.py           |  5
5 files changed, 21 insertions, 18 deletions
diff --git a/tools/isolate/isolate.py b/tools/isolate/isolate.py
index 1d6b5e0..31ffbcf 100755
--- a/tools/isolate/isolate.py
+++ b/tools/isolate/isolate.py
@@ -364,8 +364,12 @@ class Flattenable(object):
   MEMBERS = ()
 
   def flatten(self):
-    """Returns a json-serializable version of itself."""
-    return dict((member, getattr(self, member)) for member in self.MEMBERS)
+    """Returns a json-serializable version of itself.
+
+    Skips None entries.
+    """
+    items = ((member, getattr(self, member)) for member in self.MEMBERS)
+    return dict((member, value) for member, value in items if value is not None)
 
   @classmethod
   def load(cls, data):
@@ -568,13 +572,13 @@ class CompleteState(object):
   def save_files(self):
     """Saves both self.result and self.saved_state."""
     logging.debug('Dumping to %s' % self.result_file)
-    trace_inputs.write_json(self.result_file, self.result.flatten(), False)
+    trace_inputs.write_json(self.result_file, self.result.flatten(), True)
     total_bytes = sum(i.get('size', 0) for i in self.result.files.itervalues())
     if total_bytes:
       logging.debug('Total size: %d bytes' % total_bytes)
     saved_state_file = result_to_state(self.result_file)
     logging.debug('Dumping to %s' % saved_state_file)
-    trace_inputs.write_json(saved_state_file, self.saved_state.flatten(), False)
+    trace_inputs.write_json(saved_state_file, self.saved_state.flatten(), True)
 
   @property
   def root_dir(self):
diff --git a/tools/isolate/isolate_smoke_test.py b/tools/isolate/isolate_smoke_test.py
index 25078e8..4876ef9 100755
--- a/tools/isolate/isolate_smoke_test.py
+++ b/tools/isolate/isolate_smoke_test.py
@@ -231,9 +231,10 @@ class IsolateModeBase(IsolateBase):
"""Verifies self.result contains the expected data."""
expected = {
u'files': self._gen_files(read_only, empty_file),
- u'read_only': read_only,
u'relative_cwd': unicode(RELATIVE_CWD[self.case()]),
}
+ if read_only is not None:
+ expected[u'read_only'] = read_only
if args:
expected[u'command'] = [u'python'] + [unicode(x) for x in args]
else:
diff --git a/tools/isolate/isolate_test.py b/tools/isolate/isolate_test.py
index b1c11aa..18b9a1f 100755
--- a/tools/isolate/isolate_test.py
+++ b/tools/isolate/isolate_test.py
@@ -33,8 +33,6 @@ class Isolate(unittest.TestCase):
     expected = {
       'command': [],
       'files': {},
-      'read_only': None,
-      'relative_cwd': None,
     }
     self.assertEquals(expected, isolate.Result.load(values).flatten())
@@ -43,13 +41,11 @@ class Isolate(unittest.TestCase):
       'command': 'maybe',
       'files': {'foo': 42},
       'read_only': 2,
-      'relative_cwd': None,
     }
     expected = {
       'command': 'maybe',
       'files': {'foo': 42},
       'read_only': 2,
-      'relative_cwd': None,
     }
     self.assertEquals(expected, isolate.Result.load(values).flatten())
@@ -72,7 +68,6 @@ class Isolate(unittest.TestCase):
     values = {
     }
     expected = {
-      'isolate_file': None,
       'variables': {},
     }
     self.assertEquals(expected, isolate.SavedState.load(values).flatten())
@@ -142,7 +137,6 @@ class Isolate(unittest.TestCase):
           'size': self._size('isolate.py'),
         },
       },
-      'read_only': None,
       'relative_cwd': os.path.join('data', 'isolate'),
     }
     if sys.platform == 'win32':
diff --git a/tools/isolate/run_test_from_archive.py b/tools/isolate/run_test_from_archive.py
index 02acecb..247da15 100755
--- a/tools/isolate/run_test_from_archive.py
+++ b/tools/isolate/run_test_from_archive.py
@@ -452,6 +452,7 @@ class Cache(object):
       self._dirty = True
       self.state = []
     else:
+      added = 0
       for filename in os.listdir(self.cache_dir):
         if filename == self.STATE_FILE:
           continue
@@ -466,8 +467,10 @@ class Cache(object):
         else:
           # Insert as the oldest file. It will be deleted eventually if not
           # accessed.
-          logging.warn('Adding back unknown file %s in cache', filename)
           self._add(filename, False)
+          added += 1
+      if added:
+        logging.warn('Added back %d unknown files', added)
       self.state = [
         (filename, size) for filename, size in self.state
         if filename not in previous
@@ -485,14 +488,14 @@ class Cache(object):
       self.trim()
 
     logging.info(
-        '%3d (%5dkb) added', len(self._added), sum(self._added) / 1024)
+        '%4d (%7dkb) added', len(self._added), sum(self._added) / 1024)
     logging.info(
-        '%3d (%5dkb) current',
+        '%4d (%7dkb) current',
         len(self.state),
         sum(i[1] for i in self.state) / 1024)
     logging.info(
-        '%3d (%5dkb) removed', len(self._removed), sum(self._removed) / 1024)
-    logging.info('%5dkb free', self._free_disk / 1024)
+        '%4d (%7dkb) removed', len(self._removed), sum(self._removed) / 1024)
+    logging.info('%7dkb free', self._free_disk / 1024)
 
   def remove_lru_file(self):
     """Removes the last recently used file."""
diff --git a/tools/isolate/trace_inputs.py b/tools/isolate/trace_inputs.py
index b54e936..812d05a 100755
--- a/tools/isolate/trace_inputs.py
+++ b/tools/isolate/trace_inputs.py
@@ -554,13 +554,14 @@ def write_json(filepath_or_handle, data, dense):
"""
if hasattr(filepath_or_handle, 'write'):
if dense:
- filepath_or_handle.write(json.dumps(data, separators=(',',':')))
+ filepath_or_handle.write(
+ json.dumps(data, sort_keys=True, separators=(',',':')))
else:
filepath_or_handle.write(json.dumps(data, sort_keys=True, indent=2))
else:
with open(filepath_or_handle, 'wb') as f:
if dense:
- json.dump(data, f, separators=(',',':'))
+ json.dump(data, f, sort_keys=True, separators=(',',':'))
else:
json.dump(data, f, sort_keys=True, indent=2)
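For reference, a hypothetical usage sketch of the modified write_json() (assuming tools/isolate is on sys.path; the file names are illustrative). After this change both modes sort keys, so the dense and pretty forms differ only in whitespace:

# Hypothetical example; output paths are illustrative.
import trace_inputs

data = {'b': 1, 'a': {'size': 2}}
trace_inputs.write_json('out.dense.json', data, True)    # -> {"a":{"size":2},"b":1}
trace_inputs.write_json('out.pretty.json', data, False)  # -> same keys, indent=2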