author     jl@opera.com <jl@opera.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-02-03 09:14:13 +0000
committer  jl@opera.com <jl@opera.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-02-03 09:14:13 +0000
commit     f0efc503a29fc2bcd6721a12ea081aaf1c7b68b7 (patch)
tree       e864df954f2e1233959ebf690df1b8e47ba6df59 /tools/deep_memory_profiler
parent     58d170589e7f604ddabd1448a6f8913d699a2b6c (diff)
Make dmprof handle long runs better
Read the dump files one by one as they are processed instead of reading them
all into memory at the start of the program, to reduce peak memory
consumption. This makes it possible to process larger data sets.

Also restructure a few sub-command implementations to avoid iterating over the
list of dumps more than once and to avoid accessing any given dump (typically
the first) more than once.

BUG=
Review URL: https://codereview.chromium.org/141563014

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@248458 0039d316-1c4b-4281-b951-d872f2087c98
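The heart of the change is replacing an eagerly built list of parsed dumps
with a sequence that stores only file paths and parses each dump when it is
accessed. A minimal standalone sketch of that pattern, using hypothetical
names (LazyDumpSequence, load_dump) rather than the actual DumpList/Dump.load
code touched in the diff below:

def load_dump(path):
  # Stand-in for the real parser (Dump.load in lib/dump.py); assume it is
  # expensive and produces a large in-memory object.
  with open(path) as f:
    return f.read()


class LazyDumpSequence(object):
  """Stores only paths; parses a dump each time an element is accessed."""

  def __init__(self, path_list):
    self._path_list = path_list

  def __len__(self):
    return len(self._path_list)

  def __iter__(self):
    # At most one parsed dump is alive per iteration step, instead of all
    # dumps being resident for the whole run.
    for path in self._path_list:
      yield load_dump(path)

  def __getitem__(self, index):
    # No caching: each access re-reads the file and returns a fresh object.
    return load_dump(self._path_list[index])

With this shape, peak memory is bounded roughly by the largest single dump
rather than by the sum of all dumps, which is what makes long runs tractable.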
Diffstat (limited to 'tools/deep_memory_profiler')
-rw-r--r--  tools/deep_memory_profiler/lib/dump.py              26
-rw-r--r--  tools/deep_memory_profiler/subcommands/cat.py       12
-rw-r--r--  tools/deep_memory_profiler/subcommands/policies.py  14
3 files changed, 30 insertions(+), 22 deletions(-)
diff --git a/tools/deep_memory_profiler/lib/dump.py b/tools/deep_memory_profiler/lib/dump.py
index dc4b898..c207f64 100644
--- a/tools/deep_memory_profiler/lib/dump.py
+++ b/tools/deep_memory_profiler/lib/dump.py
@@ -407,28 +407,30 @@ class Dump(object):
 
 
 class DumpList(object):
-  """Represents a sequence of heap profile dumps."""
+  """Represents a sequence of heap profile dumps.
 
-  def __init__(self, dump_list):
-    self._dump_list = dump_list
+  Individual dumps are loaded into memory lazily as the sequence is accessed,
+  either while being iterated through or randomly accessed. Loaded dumps are
+  not cached, meaning a newly loaded Dump object is returned every time an
+  element in the list is accessed.
+  """
+
+  def __init__(self, dump_path_list):
+    self._dump_path_list = dump_path_list
 
   @staticmethod
   def load(path_list):
-    LOGGER.info('Loading heap dump profiles.')
-    dump_list = []
-    for path in path_list:
-      dump_list.append(Dump.load(path, ' '))
-    return DumpList(dump_list)
+    return DumpList(path_list)
 
   def __len__(self):
-    return len(self._dump_list)
+    return len(self._dump_path_list)
 
   def __iter__(self):
-    for dump in self._dump_list:
-      yield dump
+    for dump in self._dump_path_list:
+      yield Dump.load(dump)
 
   def __getitem__(self, index):
-    return self._dump_list[index]
+    return Dump.load(self._dump_path_list[index])
 
 
 class ProcMapsEntryAttribute(ExclusiveRangeDict.RangeAttribute):
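Note that, per the new docstring, loaded dumps are not cached: indexing the
same element twice re-reads and re-parses the file and returns a distinct
Dump object. A short illustration of the new semantics (the .heap file names
here are made up):

dumps = DumpList.load(['0001.heap', '0002.heap'])

first = dumps[0]            # parses 0001.heap
again = dumps[0]            # parses 0001.heap a second time
assert first is not again   # a fresh Dump object on every access

This is why the sub-command changes below avoid touching dumps[0] inside
their per-dump loops.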
diff --git a/tools/deep_memory_profiler/subcommands/cat.py b/tools/deep_memory_profiler/subcommands/cat.py
index 9fdc32d..5430ce1 100644
--- a/tools/deep_memory_profiler/subcommands/cat.py
+++ b/tools/deep_memory_profiler/subcommands/cat.py
@@ -43,12 +43,6 @@ class CatCommand(SubCommand):
     json_root = OrderedDict()
     json_root['version'] = 1
     json_root['run_id'] = None
-    for dump in dumps:
-      if json_root['run_id'] and json_root['run_id'] != dump.run_id:
-        LOGGER.error('Inconsistent heap profile dumps.')
-        json_root['run_id'] = ''
-        break
-      json_root['run_id'] = dump.run_id
     json_root['roots'] = []
     for sorter in sorters:
       if sorter.root:
@@ -72,6 +66,12 @@ class CatCommand(SubCommand):
 
     json_root['snapshots'] = []
     for dump in dumps:
+      if json_root['run_id'] and json_root['run_id'] != dump.run_id:
+        LOGGER.error('Inconsistent heap profile dumps.')
+        json_root['run_id'] = ''
+      else:
+        json_root['run_id'] = dump.run_id
+
       LOGGER.info('Sorting a dump %s...' % dump.path)
       json_root['snapshots'].append(
           self._fill_snapshot(dump, bucket_set, sorters))
diff --git a/tools/deep_memory_profiler/subcommands/policies.py b/tools/deep_memory_profiler/subcommands/policies.py
index 0b9181a383..5de1fe7 100644
--- a/tools/deep_memory_profiler/subcommands/policies.py
+++ b/tools/deep_memory_profiler/subcommands/policies.py
@@ -299,9 +299,12 @@ class CSVCommand(PolicyCommands):
           ','.join(components), ',' * (max_components - len(components))))
 
       LOGGER.info('Applying a policy %s to...' % label)
-      for dump in dumps:
+      for index, dump in enumerate(dumps):
+        if index == 0:
+          first_dump_time = dump.time
         component_sizes = self._apply_policy(
-            dump, pfn_counts_dict, policy_set[label], bucket_set, dumps[0].time)
+            dump, pfn_counts_dict, policy_set[label], bucket_set,
+            first_dump_time)
         s = []
         for c in components:
           if c in ('hour', 'minute', 'second'):
@@ -341,9 +344,12 @@ class JSONCommand(PolicyCommands):
       }
 
       LOGGER.info('Applying a policy %s to...' % label)
-      for dump in dumps:
+      for index, dump in enumerate(dumps):
+        if index == 0:
+          first_dump_time = dump.time
         component_sizes = self._apply_policy(
-            dump, pfn_counts_dict, policy_set[label], bucket_set, dumps[0].time)
+            dump, pfn_counts_dict, policy_set[label], bucket_set,
+            first_dump_time)
         component_sizes['dump_path'] = dump.path
         component_sizes['dump_time'] = datetime.datetime.fromtimestamp(
             dump.time).strftime('%Y-%m-%d %H:%M:%S')
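Both policies.py hunks follow from the no-caching behaviour of the new
DumpList: evaluating dumps[0].time inside the loop would re-read and re-parse
the first dump file on every iteration, so the first dump's timestamp is
captured during the single pass instead. The same idiom in isolation, with a
hypothetical helper name:

def relative_dump_times(dumps):
  # 'dumps' is a lazily loading sequence: indexing it re-parses a file, so
  # take what is needed from the first element while iterating once.
  first_dump_time = None
  times = []
  for index, dump in enumerate(dumps):
    if index == 0:
      first_dump_time = dump.time
    times.append(dump.time - first_dump_time)
  return times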