Diffstat (limited to 'tools/android/loading/sandwich.py')
-rwxr-xr-x | tools/android/loading/sandwich.py | 60
1 file changed, 55 insertions, 5 deletions
diff --git a/tools/android/loading/sandwich.py b/tools/android/loading/sandwich.py
index a76c82b..86d730f 100755
--- a/tools/android/loading/sandwich.py
+++ b/tools/android/loading/sandwich.py
@@ -35,9 +35,12 @@ import chrome_cache
 import chrome_setup
 import device_setup
 import devtools_monitor
+import frame_load_lens
+import loading_trace
 import options
 import page_track
 import pull_sandwich_metrics
+import request_dependencies_lens
 import trace_recorder
 import tracing
 import wpr_backend
@@ -206,17 +209,17 @@ class SandwichRunner(object):
         connection=connection,
         emulated_device_name=None,
         emulated_network_name=self._GetEmulatorNetworkCondition('browser'))
-    loading_trace = trace_recorder.MonitorUrl(
+    trace = trace_recorder.MonitorUrl(
         connection, url,
         clear_cache=clear_cache,
         categories=pull_sandwich_metrics.CATEGORIES,
         timeout=_DEVTOOLS_TIMEOUT)
-    loading_trace.metadata.update(additional_metadata)
+    trace.metadata.update(additional_metadata)
     if trace_id != None and self.trace_output_directory:
-      loading_trace_path = os.path.join(
+      trace_path = os.path.join(
          self.trace_output_directory, str(trace_id), 'trace.json')
-      os.makedirs(os.path.dirname(loading_trace_path))
-      loading_trace.ToJsonFile(loading_trace_path)
+      os.makedirs(os.path.dirname(trace_path))
+      trace.ToJsonFile(trace_path)
 
   def _RunUrl(self, url, trace_id=0):
     clear_cache = False
@@ -369,6 +372,23 @@ def _ArgumentParser():
       help='Path where to save the metrics\'s '+
      'CSV.')
 
+  # Filter cache subcommand.
+  filter_cache_parser = subparsers.add_parser('filter-cache',
+      help='Cache filtering that keeps only resources discoverable by the HTML'+
+      ' document parser.')
+  filter_cache_parser.add_argument('--cache-archive', type=str, required=True,
+      dest='cache_archive_path',
+      help='Path of the cache archive to filter.')
+  filter_cache_parser.add_argument('--output', type=str, required=True,
+      dest='output_cache_archive_path',
+      help='Path of filtered cache archive.')
+  filter_cache_parser.add_argument('loading_trace_paths', type=str, nargs='+',
+      metavar='LOADING_TRACE',
+      help='A list of loading traces generated by a sandwich run for a given' +
+      ' url. This is used to have a resource dependency graph to white-' +
+      'list the ones discoverable by the HTML pre-scanner for that given ' +
+      'url.')
+
   return parser
 
 
@@ -445,6 +465,34 @@ def _ExtractMetricsMain(args):
   return 0
 
 
+def _FilterCacheMain(args):
+  whitelisted_urls = set()
+  for loading_trace_path in args.loading_trace_paths:
+    logging.info('loading %s' % loading_trace_path)
+    trace = loading_trace.LoadingTrace.FromJsonFile(loading_trace_path)
+    requests_lens = request_dependencies_lens.RequestDependencyLens(trace)
+    deps = requests_lens.GetRequestDependencies()
+
+    main_resource_request = deps[0][0]
+    logging.info('white-listing %s' % main_resource_request.url)
+    whitelisted_urls.add(main_resource_request.url)
+    for (first, second, reason) in deps:
+      # Ignore data protocols.
+      if not second.protocol.startswith('http'):
+        continue
+      if (first.request_id == main_resource_request.request_id and
+          reason == 'parser' and second.url not in whitelisted_urls):
+        logging.info('white-listing %s' % second.url)
+        whitelisted_urls.add(second.url)
+
+  if not os.path.isdir(os.path.dirname(args.output_cache_archive_path)):
+    os.makedirs(os.path.dirname(args.output_cache_archive_path))
+  chrome_cache.ApplyUrlWhitelistToCacheArchive(args.cache_archive_path,
+                                               whitelisted_urls,
+                                               args.output_cache_archive_path)
+  return 0
+
+
 def main(command_line_args):
   logging.basicConfig(level=logging.INFO)
   devil_chromium.Initialize()
@@ -465,6 +513,8 @@ def main(command_line_args):
     return _RunJobMain(args)
   if args.subcommand == 'extract-metrics':
     return _ExtractMetricsMain(args)
+  if args.subcommand == 'filter-cache':
+    return _FilterCacheMain(args)
   assert False
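
Usage note (illustrative, not part of the change): given the flags added to the argument parser above (--cache-archive, --output, and the positional LOADING_TRACE paths) and the per-trace 'trace.json' files a run writes under its trace output directory, an invocation of the new subcommand would look roughly like the sketch below; all file paths are placeholders.

    ./tools/android/loading/sandwich.py filter-cache \
        --cache-archive original_cache_archive \
        --output filtered_cache_archive \
        sandwich_output/0/trace.json sandwich_output/1/trace.json

Per _FilterCacheMain, the output archive keeps only the main document of each trace plus the HTTP(S) resources it references with a 'parser' dependency reason, i.e. those the HTML pre-scanner can discover.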