From 640bf44aa16d9401711b113b02983a2545f642af Mon Sep 17 00:00:00 2001
From: "terry@google.com"
Date: Thu, 17 Apr 2014 15:55:13 +0000
Subject: Branching for 1847 @251904

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@264525 0039d316-1c4b-4281-b951-d872f2087c98
---
 android_webview/common/aw_content_client.cc        |   2 +-
 build/all.gyp                                      |  12 --
 build/common.gypi                                  |   2 +-
 content/browser/webui/url_data_manager_backend.cc  |   4 +-
 content/worker/worker_webkitplatformsupport_impl.h |   2 -
 dartium_tools/archive.py                           | 203 ------------------
 dartium_tools/build.py                             |  57 ------
 dartium_tools/fetch_reference_build.py             |  48 -----
 dartium_tools/get_chromium_build.py                | 171 ----------------
 dartium_tools/set_reference_build_revision.py      |  29 ---
 dartium_tools/update_deps.py                       | 226 ---------------------
 11 files changed, 3 insertions(+), 753 deletions(-)
 delete mode 100755 dartium_tools/archive.py
 delete mode 100755 dartium_tools/build.py
 delete mode 100755 dartium_tools/fetch_reference_build.py
 delete mode 100755 dartium_tools/get_chromium_build.py
 delete mode 100755 dartium_tools/set_reference_build_revision.py
 delete mode 100755 dartium_tools/update_deps.py

diff --git a/android_webview/common/aw_content_client.cc b/android_webview/common/aw_content_client.cc
index 90a91e0..b1071c1 100644
--- a/android_webview/common/aw_content_client.cc
+++ b/android_webview/common/aw_content_client.cc
@@ -27,7 +27,7 @@ namespace android_webview {
 
 std::string GetUserAgent() {
   // "Version/4.0" had been hardcoded in the legacy WebView.
-  std::string product = "Version/4.0 (Dart) " + GetProduct();
+  std::string product = "Version/4.0 " + GetProduct();
   if (CommandLine::ForCurrentProcess()->HasSwitch(
           switches::kUseMobileUserAgent)) {
     product += " Mobile";
diff --git a/build/all.gyp b/build/all.gyp
index f95401a..651982f 100644
--- a/build/all.gyp
+++ b/build/all.gyp
@@ -243,18 +243,6 @@
         ],
       },  # target_name: All_syzygy
       {
-        'target_name': 'dartium_builder',
-        'type': 'none',
-        'dependencies': [
-          '../dart/pkg/pkg.gyp:pkg_packages',
-          # '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
-          '../chrome/chrome.gyp:chrome',
-          '../chrome/chrome.gyp:chromedriver',
-          '../content/content_shell_and_tests.gyp:content_shell',
-          'blink_tests',
-        ],
-      },
-      {
         # Note: Android uses android_builder_tests below.
         # TODO: Consider merging that with this target.
         'target_name': 'chromium_builder_tests',
diff --git a/build/common.gypi b/build/common.gypi
index ef26543..ebec615 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -420,7 +420,7 @@
     'enable_extensions%': 1,
 
     # Enable Google Now.
-    'enable_google_now%': 0,
+    'enable_google_now%': 1,
 
     # Enable printing support and UI. This variable is used to configure
     # which parts of printing will be built. 0 disables printing completely,
diff --git a/content/browser/webui/url_data_manager_backend.cc b/content/browser/webui/url_data_manager_backend.cc
index a4d1ff7..f2ccafb 100644
--- a/content/browser/webui/url_data_manager_backend.cc
+++ b/content/browser/webui/url_data_manager_backend.cc
@@ -49,12 +49,10 @@ namespace content {
 
 namespace {
 
-// TODO(jacobr) remove https://www.google.com when the dependency on the
-// Google Charts API is removed from the Dart Observatory.
 // TODO(tsepez) remove unsafe-eval when bidichecker_packaged.js fixed.
 const char kChromeURLContentSecurityPolicyHeaderBase[] =
     "Content-Security-Policy: script-src chrome://resources "
-    "'self' https://www.google.com 'unsafe-eval'; ";
+    "'self' 'unsafe-eval'; ";
 
 const char kChromeURLXFrameOptionsHeader[] = "X-Frame-Options: DENY";
 
diff --git a/content/worker/worker_webkitplatformsupport_impl.h b/content/worker/worker_webkitplatformsupport_impl.h
index cb66a3f..9fa56d4 100644
--- a/content/worker/worker_webkitplatformsupport_impl.h
+++ b/content/worker/worker_webkitplatformsupport_impl.h
@@ -83,8 +83,6 @@ class WorkerWebKitPlatformSupportImpl : public BlinkPlatformImpl,
       const blink::WebString&);
   virtual blink::WebMimeRegistry::SupportsType supportsJavaScriptMIMEType(
       const blink::WebString&);
-  virtual blink::WebMimeRegistry::SupportsType supportsDartMIMEType(
-      const blink::WebString&);
   virtual blink::WebMimeRegistry::SupportsType supportsMediaMIMEType(
       const blink::WebString&,
       const blink::WebString&,
diff --git a/dartium_tools/archive.py b/dartium_tools/archive.py
deleted file mode 100755
index b1efcde..0000000
--- a/dartium_tools/archive.py
+++ /dev/null
@@ -1,203 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import optparse
-import os
-import shutil
-import subprocess
-import sys
-import utils
-
-HOST_OS = utils.guessOS()
-
-if HOST_OS == 'mac':
-  VERSION_FILE = 'Chromium.app/Contents/MacOS/VERSION'
-  CONTENTSHELL_FILES = ['Content Shell.app', 'ffmpegsumo.so', 'osmesa.so',
-                        'lib']
-  CHROMEDRIVER_FILES = ['chromedriver']
-elif HOST_OS == 'linux':
-  VERSION_FILE = 'VERSION'
-  CONTENTSHELL_FILES = ['content_shell', 'content_shell.pak', 'fonts.conf',
-                        'libffmpegsumo.so', 'libosmesa.so', 'lib']
-  CHROMEDRIVER_FILES = ['chromedriver']
-elif HOST_OS == 'win':
-  VERSION_FILE = 'VERSION'
-  # TODO: provide proper list.
-  CONTENTSHELL_FILES = ['content_shell.exe', 'AHEM____.ttf']
-  CHROMEDRIVER_FILES = ['chromedriver.exe']
-else:
-  raise Exception('Unsupported platform')
-
-# Append a file with size of the snapshot.
-CONTENTSHELL_FILES.append('snapshot-size.txt')
-
-
-def GenerateVersionFile():
-  # TODO: fix it.
-  if HOST_OS == 'win': return
-  versionInfo = utils.getCommandOutput(os.path.join('..', '..',
-                                                    'dartium_tools',
-                                                    'print_dart_version.sh'))
-  file = open(VERSION_FILE, 'w')
-  file.write(versionInfo)
-  file.close()
-
-
-def GenerateDartiumFileList(mode, srcpath):
-  def blacklisted(name):
-    # We include everything if this is a debug build.
-    if mode.lower() == 'debug':
-      return True
-    else:
-      # We don't include .debug/.pdb files if this is a release build.
-      if name.endswith('.debug') or name.endswith('.pdb'):
-        return False
-      return True
-
-  configFile = os.path.join(srcpath, 'chrome', 'tools', 'build', HOST_OS,
-                            'FILES.cfg')
-  configNamespace = {}
-  execfile(configFile, configNamespace)
-  fileList = [file['filename'] for file in configNamespace['FILES']]
-
-  # The debug version of dartium on our bots build with
-  # 'component=shared_library', so we need to include all libraries
-  # (i.e. 'lib/*.so) as we do on the CONTENTSHELL_FILES list above.
-  if HOST_OS == 'linux' and mode.lower() == 'debug':
-    fileList.append('lib')
-
-  # Filter out files we've blacklisted and don't want to include.
-  fileList = filter(blacklisted, fileList)
-  return fileList
-
-
-def GenerateContentShellFileList(srcpath):
-  return CONTENTSHELL_FILES
-
-
-def GenerateChromeDriverFileList(srcpath):
-  return CHROMEDRIVER_FILES
-
-
-def ZipDir(zipFile, directory):
-  if HOST_OS == 'win':
-    cmd = os.path.normpath(os.path.join(
-        os.path.dirname(__file__),
-        '../third_party/lzma_sdk/Executable/7za.exe'))
-    options = ['a', '-r', '-tzip']
-  else:
-    cmd = 'zip'
-    options = ['-yr']
-  utils.runCommand([cmd] + options + [zipFile, directory])
-
-
-def GenerateZipFile(zipFile, stageDir, fileList):
-  # Stage files.
-  for fileName in fileList:
-    fileName = fileName.rstrip(os.linesep)
-    targetName = os.path.join(stageDir, fileName)
-    try:
-      targetDir = os.path.dirname(targetName)
-      if not os.path.exists(targetDir):
-        os.makedirs(targetDir)
-      if os.path.isdir(fileName):
-        # TODO: This is a hack to handle duplicates on the fileList of the
-        # form: [ 'lib/foo.so', 'lib' ]
-        if os.path.exists(targetName) and os.path.isdir(targetName):
-          shutil.rmtree(targetName)
-        shutil.copytree(fileName, targetName)
-      elif os.path.exists(fileName):
-        shutil.copy2(fileName, targetName)
-    except:
-      import traceback
-      print 'Troubles processing %s [cwd=%s]: %s' % (fileName, os.getcwd(), traceback.format_exc())
-
-  ZipDir(zipFile, stageDir)
-
-
-def StageAndZip(fileList, target):
-  if not target:
-    return None
-
-  stageDir = target
-  zipFile = stageDir + '.zip'
-
-  # Cleanup old files.
-  if os.path.exists(stageDir):
-    shutil.rmtree(stageDir)
-  os.mkdir(stageDir)
-  oldFiles = glob.glob(target.split('-')[0] + '*.zip')
-  for oldFile in oldFiles:
-    os.remove(oldFile)
-
-  GenerateVersionFile()
-  GenerateZipFile(zipFile, stageDir, fileList)
-  print 'last change: %s' % (zipFile)
-
-  # Clean up. Buildbot disk space is limited.
-  shutil.rmtree(stageDir)
-
-  return zipFile
-
-
-def Archive(srcpath, mode, dartium_target, contentshell_target,
-            chromedriver_target, is_win_ninja=False):
-  # We currently build using ninja on mac debug.
-  if HOST_OS == 'mac':
-    releaseDir = os.path.join(srcpath, 'out', mode)
-    # Also package dynamic libraries.
-    extra_files = [file for file in os.listdir(releaseDir) if file.endswith('.dylib')]
-  elif HOST_OS == 'linux':
-    releaseDir = os.path.join(srcpath, 'out', mode)
-    extra_files = []
-  elif HOST_OS == 'win':
-    if is_win_ninja:
-      releaseDir = os.path.join(srcpath, 'out', mode)
-    else:
-      releaseDir = os.path.join(srcpath, 'build', mode)
-    # issue(16760) - we _need_ to fix our parsing of the FILES.cfg
-    extra_files = [file for file in os.listdir(releaseDir) if file.endswith('manifest')]
-  else:
-    raise Exception('Unsupported platform')
-  os.chdir(releaseDir)
-
-  dartium_zip = StageAndZip(
-      GenerateDartiumFileList(mode, srcpath) + extra_files, dartium_target)
-  contentshell_zip = StageAndZip(GenerateContentShellFileList(srcpath) + extra_files,
-                                 contentshell_target)
-  chromedriver_zip = StageAndZip(GenerateChromeDriverFileList(srcpath) + extra_files,
-                                 chromedriver_target)
-  return (dartium_zip, contentshell_zip, chromedriver_zip)
-
-
-def main():
-  pathname = os.path.dirname(sys.argv[0])
-  fullpath = os.path.abspath(pathname)
-  srcpath = os.path.join(fullpath, '..')
-
-  parser = optparse.OptionParser()
-  parser.add_option('--dartium', dest='dartium',
-                    action='store', type='string',
-                    help='dartium archive name')
-  parser.add_option('--contentshell', dest='contentshell',
-                    action='store', type='string',
-                    help='content shell archive name')
-  parser.add_option('--chromedriver', dest='chromedriver',
-                    action='store', type='string',
-                    help='chromedriver archive name')
-  parser.add_option('--mode', dest='mode',
-                    default='Release',
-                    action='store', type='string',
-                    help='(Release|Debug)')
-  (options, args) = parser.parse_args()
-  Archive(srcpath, options.mode, options.dartium, options.contentshell,
-          options.chromedriver)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/dartium_tools/build.py b/dartium_tools/build.py
deleted file mode 100755
index df511b0..0000000
--- a/dartium_tools/build.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc. All Rights Reserved.
-
-# This file is used by the buildbot.
-
-import optparse
-import os.path
-import utils
-
-ALL_TARGETS = [
-  'content_shell',
-  'chrome',
-  'blink_tests',
-  'pkg_packages',
-]
-
-def main():
-  parser = optparse.OptionParser()
-  parser.add_option('--target', dest='target',
-                    default='all',
-                    action='store', type='string',
-                    help='Target (%s)' % ', '.join(ALL_TARGETS))
-  parser.add_option('--mode', dest='mode',
-                    action='store', type='string',
-                    help='Build mode (Debug or Release)')
-  parser.add_option('--clobber', dest='clobber',
-                    action='store_true',
-                    help='Clobber the output directory')
-  parser.add_option('-j', '--jobs', dest='jobs',
-                    action='store',
-                    help='Number of jobs')
-  (options, args) = parser.parse_args()
-  mode = options.mode
-  if options.jobs:
-    jobs = options.jobs
-  else:
-    jobs = utils.guessCpus()
-  if not (mode in ['Debug', 'Release']):
-    raise Exception('Invalid build mode')
-
-  if options.target == 'all':
-    targets = ALL_TARGETS
-  else:
-    targets = [options.target]
-
-  if options.clobber:
-    utils.runCommand(['rm', '-rf', 'out'])
-
-  utils.runCommand(['ninja',
-                    '-j%s' % jobs,
-                    '-C',
-                    os.path.join('out', mode)]
-                   + targets)
-
-if __name__ == '__main__':
-  main()
diff --git a/dartium_tools/fetch_reference_build.py b/dartium_tools/fetch_reference_build.py
deleted file mode 100755
index 26a5c54..0000000
--- a/dartium_tools/fetch_reference_build.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Fetches an archived chromium build into
-   src/chrome/tools/test/reference_build unless
-   src/chrome/tools/test/reference_build/REQUESTED_REVISION is the same as
-   src/chrome/tools/test/reference_build/CURRENT_REVISION.
-   Must be run from the root of a Dartium or multivm checkout.
-
-Usage:
-  $ ./src/dartium_tools/fetch_reference_build_revision.py
-"""
-
-import os
-import subprocess
-import sys
-
-def main(argv):
-  dirname = os.path.join('src', 'chrome', 'tools',
-                         'test', 'reference_build')
-  request = os.path.join(dirname, 'REQUESTED_REVISION')
-  found = os.path.join(dirname, 'CURRENT_REVISION')
-  if not os.path.exists(request):
-    return
-  with file(request, 'r') as f:
-    request_revision = f.read()
-
-  if os.path.exists(found):
-    with file(found, 'r') as f:
-      found_revision = f.read()
-    if found_revision == request_revision:
-      return
-
-  get_script = os.path.join('src', 'dartium_tools', 'get_chromium_build.py')
-  get_script = os.path.abspath(get_script)
-  exit_code = subprocess.call(['python', get_script,
-                               '-r', request_revision,
-                               '-t', dirname])
-  if exit_code == 0:
-    with file(found, 'w') as f:
-      f.write(request_revision)
-  return exit_code
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff --git a/dartium_tools/get_chromium_build.py b/dartium_tools/get_chromium_build.py
deleted file mode 100755
index edb8bac..0000000
--- a/dartium_tools/get_chromium_build.py
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Gets a Chromium archived build, and unpacks it
-   into a target directory.
-
-   Use -r option to specify the revison number
-   Use -t option to specify the directory to unzip the build into.
-
-Usage:
-  $ /path/to/get_chromium_build.py -r -t
-"""
-
-import logging
-import optparse
-import os
-import platform
-import shutil
-import subprocess
-import sys
-import time
-import urllib
-import urllib2
-import zipfile
-
-# Example chromium build location:
-# gs://chromium-browser-snapshots/Linux_x64/228977/chrome-linux.zip
-CHROMIUM_URL_FMT = ('http://commondatastorage.googleapis.com/'
-                    'chromium-browser-snapshots/%s/%s/%s')
-
-class BuildUpdater(object):
-  _PLATFORM_PATHS_MAP = {
-      'Linux': { 'zipfiles': ['chrome-linux.zip'],
-                 'folder': 'chrome_linux',
-                 'archive_path': 'Linux_x64'},
-      'Darwin': {'zipfiles': ['chrome-mac.zip'],
-                 'folder': 'chrome_mac',
-                 'archive_path': 'Mac'},
-      'Windows': {'zipfiles': ['chrome-win32.zip',
-                               'chrome-win32-syms.zip'],
-                  'folder': 'chrome_win',
-                  'archive_path': 'Win'}}
-
-  def __init__(self, options):
-    platform_data = BuildUpdater._PLATFORM_PATHS_MAP[platform.system()]
-    self._zipfiles = platform_data['zipfiles']
-    self._folder = platform_data['folder']
-    self._archive_path = platform_data['archive_path']
-    self._revision = int(options.revision)
-    self._target_dir = options.target_dir
-    self._download_dir = os.path.join(self._target_dir, 'downloads')
-
-  def _GetBuildUrl(self, revision, filename):
-    return CHROMIUM_URL_FMT % (self._archive_path, revision, filename)
-
-  def _FindBuildRevision(self, revision, filename):
-    MAX_REVISIONS_PER_BUILD = 100
-    for revision_guess in xrange(revision, revision + MAX_REVISIONS_PER_BUILD):
-      if self._DoesBuildExist(revision_guess, filename):
-        return revision_guess
-      else:
-        time.sleep(.1)
-    return None
-
-  def _DoesBuildExist(self, revision_guess, filename):
-    url = self._GetBuildUrl(revision_guess, filename)
-
-    r = urllib2.Request(url)
-    r.get_method = lambda: 'HEAD'
-    try:
-      urllib2.urlopen(r)
-      return True
-    except urllib2.HTTPError, err:
-      if err.code == 404:
-        return False
-
-  def _DownloadBuild(self):
-    if not os.path.exists(self._download_dir):
-      os.makedirs(self._download_dir)
-    for zipfile in self._zipfiles:
-      build_revision = self._FindBuildRevision(self._revision, zipfile)
-      if not build_revision:
-        logging.critical('Failed to find %s build for r%s\n',
-                         self._archive_path,
-                         self._revision)
-        sys.exit(1)
-      url = self._GetBuildUrl(build_revision, zipfile)
-      logging.info('Downloading %s', url)
-      r = urllib2.urlopen(url)
-      with file(os.path.join(self._download_dir, zipfile), 'wb') as f:
-        f.write(r.read())
-
-  def _UnzipFile(self, dl_file, dest_dir):
-    if not zipfile.is_zipfile(dl_file):
-      return False
-    logging.info('Unzipping %s', dl_file)
-    with zipfile.ZipFile(dl_file, 'r') as z:
-      for content in z.namelist():
-        dest = os.path.join(dest_dir, content[content.find('/')+1:])
-        # Create dest parent dir if it does not exist.
-        if not os.path.isdir(os.path.dirname(dest)):
-          logging.info('Making %s', dest)
-          os.makedirs(os.path.dirname(dest))
-        # If dest is just a dir listing, do nothing.
-        if not os.path.basename(dest):
-          continue
-        with z.open(content) as unzipped_content:
-          logging.info('Extracting %s to %s (%s)', content, dest, dl_file)
-          with file(dest, 'wb') as dest_file:
-            dest_file.write(unzipped_content.read())
-          permissions = z.getinfo(content).external_attr >> 16
-          if permissions:
-            os.chmod(dest, permissions)
-    return True
-
-  def _ClearDir(self, dir):
-    """Clears all files in |dir| except for hidden files and folders."""
-    for root, dirs, files in os.walk(dir):
-      # Skip hidden files and folders (like .svn and .git).
-      files = [f for f in files if f[0] != '.']
-      dirs[:] = [d for d in dirs if d[0] != '.']
-
-      for f in files:
-        os.remove(os.path.join(root, f))
-
-  def _ExtractBuild(self):
-    dest_dir = os.path.join(self._target_dir, self._folder)
-    self._ClearDir(dest_dir)
-    for root, _, dl_files in os.walk(os.path.join(self._download_dir)):
-      for dl_file in dl_files:
-        dl_file = os.path.join(root, dl_file)
-        if not self._UnzipFile(dl_file, dest_dir):
-          logging.info('Copying %s to %s', dl_file, dest_dir)
-          shutil.copy(dl_file, dest_dir)
-    shutil.rmtree(self._download_dir)
-
-  def DownloadAndUpdateBuild(self):
-    self._DownloadBuild()
-    self._ExtractBuild()
-
-
-def ParseOptions(argv):
-  parser = optparse.OptionParser()
-  usage = 'usage: %prog '
-  parser.set_usage(usage)
-  parser.add_option('-r', dest='revision',
-                    help='Revision to download.')
-  parser.add_option('-t', dest='target_dir',
-                    help='Target directory for unzipped Chromium.')
-
-  (options, _) = parser.parse_args(argv)
-  if not options.revision:
-    logging.critical('Must specify -r.\n')
-    sys.exit(1)
-  if not options.target_dir:
-    logging.critical('Must specify -t.\n')
-    sys.exit(1)
-  return options
-
-def main(argv):
-  logging.getLogger().setLevel(logging.DEBUG)
-  options = ParseOptions(argv)
-  b = BuildUpdater(options)
-  b.DownloadAndUpdateBuild()
-  logging.info('Successfully got archived Chromium build.')
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff --git a/dartium_tools/set_reference_build_revision.py b/dartium_tools/set_reference_build_revision.py
deleted file mode 100755
index e27c1e6..0000000
--- a/dartium_tools/set_reference_build_revision.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Writes a revision number into src/chrome/tools/test/reference_build/REVISON
-   Must be run from the root of a Dartium or multivm checkout.
-
-Usage:
-  $ ./src/dartium_tools/set_reference_build_revision.py
-"""
-
-import os
-import sys
-
-def main(argv):
-  revision = argv[1]
-  output = os.path.join('src', 'chrome', 'tools',
-                        'test', 'reference_build',
-                        'REQUESTED_REVISION')
-  dirname = os.path.dirname(output)
-  if dirname and not os.path.exists(dirname):
-    os.makedirs(dirname)
-  with file(output, 'w') as f:
-    f.write(revision)
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff --git a/dartium_tools/update_deps.py b/dartium_tools/update_deps.py
deleted file mode 100755
index 0b675a1..0000000
--- a/dartium_tools/update_deps.py
+++ /dev/null
@@ -1,226 +0,0 @@
-#!/usr/bin/python
-
-# Update Dartium DEPS automatically.
-
-from datetime import datetime, timedelta
-import optparse
-import os
-import re
-from subprocess import Popen, PIPE
-import sys
-from time import strptime
-
-# Instructions:
-#
-# To run locally:
-#   (a) Create and change to a directory to run the updater in:
-#       > mkdir /usr/local/google/home/$USER/dartium_deps_updater
-#       > cd /usr/local/google/home/$USER/dartium_deps_updater
-#
-#   (b) Checkout a copy of the DEPS for the updater to process / update:
-#       > svn co https://dart.googlecode.com/svn/branches/bleeding_edge/deps/dartium.deps
-#
-#   (c) Checkout dartium_tools (with this script) using the current branch instead of 1750:
-#       > svn co svn://svn.chromium.org/chrome/branches/dart/1750/src/dartium_tools
-#
-#   (d) If your home directory is remote, consider redefining it for this shell/script:
-#       > cp -R $HOME/.subversion /usr/local/google/home/$USER
-#       > export HOME=/usr/local/google/home/$USER
-#
-#   (e) Test by running (Ctrl-C to quit):
-#       > ./dartium_tools/update_deps.py
-#
-#   (f) Run periodical update:
-#       > while true; do ./dartium_tools/update_deps.py --force ; sleep 300 ; done
-
-########################################################################
-# Repositories to auto-update
-########################################################################
-
-# Each element in this map represents a repository to update. Entries
-# take the form:
-#   (repo_tag: (svn_url, view_url))
-#
-# The repo_tag must match the DEPS revision entry. I.e, there must be
-# an entry of the form:
-#   'dartium_%s_revision' % repo_tag
-# to roll forward.
-#
-# The view_url should be parameterized by revision number. This is
-# used to generated the commit message.
-REPOSITORY_INFO = {
-    'webkit': (
-        'http://src.chromium.org/blink/branches/dart/1750',
-        'http://src.chromium.org/viewvc/blink/branches/dart/1750?view=rev&revision=%s'),
-    'chromium': (
-        'http://src.chromium.org/chrome/branches/dart/1750/src',
-        'http://src.chromium.org/viewvc/chrome/branches/dart/1750/src?view=rev&revision=%s'),
-}
-
-REPOSITORIES = REPOSITORY_INFO.keys()
-
-########################################################################
-# Actions
-########################################################################
-
-def write_file(filename, content):
-  f = open(filename, "w")
-  f.write(content)
-  f.close()
-
-def run_cmd(cmd):
-  print "\n[%s]\n$ %s" % (os.getcwd(), " ".join(cmd))
-  pipe = Popen(cmd, stdout=PIPE, stderr=PIPE)
-  output = pipe.communicate()
-  if pipe.returncode == 0:
-    return output[0]
-  else:
-    print output[1]
-    print "FAILED. RET_CODE=%d" % pipe.returncode
-    sys.exit(pipe.returncode)
-
-def parse_iso_time(s):
-  pair = s.rsplit(' ', 1)
-  d = datetime.strptime(pair[0], '%Y-%m-%d %H:%M:%S')
-  offset = timedelta(hours=int(pair[1][0:3]))
-  return d - offset
-
-def parse_git_log(output, repo):
-  if len(output) < 4:
-    return []
-  lst = output.split(os.linesep)
-  lst = [s.strip('\'') for s in lst]
-  lst = [s.split(',', 3) for s in lst]
-  lst = [{'repo': repo,
-          'rev': s[0],
-          'isotime':s[1],
-          'author': s[2],
-          'utctime': parse_iso_time(s[1]),
-          'info': s[3]} for s in lst]
-  return lst
-
-def parse_svn_log(output, repo):
-  lst = output.split(os.linesep)
-  lst = [s.strip('\'') for s in lst]
-  output = '_LINESEP_'.join(lst)
-  lst = output.split('------------------------------------------------------------------------')
-  lst = [s.replace('_LINESEP_', '\n') for s in lst]
-  lst = [s.strip('\n') for s in lst]
-  lst = [s.strip(' ') for s in lst]
-  lst = [s for s in lst if len(s) > 0]
-  pattern = re.compile(' \| (\d+) line(s|)')
-  lst = [pattern.sub(' | ', s) for s in lst]
-  lst = [s.split(' | ', 3) for s in lst]
-  lst = [{'repo': repo,
-          'rev': s[0].replace('r', ''),
-          'author': s[1],
-          'isotime':s[2][0:25],
-          'utctime': parse_iso_time(s[2][0:25]),
-          'info': s[3].split('\n')[2]} for s in lst]
-  return lst
-
-def commit_url(repo, rev):
-  numrev = rev.replace('r', '')
-  if repo in REPOSITORIES:
-    (_, view_url) = REPOSITORY_INFO[repo]
-    return view_url % numrev
-  else:
-    raise Exception('Unknown repo');
-
-def find_max(revs):
-  max_time = None
-  max_position = None
-  for i, rev in enumerate(revs):
-    if rev == []:
-      continue
-    if max_time is None or rev[0]['utctime'] > max_time:
-      max_time = rev[0]['utctime']
-      max_position = i
-  return max_position
-
-def merge_revs(revs):
-  position = find_max(revs)
-  if position is None:
-    return []
-  item = revs[position][0]
-  revs[position] = revs[position][1:]
-  return [item] + merge_revs(revs)
-
-def main():
-  option_parser = optparse.OptionParser()
-  option_parser.add_option('', '--force', help="Push DEPS update to server without prompting", action="store_true", dest="force")
-  options, args = option_parser.parse_args()
-
-  src_dir = "/usr/local/google/home/%s/dartium_deps_updater/dartium.deps" % os.environ["USER"]
-  os.putenv("GIT_PAGER", "")
-
-  if not os.path.exists(src_dir):
-    print "Error: prior to running this script, you need to check out a Dartium source tree at"
-    print "       %s" % src_dir
-    print "Please reserve the above directory for this script and do not use it for other purposes."
-    sys.exit(1)
-
-  os.chdir(src_dir)
-
-  # parse DEPS
-  deps = run_cmd(['svn', 'cat', 'https://dart.googlecode.com/svn/branches/bleeding_edge/deps/dartium.deps/DEPS'])
-  rev_num = {}
-  for repo in REPOSITORIES:
-    revision = 'dartium_%s_revision":\s*"(.+)"' % repo
-    rev_num[repo] = re.search(revision, deps).group(1)
-
-  # update repos
-  all_revs = []
-  for repo, (svn_url, _) in REPOSITORY_INFO.items():
-    output = run_cmd(["svn", "log", "-r", "HEAD:%s" % rev_num[repo], svn_url])
-    revs = parse_svn_log(output, repo)
-    if revs and revs[-1]['rev'] == rev_num[repo]:
-      revs.pop()
-    all_revs.append(revs)
-
-  pending_updates = merge_revs(all_revs)
-  pending_updates.reverse()
-
-  print
-  print "Current DEPS revisions:"
-  for repo in REPOSITORIES:
-    print '  dartium_%s_revision=%s' % (repo, rev_num[repo])
-
-  if len(pending_updates) == 0:
-    print "DEPS is up-to-date."
-    sys.exit(0)
-  else:
-    print "Pending DEPS updates:"
-    for s in pending_updates:
-      print "  %s to %s (%s) %s" % (s['repo'], s['rev'], s['isotime'], s['info'])
-
-  # make the next DEPS update
-  os.chdir(src_dir)
-  run_cmd(['rm', 'DEPS'])
-  print run_cmd(['svn', 'update'])
-  s = pending_updates[0]
-
-  pattern = re.compile('dartium_' + s['repo'] + '_revision":\s*"(.+)"')
-  new_deps = pattern.sub('dartium_' + s['repo'] + '_revision": "' + s['rev'] + '"', deps)
-  write_file('DEPS', new_deps)
-
-  commit_log = 'DEPS AutoUpdate: %s to %s (%s) %s\n' % (s['repo'], s['rev'], s['isotime'], s['author'])
-  commit_log += s['info'] + '\n' + commit_url(s['repo'], s['rev'])
-
-  write_file('commit_log.txt', commit_log)
-  print run_cmd(['svn', 'diff'])
-  print
-  print "Commit log:"
-  print "---------------------------------------------"
-  print commit_log
-  print "---------------------------------------------"
-
-  if not options.force:
-    print "Ready to push; press Enter to continue or Control-C to abort..."
-    sys.stdin.readline()
-  print run_cmd(['svn', 'commit', '--file', 'commit_log.txt'])
-  print "Done."
-
-
-if '__main__' == __name__:
-  main()
--
cgit v1.1