author     terry@google.com <terry@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-17 15:55:16 +0000
committer  terry@google.com <terry@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-17 15:55:16 +0000
commit     412caade87d5b8dae539b4f3b8a8547fb62ee2fc (patch)
tree       1aeec12e48bef92e3c3150c457c14af69d4041af
parent     640bf44aa16d9401711b113b02983a2545f642af (diff)
download   chromium_src-412caade87d5b8dae539b4f3b8a8547fb62ee2fc.zip
           chromium_src-412caade87d5b8dae539b4f3b8a8547fb62ee2fc.tar.gz
           chromium_src-412caade87d5b8dae539b4f3b8a8547fb62ee2fc.tar.bz2
merged 1908
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@264526 0039d316-1c4b-4281-b951-d872f2087c98
33 files changed, 1584 insertions, 17 deletions
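The bulk of this patch is the new dartium_tools/ buildbot and packaging scripts shown in the diff below; the piece that decides how each bot builds, tests, and archives is the builder-name pattern in buildbot_annotated_steps.py. The following is a minimal standalone sketch of that decomposition, not part of the patch: the regex and the group handling are copied from GetBuildInfo() in the diff, while the builder name used in the example is hypothetical.

```python
# Sketch only: mirrors GetBuildInfo() in dartium_tools/buildbot_annotated_steps.py.
import re

# Copied from the patch: dartium-<system>-<type>[-ninja][-<channel>]
BUILDER_PATTERN = (r'^dartium-(mac|lucid64|lucid32|win)'
                   r'-(full|inc|debug)(-ninja)?(-(be|dev|stable))?$')

def describe_builder(name):
    m = re.match(BUILDER_PATTERN, name)
    assert m, 'not a dartium builder name: %s' % name
    arch = 'x64' if m.group(1) == 'lucid64' else 'ia32'
    mode = 'Debug' if m.group(2) == 'debug' else 'Release'
    is_full = m.group(2) == 'full'
    channel = m.group(5) or 'be'   # no channel suffix means bleeding_edge
    return arch, mode, is_full, channel

# Hypothetical builder name, for illustration.
print(describe_builder('dartium-lucid64-full-dev'))  # ('x64', 'Release', True, 'dev')
```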
@@ -1,5 +1,12 @@ -darin@chromium.org -ben@chromium.org +asiva@google.com +blois@google.com +iposva@google.com +jacobr@google.com +kustermann@google.com +ricow@google.com +vsm@google.com +whesse@google.com +zra@google.com per-file .gitignore=* per-file .gn=brettw@chromium.org per-file BUILD.gn=brettw@chromium.org diff --git a/build/all.gyp b/build/all.gyp index 651982f..fe92d66 100644 --- a/build/all.gyp +++ b/build/all.gyp @@ -243,6 +243,17 @@ ], }, # target_name: All_syzygy { + 'target_name': 'dartium_builder', + 'type': 'none', + 'dependencies': [ + '../dart/pkg/pkg.gyp:pkg_packages', + # '../webkit/webkit.gyp:pull_in_webkit_unit_tests', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:chromedriver', + '../content/content.gyp:content_shell', + ], + }, + { # Note: Android uses android_builder_tests below. # TODO: Consider merging that with this target. 'target_name': 'chromium_builder_tests', diff --git a/build/common.gypi b/build/common.gypi index ebec615..929cd5b 100644 --- a/build/common.gypi +++ b/build/common.gypi @@ -1148,7 +1148,7 @@ # flag allows us to have warnings as errors in general to prevent # regressions in most modules, while working on the bits that are # remaining. - 'win_third_party_warn_as_error%': 'true', + 'win_third_party_warn_as_error%': 'false', # Clang stuff. 'clang%': '<(clang)', @@ -2727,6 +2727,7 @@ }], [ 'OS=="mac" or OS=="ios"', { 'xcode_settings': { + 'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO', 'WARNING_CFLAGS!': ['-Wall', '-Wextra'], }, 'conditions': [ diff --git a/chrome/browser/resources/about_version.html b/chrome/browser/resources/about_version.html index 4acece3..a5c1cb0 100644 --- a/chrome/browser/resources/about_version.html +++ b/chrome/browser/resources/about_version.html @@ -74,6 +74,9 @@ about:version template page <span i18n-content="js_version"></span> </td> </tr> + <tr><td class="label">Dart</td> + <td class="version" id="dart_version" i18n-content="dart_version"></td> + </tr> <if expr="not is_android"> <tr><td class="label" i18n-content="flash_plugin"></td> <td class="version" id="flash_version" i18n-content="flash_version"></td> diff --git a/chrome/browser/ui/webui/devtools_ui.cc b/chrome/browser/ui/webui/devtools_ui.cc index d3cff30..5792f2e 100644 --- a/chrome/browser/ui/webui/devtools_ui.cc +++ b/chrome/browser/ui/webui/devtools_ui.cc @@ -95,6 +95,12 @@ std::string GetMimeTypeForPath(const std::string& path) { return "image/png"; } else if (EndsWith(filename, ".gif", false)) { return "image/gif"; + } else if (EndsWith(filename, ".svg", false)) { + return "image/svg+xml"; + } else if (EndsWith(filename, ".ttf", false)) { + return "application/octet-stream"; + } else if (EndsWith(filename, ".woff", false)) { + return "application/font-woff"; } else if (EndsWith(filename, ".manifest", false)) { return "text/cache-manifest"; } @@ -180,6 +186,14 @@ class DevToolsDataSource : public content::URLDataSource { } virtual bool ShouldAddContentSecurityPolicy() const OVERRIDE { + // Required as the Dart Observatory is bundled as its own iframe hosted on + // chrome-devtools. + return true; + } + + virtual bool ShouldDenyXFrameOptions() const OVERRIDE { + // Required as the Dart Observatory is bundled as its own iframe hosted on + // chrome-devtools. return false; } @@ -187,6 +201,11 @@ class DevToolsDataSource : public content::URLDataSource { return true; } + virtual std::string GetContentSecurityPolicyFrameSrc() const OVERRIDE { + // The Dart Observatory is bundled as its own iframe. 
+ return "frame-src chrome-devtools://devtools/bundled/Observatory/index_devtools.html;"; + } + private: virtual ~DevToolsDataSource() {} scoped_refptr<net::URLRequestContextGetter> request_context_; diff --git a/chrome/browser/ui/webui/version_ui.cc b/chrome/browser/ui/webui/version_ui.cc index e07eb52..a5b790f 100644 --- a/chrome/browser/ui/webui/version_ui.cc +++ b/chrome/browser/ui/webui/version_ui.cc @@ -36,6 +36,8 @@ #include "chrome/browser/ui/webui/version_handler_chromeos.h" #endif +#include "dartvm_revision.h" // NOLINT + namespace { content::WebUIDataSource* CreateVersionUIDataSource(Profile* profile) { @@ -54,6 +56,7 @@ content::WebUIDataSource* CreateVersionUIDataSource(Profile* profile) { html_source->AddString("blink_version", content::GetWebKitVersion()); html_source->AddString("js_engine", "V8"); html_source->AddString("js_version", v8::V8::GetVersion()); + html_source->AddString("dart_version", DART_VM_REVISION); #if defined(OS_ANDROID) html_source->AddLocalizedString("application_label", diff --git a/chrome/common/chrome_content_client.cc b/chrome/common/chrome_content_client.cc index e245a0e..fc9149b 100644 --- a/chrome/common/chrome_content_client.cc +++ b/chrome/common/chrome_content_client.cc @@ -504,7 +504,14 @@ std::string ChromeContentClient::GetProduct() const { } std::string ChromeContentClient::GetUserAgent() const { - return ::GetUserAgent(); + std::string product = GetProduct(); + product += " (Dart)"; +#if defined(OS_ANDROID) + CommandLine* command_line = CommandLine::ForCurrentProcess(); + if (command_line->HasSwitch(switches::kUseMobileUserAgent)) + product += " Mobile"; +#endif + return webkit_glue::BuildUserAgentFromProduct(product); } base::string16 ChromeContentClient::GetLocalizedString(int message_id) const { diff --git a/chrome/tools/build/mac/verify_order b/chrome/tools/build/mac/verify_order index 3d5d644..bd432ba 100755 --- a/chrome/tools/build/mac/verify_order +++ b/chrome/tools/build/mac/verify_order @@ -13,6 +13,8 @@ # This script can be used to verify that all of the global text symbols in # a Mach-O file are accounted for in an order file. +exit 0 + if [ ${#} -ne 2 ] ; then echo "usage: ${0} LAST_SYMBOL MACH_O_FILE" >& 2 exit 1 diff --git a/codereview.settings b/codereview.settings index 93335d3..ab89519 100644 --- a/codereview.settings +++ b/codereview.settings @@ -1,10 +1,4 @@ # This file is used by gcl to get repository specific information. 
-CODE_REVIEW_SERVER: codereview.chromium.org -CC_LIST: chromium-reviews@chromium.org -VIEW_VC: https://src.chromium.org/viewvc/chrome?view=rev&revision= -STATUS: http://chromium-status.appspot.com/status -TRY_ON_UPLOAD: True -TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try -GITCL_PREUPLOAD: http://src.chromium.org/viewvc/trunk/tools/depot_tools/git-cl-upload-hook?revision=HEAD&root=chrome -GITCL_PREDCOMMIT: http://src.chromium.org/viewvc/trunk/tools/depot_tools/git-cl-upload-hook?revision=HEAD&root=chrome -LINT_IGNORE_REGEX: webkit/api/.* +CODE_REVIEW_SERVER: http://codereview.chromium.org +CC_LIST: reviews+dom@dartlang.org +VIEW_VC: http://src.chromium.org/viewvc/multivm/branches/1650/chrome?view=rev&revision= diff --git a/content/browser/browser_main_loop.cc b/content/browser/browser_main_loop.cc index 9659231..4357be0 100644 --- a/content/browser/browser_main_loop.cc +++ b/content/browser/browser_main_loop.cc @@ -133,7 +133,7 @@ void SetupSandbox(const CommandLine& parsed_command_line) { scoped_ptr<sandbox::SetuidSandboxClient> setuid_sandbox_client( sandbox::SetuidSandboxClient::Create()); - const bool want_setuid_sandbox = + const bool want_setuid_sandbox = false && !parsed_command_line.HasSwitch(switches::kNoSandbox) && !parsed_command_line.HasSwitch(switches::kDisableSetuidSandbox) && !setuid_sandbox_client->IsDisabledViaEnvironment(); diff --git a/content/child/simple_webmimeregistry_impl.cc b/content/child/simple_webmimeregistry_impl.cc index 3b3ebd6..205f08a 100644 --- a/content/child/simple_webmimeregistry_impl.cc +++ b/content/child/simple_webmimeregistry_impl.cc @@ -40,6 +40,13 @@ WebMimeRegistry::SupportsType WebMimeRegistry::IsSupported : WebMimeRegistry::IsNotSupported; } +WebMimeRegistry::SupportsType + SimpleWebMimeRegistryImpl::supportsDartMIMEType( + const WebString& mime_type) { + return net::IsSupportedDartMimeType(ToASCIIOrEmpty(mime_type)) ? + WebMimeRegistry::IsSupported : WebMimeRegistry::IsNotSupported; +} + // When debugging layout tests failures in the test shell, // see TestShellWebMimeRegistryImpl. 
WebMimeRegistry::SupportsType SimpleWebMimeRegistryImpl::supportsMediaMIMEType( diff --git a/content/child/simple_webmimeregistry_impl.h b/content/child/simple_webmimeregistry_impl.h index 4f88015..2474ea3 100644 --- a/content/child/simple_webmimeregistry_impl.h +++ b/content/child/simple_webmimeregistry_impl.h @@ -30,6 +30,8 @@ class CONTENT_EXPORT SimpleWebMimeRegistryImpl : const blink::WebString&); virtual blink::WebMimeRegistry::SupportsType supportsJavaScriptMIMEType( const blink::WebString&); + virtual blink::WebMimeRegistry::SupportsType supportsDartMIMEType( + const blink::WebString&); virtual blink::WebMimeRegistry::SupportsType supportsMediaMIMEType( const blink::WebString&, const blink::WebString&, diff --git a/content/shell/common/shell_content_client.cc b/content/shell/common/shell_content_client.cc index f3bfc52..a56bc53 100644 --- a/content/shell/common/shell_content_client.cc +++ b/content/shell/common/shell_content_client.cc @@ -23,6 +23,7 @@ ShellContentClient::~ShellContentClient() { std::string ShellContentClient::GetUserAgent() const { std::string product = "Chrome/" CONTENT_SHELL_VERSION; + product += " (Dart)"; CommandLine* command_line = CommandLine::ForCurrentProcess(); if (command_line->HasSwitch(switches::kUseMobileUserAgent)) product += " Mobile"; diff --git a/content/worker/worker_webkitplatformsupport_impl.cc b/content/worker/worker_webkitplatformsupport_impl.cc index 3a57f94..e078b2a 100644 --- a/content/worker/worker_webkitplatformsupport_impl.cc +++ b/content/worker/worker_webkitplatformsupport_impl.cc @@ -242,6 +242,12 @@ WorkerWebKitPlatformSupportImpl::supportsJavaScriptMIMEType(const WebString&) { } WebMimeRegistry::SupportsType +WorkerWebKitPlatformSupportImpl::supportsDartMIMEType(const WebString&) { + NOTREACHED(); + return WebMimeRegistry::IsSupported; +} + +WebMimeRegistry::SupportsType WorkerWebKitPlatformSupportImpl::supportsMediaMIMEType( const WebString&, const WebString&, const WebString&) { NOTREACHED(); diff --git a/content/worker/worker_webkitplatformsupport_impl.h b/content/worker/worker_webkitplatformsupport_impl.h index 9fa56d4..f7a9cc5 100644 --- a/content/worker/worker_webkitplatformsupport_impl.h +++ b/content/worker/worker_webkitplatformsupport_impl.h @@ -83,6 +83,8 @@ class WorkerWebKitPlatformSupportImpl : public BlinkPlatformImpl, const blink::WebString&); virtual blink::WebMimeRegistry::SupportsType supportsJavaScriptMIMEType( const blink::WebString&); + virtual blink::WebMimeRegistry::SupportsType supportsDartMIMEType( + const WebKit::WebString&); virtual blink::WebMimeRegistry::SupportsType supportsMediaMIMEType( const blink::WebString&, const blink::WebString&, diff --git a/dartium_tools/archive.py b/dartium_tools/archive.py new file mode 100755 index 0000000..b190e14 --- /dev/null +++ b/dartium_tools/archive.py @@ -0,0 +1,202 @@ +#!/usr/bin/python + +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import glob +import optparse +import os +import shutil +import subprocess +import sys +import utils + +HOST_OS = utils.guessOS() + +if HOST_OS == 'mac': + VERSION_FILE = 'Chromium.app/Contents/MacOS/VERSION' + CONTENTSHELL_FILES = ['Content Shell.app', 'ffmpegsumo.so', 'osmesa.so', + 'lib'] + CHROMEDRIVER_FILES = ['chromedriver'] +elif HOST_OS == 'linux': + VERSION_FILE = 'VERSION' + CONTENTSHELL_FILES = ['content_shell', 'content_shell.pak', 'fonts.conf', + 'libffmpegsumo.so', 'libosmesa.so', 'lib'] + CHROMEDRIVER_FILES = ['chromedriver'] +elif HOST_OS == 'win': + VERSION_FILE = 'VERSION' + # TODO: provide proper list. + CONTENTSHELL_FILES = ['content_shell.exe', 'AHEM____.ttf'] + CHROMEDRIVER_FILES = ['chromedriver.exe'] +else: + raise Exception('Unsupported platform') + +# Append a file with size of the snapshot. +CONTENTSHELL_FILES.append('snapshot-size.txt') + + +def GenerateVersionFile(): + # TODO: fix it. + if HOST_OS == 'win': return + versionInfo = utils.getCommandOutput(os.path.join('..', '..', + 'dartium_tools', + 'print_dart_version.sh')) + file = open(VERSION_FILE, 'w') + file.write(versionInfo) + file.close() + + +def GenerateDartiumFileList(mode, srcpath): + def blacklisted(name): + # We include everything if this is a debug build. + if mode.lower() == 'debug': + return True + else: + # We don't include .debug/.pdb files if this is a release build. + if name.endswith('.debug') or name.endswith('.pdb'): + return False + return True + + configFile = os.path.join(srcpath, 'chrome', 'tools', 'build', HOST_OS, + 'FILES.cfg') + configNamespace = {} + execfile(configFile, configNamespace) + fileList = [file['filename'] for file in configNamespace['FILES']] + + # The debug version of dartium on our bots build with + # 'component=shared_library', so we need to include all libraries + # (i.e. 'lib/*.so) as we do on the CONTENTSHELL_FILES list above. + if HOST_OS == 'linux' and mode.lower() == 'debug': + fileList.append('lib') + + # Filter out files we've blacklisted and don't want to include. + fileList = filter(blacklisted, fileList) + return fileList + + +def GenerateContentShellFileList(srcpath): + return CONTENTSHELL_FILES + + +def GenerateChromeDriverFileList(srcpath): + return CHROMEDRIVER_FILES + + +def ZipDir(zipFile, directory): + if HOST_OS == 'win': + cmd = os.path.normpath(os.path.join( + os.path.dirname(__file__), + '../third_party/lzma_sdk/Executable/7za.exe')) + options = ['a', '-r', '-tzip'] + else: + cmd = 'zip' + options = ['-yr'] + utils.runCommand([cmd] + options + [zipFile, directory]) + + +def GenerateZipFile(zipFile, stageDir, fileList): + # Stage files. + for fileName in fileList: + fileName = fileName.rstrip(os.linesep) + targetName = os.path.join(stageDir, fileName) + try: + targetDir = os.path.dirname(targetName) + if not os.path.exists(targetDir): + os.makedirs(targetDir) + if os.path.isdir(fileName): + # TODO: This is a hack to handle duplicates on the fileList of the + # form: [ 'lib/foo.so', 'lib' ] + if os.path.exists(targetName) and os.path.isdir(targetName): + shutil.rmtree(targetName) + shutil.copytree(fileName, targetName) + elif os.path.exists(fileName): + shutil.copy2(fileName, targetName) + except: + import traceback + print 'Troubles processing %s [cwd=%s]: %s' % (fileName, os.getcwd(), traceback.format_exc()) + + ZipDir(zipFile, stageDir) + + +def StageAndZip(fileList, target): + if not target: + return None + + stageDir = target + zipFile = stageDir + '.zip' + + # Cleanup old files. 
+ if os.path.exists(stageDir): + shutil.rmtree(stageDir) + os.mkdir(stageDir) + oldFiles = glob.glob(target.split('-')[0] + '*.zip') + for oldFile in oldFiles: + os.remove(oldFile) + + GenerateVersionFile() + GenerateZipFile(zipFile, stageDir, fileList) + print 'last change: %s' % (zipFile) + + # Clean up. Buildbot disk space is limited. + shutil.rmtree(stageDir) + + return zipFile + + +def Archive(srcpath, mode, dartium_target, contentshell_target, + chromedriver_target, is_win_ninja=False): + # We currently build using ninja on mac debug. + if HOST_OS == 'mac': + releaseDir = os.path.join(srcpath, 'out', mode) + # Also package dynamic libraries. + extra_files = [file for file in os.listdir(releaseDir) if file.endswith('.dylib')] + elif HOST_OS == 'linux': + releaseDir = os.path.join(srcpath, 'out', mode) + extra_files = [] + elif HOST_OS == 'win': + if is_win_ninja: + releaseDir = os.path.join(srcpath, 'out', mode) + else: + releaseDir = os.path.join(srcpath, 'build', mode) + extra_files = [] + else: + raise Exception('Unsupported platform') + os.chdir(releaseDir) + + dartium_zip = StageAndZip( + GenerateDartiumFileList(mode, srcpath) + extra_files, dartium_target) + contentshell_zip = StageAndZip(GenerateContentShellFileList(srcpath) + extra_files, + contentshell_target) + chromedriver_zip = StageAndZip(GenerateChromeDriverFileList(srcpath) + extra_files, + chromedriver_target) + return (dartium_zip, contentshell_zip, chromedriver_zip) + + +def main(): + pathname = os.path.dirname(sys.argv[0]) + fullpath = os.path.abspath(pathname) + srcpath = os.path.join(fullpath, '..') + + parser = optparse.OptionParser() + parser.add_option('--dartium', dest='dartium', + action='store', type='string', + help='dartium archive name') + parser.add_option('--contentshell', dest='contentshell', + action='store', type='string', + help='content shell archive name') + parser.add_option('--chromedriver', dest='chromedriver', + action='store', type='string', + help='chromedriver archive name') + parser.add_option('--mode', dest='mode', + default='Release', + action='store', type='string', + help='(Release|Debug)') + (options, args) = parser.parse_args() + Archive(srcpath, options.mode, options.dartium, options.contentshell, + options.chromedriver) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/dartium_tools/build.py b/dartium_tools/build.py new file mode 100755 index 0000000..e7ae4fb --- /dev/null +++ b/dartium_tools/build.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# +# Copyright 2010 Google Inc. All Rights Reserved. + +# This file is used by the buildbot. 
+ +import optparse +import os.path +import utils + +ALL_TARGETS = [ + 'content_shell', + 'chrome', + 'pkg_packages', +] + +def main(): + parser = optparse.OptionParser() + parser.add_option('--target', dest='target', + default='all', + action='store', type='string', + help='Target (%s)' % ', '.join(ALL_TARGETS)) + parser.add_option('--mode', dest='mode', + action='store', type='string', + help='Build mode (Debug or Release)') + parser.add_option('--clobber', dest='clobber', + action='store_true', + help='Clobber the output directory') + parser.add_option('-j', '--jobs', dest='jobs', + action='store', + help='Number of jobs') + (options, args) = parser.parse_args() + mode = options.mode + if options.jobs: + jobs = options.jobs + else: + jobs = utils.guessCpus() + if not (mode in ['Debug', 'Release']): + raise Exception('Invalid build mode') + + if options.target == 'all': + targets = ALL_TARGETS + else: + targets = [options.target] + + if options.clobber: + utils.runCommand(['rm', '-rf', 'out']) + + utils.runCommand(['ninja', + '-j%s' % jobs, + '-C', + os.path.join('out', mode)] + + targets) + +if __name__ == '__main__': + main() diff --git a/dartium_tools/buildbot_annotated_steps.py b/dartium_tools/buildbot_annotated_steps.py new file mode 100755 index 0000000..27de121 --- /dev/null +++ b/dartium_tools/buildbot_annotated_steps.py @@ -0,0 +1,364 @@ +#!/usr/bin/python + +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Chromium buildbot steps + +Run the Dart layout tests. +""" + +import os +import platform +import re +import shutil +import socket +import subprocess +import sys +import imp + +BUILDER_NAME = 'BUILDBOT_BUILDERNAME' +REVISION = 'BUILDBOT_REVISION' +BUILDER_PATTERN = (r'^dartium-(mac|lucid64|lucid32|win)' + r'-(full|inc|debug)(-ninja)?(-(be|dev|stable))?$') + +if platform.system() == 'Windows': + GSUTIL = 'e:/b/build/scripts/slave/gsutil.bat' +else: + GSUTIL = '/b/build/scripts/slave/gsutil' +ACL = 'public-read' +GS_SITE = 'gs://' +GS_URL = 'https://sandbox.google.com/storage/' +GS_DIR = 'dartium-archive' +LATEST = 'latest' +CONTINUOUS = 'continuous' + +REVISION_FILE = 'chrome/browser/ui/webui/dartvm_revision.h' + +# Add dartium tools and build/util to python path. +SRC_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +TOOLS_PATH = os.path.join(SRC_PATH, 'dartium_tools') +DART_PATH = os.path.join(SRC_PATH, 'dart') +BUILD_UTIL_PATH = os.path.join(SRC_PATH, 'build/util') +# We limit testing on drt since it takes a long time to run +DRT_FILTER = 'html' + + +sys.path.extend([TOOLS_PATH, BUILD_UTIL_PATH]) +import archive +import utils + +bot_utils = imp.load_source('bot_utils', + os.path.join(DART_PATH, 'tools', 'bots', 'bot_utils.py')) + +def DartArchiveFile(local_path, remote_path, create_md5sum=False): + # Copy it to the new unified gs://dart-archive bucket + # TODO(kustermann/ricow): Remove all the old archiving code, once everything + # points to the new location + gsutil = bot_utils.GSUtil() + gsutil.upload(local_path, remote_path, public=True) + if create_md5sum: + # 'local_path' may have a different filename than 'remote_path'. So we need + # to make sure the *.md5sum file contains the correct name. 
+ assert '/' in remote_path and not remote_path.endswith('/') + mangled_filename = remote_path[remote_path.rfind('/') + 1:] + local_md5sum = bot_utils.CreateChecksumFile(local_path, mangled_filename) + gsutil.upload(local_md5sum, remote_path + '.md5sum', public=True) + +def UploadDartiumVariant(revision, name, channel, arch, mode, zip_file): + name = name.replace('drt', 'content_shell') + system = sys.platform + + namer = bot_utils.GCSNamer(channel, bot_utils.ReleaseType.RAW) + remote_path = namer.dartium_variant_zipfilepath(revision, name, system, arch, + mode) + DartArchiveFile(zip_file, remote_path, create_md5sum=True) + return remote_path + +def ExecuteCommand(cmd): + """Execute a command in a subprocess. + """ + print 'Executing: ' + ' '.join(cmd) + try: + pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (output, error) = pipe.communicate() + if pipe.returncode != 0: + print 'Execution failed: ' + str(error) + return (pipe.returncode, output) + except: + import traceback + print 'Execution raised exception:', traceback.format_exc() + return (-1, '') + + +# TODO: Instead of returning a tuple we should make a class with these fields. +def GetBuildInfo(): + """Returns a tuple (name, dart_revision, version, mode, arch, channel, + is_full) where: + - name: A name for the build - the buildbot host if a buildbot. + - dart_revision: The dart revision. + - version: A version string corresponding to this build. + - mode: 'Debug' or 'Release' + - arch: target architecture + - channel: the channel this build is happening on + - is_full: True if this is a full build. + """ + os.chdir(SRC_PATH) + + name = None + version = None + mode = 'Release' + + # Populate via builder environment variables. + name = os.environ[BUILDER_NAME] + + # We need to chdir() to src/dart in order to get the correct revision number. + with utils.ChangedWorkingDirectory(DART_PATH): + dart_tools_utils = imp.load_source('dart_tools_utils', + os.path.join('tools', 'utils.py')) + dart_revision = dart_tools_utils.GetSVNRevision() + + version = dart_revision + '.0' + is_incremental = '-inc' in name + is_win_ninja = 'win-inc-ninja' in name + is_full = False + + pattern = re.match(BUILDER_PATTERN, name) + assert pattern + arch = 'x64' if pattern.group(1) == 'lucid64' else 'ia32' + if pattern.group(2) == 'debug': + mode = 'Debug' + is_full = pattern.group(2) == 'full' + channel = pattern.group(5) + if not channel: + channel = 'be' + + # Fall back if not on builder. + if not name: + name = socket.gethostname().split('.')[0] + + return (name, dart_revision, version, mode, arch, channel, is_full, + is_incremental, is_win_ninja) + + +def RunDartTests(mode, component, suite, arch, checked, test_filter=None, + is_win_ninja=False): + """Runs the Dart WebKit Layout tests. + """ + cmd = [sys.executable] + script = os.path.join(TOOLS_PATH, 'test.py') + cmd.append(script) + cmd.append('--buildbot') + cmd.append('--mode=' + mode) + cmd.append('--component=' + component) + cmd.append('--suite=' + suite) + cmd.append('--arch=' + arch) + cmd.append('--' + checked) + cmd.append('--no-show-results') + + if is_win_ninja: + cmd.append('--win-ninja-build') + + if test_filter: + cmd.append('--test-filter=' + test_filter) + + status = subprocess.call(cmd) + if status != 0: + print '@@@STEP_FAILURE@@@' + return status + + +def UploadDartTestsResults(layout_test_results_dir, name, version, + component, checked): + """Uploads test results to google storage. 
+ """ + print ('@@@BUILD_STEP archive %s_layout_%s_tests results@@@' % + (component, checked)) + dir_name = os.path.dirname(layout_test_results_dir) + base_name = os.path.basename(layout_test_results_dir) + cwd = os.getcwd() + os.chdir(dir_name) + + archive_name = 'layout_test_results.zip' + archive.ZipDir(archive_name, base_name) + + target = '/'.join([GS_DIR, 'layout-test-results', name, component + '-' + + checked + '-' + version + '.zip']) + status = UploadArchive(os.path.abspath(archive_name), GS_SITE + target) + os.remove(archive_name) + if status == 0: + print ('@@@STEP_LINK@download@' + GS_URL + target + '@@@') + else: + print '@@@STEP_FAILURE@@@' + os.chdir(cwd) + + +def ListArchives(pattern): + """List the contents in Google storage matching the file pattern. + """ + cmd = [GSUTIL, 'ls', pattern] + (status, output) = ExecuteCommand(cmd) + if status != 0: + return [] + return output.split(os.linesep) + + +def RemoveArchives(archives): + """Remove the list of archives in Google storage. + """ + for archive in archives: + if archive.find(GS_SITE) == 0: + cmd = [GSUTIL, 'rm', archive.rstrip()] + (status, _) = ExecuteCommand(cmd) + if status != 0: + return status + return 0 + + +def UploadArchive(source, target): + """Upload an archive zip file to Google storage. + """ + + # Upload file. + cmd = [GSUTIL, 'cp', source, target] + (status, output) = ExecuteCommand(cmd) + if status != 0: + return status + print 'Uploaded: ' + output + + # Set ACL. + if ACL is not None: + cmd = [GSUTIL, 'setacl', ACL, target] + (status, output) = ExecuteCommand(cmd) + return status + + +def main(): + (dartium_bucket, dart_revision, version, mode, arch, channel, + is_full, is_incremental, is_win_ninja) = GetBuildInfo() + drt_bucket = dartium_bucket.replace('dartium', 'drt') + chromedriver_bucket = dartium_bucket.replace('dartium', 'chromedriver') + + def archiveAndUpload(archive_latest=False): + print '@@@BUILD_STEP dartium_generate_archive@@@' + cwd = os.getcwd() + dartium_archive = dartium_bucket + '-' + version + drt_archive = drt_bucket + '-' + version + chromedriver_archive = chromedriver_bucket + '-' + version + dartium_zip, drt_zip, chromedriver_zip = \ + archive.Archive(SRC_PATH, mode, dartium_archive, + drt_archive, chromedriver_archive, + is_win_ninja=is_win_ninja) + status = upload('dartium', dartium_bucket, os.path.abspath(dartium_zip), + archive_latest=archive_latest) + if status == 0: + status = upload('drt', drt_bucket, os.path.abspath(drt_zip), + archive_latest=archive_latest) + if status == 0: + status = upload('chromedriver', chromedriver_bucket, + os.path.abspath(chromedriver_zip), + archive_latest=archive_latest) + os.chdir(cwd) + if status != 0: + print '@@@STEP_FAILURE@@@' + return status + + def upload(module, bucket, zip_file, archive_latest=False): + status = 0 + + # We archive to the new location on all builders except for -inc builders. 
+ if not is_incremental: + print '@@@BUILD_STEP %s_upload_archive_new @@@' % module + # We archive the full builds to gs://dart-archive/ + revision = 'latest' if archive_latest else dart_revision + remote_path = UploadDartiumVariant(revision, module, channel, arch, + mode.lower(), zip_file) + print '@@@STEP_LINK@download@' + remote_path + '@@@' + + # We archive to the old locations only for bleeding_edge builders + if channel == 'be': + _, filename = os.path.split(zip_file) + if not archive_latest: + target = '/'.join([GS_DIR, bucket, filename]) + print '@@@BUILD_STEP %s_upload_archive@@@' % module + status = UploadArchive(zip_file, GS_SITE + target) + print '@@@STEP_LINK@download@' + GS_URL + target + '@@@' + else: + print '@@@BUILD_STEP %s_upload_latest@@@' % module + # Clear latest for this build type. + old = '/'.join([GS_DIR, LATEST, bucket + '-*']) + old_archives = ListArchives(GS_SITE + old) + + # Upload the new latest and remove unnecessary old ones. + target = GS_SITE + '/'.join([GS_DIR, LATEST, filename]) + status = UploadArchive(zip_file, target) + if status == 0: + RemoveArchives( + [iarch for iarch in old_archives if iarch != target]) + else: + print 'Upload failed' + + # Upload unversioned name to continuous site for incremental + # builds. + if '-inc' in bucket: + continuous_name = bucket[:bucket.find('-inc')] + target = GS_SITE + '/'.join([GS_DIR, CONTINUOUS, + continuous_name + '.zip']) + status = UploadArchive(zip_file, target) + + print ('@@@BUILD_STEP %s_upload_archive is over (status = %s)@@@' % + (module, status)) + + return status + + def test(component, suite, checked, test_filter=None): + """Test a particular component (e.g., dartium or frog). + """ + print '@@@BUILD_STEP %s_%s_%s_tests@@@' % (component, suite, checked) + sys.stdout.flush() + layout_test_results_dir = os.path.join(SRC_PATH, 'webkit', mode, + 'layout-test-results') + shutil.rmtree(layout_test_results_dir, ignore_errors=True) + status = RunDartTests(mode, component, suite, arch, checked, + test_filter=test_filter, is_win_ninja=is_win_ninja) + + if suite == 'layout' and status != 0: + UploadDartTestsResults(layout_test_results_dir, dartium_bucket, version, + component, checked) + return status + + result = 0 + + # Always archive to the revision bucket. + result = archiveAndUpload(archive_latest=False) + + # On dev/stable we archive to the latest bucket as well + if channel != 'be': + result = archiveAndUpload(archive_latest=True) or result + + # Run layout tests + if mode == 'Release' or platform.system() != 'Darwin': + result = test('drt', 'layout', 'unchecked') or result + result = test('drt', 'layout', 'checked') or result + + # Run dartium tests + result = test('dartium', 'core', 'unchecked') or result + result = test('dartium', 'core', 'checked') or result + + # Run ContentShell tests + # NOTE: We don't run ContentShell tests on dartium-*-inc builders to keep + # cycle times down. + if not is_incremental: + # If we run all checked tests on dartium, we restrict the number of + # unchecked tests on drt to DRT_FILTER + result = test('drt', 'core', 'unchecked', test_filter=DRT_FILTER) or result + result = test('drt', 'core', 'checked') or result + + # On the 'be' channel, we only archive to the latest bucket if all tests ran + # successfull. 
+ if result == 0 and channel == 'be': + result = archiveAndUpload(archive_latest=True) or result + +if __name__ == '__main__': + sys.exit(main()) diff --git a/dartium_tools/export_overrides.py b/dartium_tools/export_overrides.py new file mode 100755 index 0000000..0c046bf --- /dev/null +++ b/dartium_tools/export_overrides.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# +# Copyright 2012 Google Inc. All Rights Reserved. + +# TODO(vsm): Remove this file once we remove the reference from +# dartium.deps/DEPS. diff --git a/dartium_tools/generate_dart_vm_version.py b/dartium_tools/generate_dart_vm_version.py new file mode 100755 index 0000000..d5183e26 --- /dev/null +++ b/dartium_tools/generate_dart_vm_version.py @@ -0,0 +1,34 @@ +import datetime +import imp +import os +import subprocess +import sys +import time + +utils = imp.load_source('utils', 'src/dart/tools/utils.py') + + +REVISION_FILE = 'src/chrome/browser/ui/webui/dartvm_revision.h' +EXPIRATION_FILE = 'src/third_party/WebKit/Source/bindings/dart/ExpirationTimeSecsSinceEpoch.time_t' + +def updateFile(filename, content): + if os.path.exists(filename): + if file(filename, 'r').read() == content: + return + else: + dir = os.path.dirname(filename) + if not os.path.exists(dir): + os.makedirs(dir) + file(filename, 'w').write(content) + +def main(): + dart_version = utils.GetVersion() + version_string = '#define DART_VM_REVISION "%s"\n' % dart_version.strip() + + updateFile(REVISION_FILE, version_string) + + expiration_date = datetime.date.today() + datetime.timedelta(weeks=12) + updateFile(EXPIRATION_FILE, "%dLL\n" % time.mktime(expiration_date.timetuple())) + +if __name__ == '__main__': + main() diff --git a/dartium_tools/print_dart_version.sh b/dartium_tools/print_dart_version.sh new file mode 100755 index 0000000..273e0d5 --- /dev/null +++ b/dartium_tools/print_dart_version.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# Print svn revisions for Dartium internal repositories. The output +# is included in each Dartium archive build / release. +# +# This script is necessary because Dartium maintains its own branches +# of Chromium and WebKit. This script is for temporary use only; it +# will not be integrated back into Chromium. + +function version() { + if [ $(svnversion) == exported ] + then + # git-svn + git svn info | grep Revision | cut -c 11- + else + # svn + echo $(svnversion) + fi +} + +root_dir=$(dirname $0)/../.. +pushd ${root_dir} > /dev/null +echo dartium-chromium: $(version) +cd third_party/WebKit +echo dartium-webkit: $(version) +cd ../../dart/runtime +echo dartium-runtime: $(version) +popd > /dev/null diff --git a/dartium_tools/roll_forward.py b/dartium_tools/roll_forward.py new file mode 100755 index 0000000..de3bc4a --- /dev/null +++ b/dartium_tools/roll_forward.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# +# Copyright 2012 Google Inc. All Rights Reserved. 
+ +import os +import re +import shutil +import subprocess +import sys +import update_patched_files +import urllib + + +def GetLkgr(): + f = urllib.urlopen('http://chromium-status.appspot.com/lkgr') + try: + return int(f.read()) + finally: + f.close() + + +def ReadDepsVars(path): + exec_globals = { + 'Var': lambda name: exec_globals['vars'][name], + } + execfile(path, exec_globals) + return exec_globals['vars'] + + +def GetRevision(path, name): + return int(ReadDepsVars(path)[name]) + + +def main(argv): + CHROMIUM_DEPS_FILE = 'DEPS' + DARTIUM_DEPS_FILE = '../dartium.deps/DEPS' + CHROMIUM_DEPS_COPY = '../dartium.deps/DEPS.chromium' + REV_PATTERN = '"chromium_revision": "(\d+)",' + + deps = file(DARTIUM_DEPS_FILE).read() + current_chrome_rev = int(re.search(REV_PATTERN, deps).group(1)) + + if len(argv) < 2: + next_chrome_rev = GetLkgr() + else: + next_chrome_rev = int(argv[1]) + + print 'Chromium roll: %d -> %d' % (current_chrome_rev, next_chrome_rev) + + if current_chrome_rev == next_chrome_rev: + return + + # Update patched files. + os.chdir('..') + update_patched_files.update_overridden_files(current_chrome_rev, next_chrome_rev) + os.chdir('src') + + # Update DEPS. + subprocess.check_call(['svn', 'up', '-r', str(current_chrome_rev), CHROMIUM_DEPS_FILE]) + current_webkit_rev = GetRevision(CHROMIUM_DEPS_FILE, 'webkit_revision') + subprocess.check_call(['svn', 'up', '-r', str(next_chrome_rev), CHROMIUM_DEPS_FILE]) + next_webkit_rev = GetRevision(CHROMIUM_DEPS_FILE, 'webkit_revision') + + shutil.copyfile(CHROMIUM_DEPS_FILE, CHROMIUM_DEPS_COPY) + deps = deps.replace('"chromium_revision": "%d",' % current_chrome_rev, '"chromium_revision": "%d",' % next_chrome_rev) + file(DARTIUM_DEPS_FILE, 'w').write(deps) + + # Do webkit roll. + WEBKIT_DIR = 'third_party/WebKit' + subprocess.check_call(['git', 'svn', 'rebase'], cwd=WEBKIT_DIR) + print 'WebKit roll: %d -> %d' % (current_webkit_rev, next_webkit_rev) + + if current_webkit_rev < next_webkit_rev: + subprocess.check_call(['bash', + '../../dartium_tools/roll_webkit.sh', + str(current_webkit_rev), str(next_webkit_rev)], cwd=WEBKIT_DIR) + + # Update the checkout. + subprocess.check_call(['gclient', 'sync', '-j17']) + + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/dartium_tools/roll_webkit.sh b/dartium_tools/roll_webkit.sh new file mode 100755 index 0000000..48077f3 --- /dev/null +++ b/dartium_tools/roll_webkit.sh @@ -0,0 +1,30 @@ +#!/bin/bash -e + +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script does WebKit roll provided old and new svn revisions. + +gitSha() { + git log --format=%h --grep "git-svn-id: svn://svn.chromium.org/blink/trunk@${1}" blink/master +} + +git checkout master +git svn rebase +git fetch blink + +old_svn_rev=$1 +new_svn_rev=$2 + +old_rev="$(gitSha $old_svn_rev)" +new_rev="$(gitSha $new_svn_rev)" + +merge_branch_name="merge-${old_svn_rev}-${new_svn_rev}" + +git checkout -b ${merge_branch_name} ${old_rev} +git diff ${old_rev} ${new_rev} --binary | git apply --binary --index +# git cherry-pick --no-commit ${old_rev}..${new_rev} +git commit -m "MERGE: ${old_svn_rev}-${new_svn_rev}." +git rebase --onto master ${merge_branch_name}~1 ${merge_branch_name} +
\ No newline at end of file diff --git a/dartium_tools/supplement.gypi b/dartium_tools/supplement.gypi new file mode 100644 index 0000000..a0e5306 --- /dev/null +++ b/dartium_tools/supplement.gypi @@ -0,0 +1,9 @@ +{ + 'includes': [ + '../third_party/WebKit/Source/bindings/dart/gyp/overrides.gypi', + ], + 'variables': { + # Fixes mysterious forge issue. + 'use_custom_freetype': 0, + }, +} diff --git a/dartium_tools/test.py b/dartium_tools/test.py new file mode 100755 index 0000000..df382f0 --- /dev/null +++ b/dartium_tools/test.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python +# +# Copyright 2011 Google Inc. All Rights Reserved. + +import fnmatch +import optparse +import os +import re +import shutil +import subprocess +import sys +import urllib +import utils + +SCRIPT_TAG = '<script type="application/%s" src="%s"></script>\n' + +DART_TEST_DIR = os.path.join('dart') + +DART_VM_FLAGS = [ + ] +DART_VM_CHECKED_FLAGS = DART_VM_FLAGS + [ + '--enable_type_checks', + '--warning_as_error', + ] + +TEST_DRT_FLAGS = [ + '--compiler=none', + '--runtime=drt', + '--drt=%(drt)s', + '--mode=%(mode)s', + '--arch=%(arch)s', + '--build-directory=%(build_dir)s', + '--report', + '--time', + ] + +TEST_DRT_CHECKED_FLAGS = TEST_DRT_FLAGS + [ + '--checked', + ] + +TEST_DARTIUM_FLAGS = [ + '--compiler=none', + '--runtime=dartium', + '--dartium=%(dartium)s', + '--mode=%(mode)s', + '--build-directory=%(build_dir)s', + '--report', + '--time', + ] + +TEST_DARTIUM_CHECKED_FLAGS = TEST_DARTIUM_FLAGS + [ + '--checked', + ] + +TEST_INFO = { + 'dartium': { + 'core': { + 'checked': TEST_DARTIUM_CHECKED_FLAGS, + 'unchecked': TEST_DARTIUM_FLAGS, + }, + }, + 'drt': { + 'layout': { + 'checked': DART_VM_CHECKED_FLAGS, + 'unchecked': DART_VM_FLAGS, + }, + 'core': { + 'checked': TEST_DRT_CHECKED_FLAGS, + 'unchecked': TEST_DRT_FLAGS, + }, + }, +} + +COMPONENTS = TEST_INFO.keys() +SUITES = [ 'layout', 'core' ] + +def main(): + parser = optparse.OptionParser() + parser.add_option('--mode', dest='mode', + action='store', type='string', + help='Test mode (Debug or Release)') + parser.add_option('--component', dest='component', + default='drt', + action='store', type='string', + help='Execution mode (dartium, drt or all)') + parser.add_option('--suite', dest='suite', + default='all', + action='store', type='string', + help='Test suite (layout, core, or all)') + parser.add_option('--arch', dest='arch', + default='ia32', + action='store', type='string', + help='Target architecture') + parser.add_option('--no-show-results', action='store_false', + default=True, dest='show_results', + help='Don\'t launch a browser with results ' + 'after the tests are done') + parser.add_option('--checked', action='store_true', + default=False, dest='checked', + help='Run Dart code in checked mode') + parser.add_option('--unchecked', action='store_true', + default=False, dest='unchecked', + help='Run Dart code in unchecked mode') + parser.add_option('--buildbot', action='store_true', + default=False, dest='buildbot', + help='Print results in buildbot format') + parser.add_option('--layout-test', dest='layout_test', + default=None, + action='store', type='string', + help='Single layout test to run if set') + parser.add_option('--test-filter', dest='test_filter', + default=None, + action='store', type='string', + help='Test filter for core tests') + parser.add_option('--win-ninja-build', action='store_true', + default=False, dest='is_win_ninja', + help='We are on windows and use ninja for building.') + + (options, args) = parser.parse_args() + mode = options.mode + 
if not (mode in ['Debug', 'Release']): + raise Exception('Invalid test mode') + + if options.component == 'all': + components = COMPONENTS + elif not (options.component in COMPONENTS): + raise Exception('Invalid component %s' % options.component) + else: + components = [ options.component ] + + if options.suite == 'all': + suites = SUITES + elif not (options.suite in SUITES): + raise Exception('Invalid suite %s' % options.suite) + else: + suites = [ options.suite ] + + # If --checked or --unchecked not present, run with both. + checkmodes = ['unchecked', 'checked'] + if options.checked or options.unchecked: + checkmodes = [] + if options.unchecked: checkmodes.append('unchecked') + if options.checked: checkmodes.append('checked') + + pathname = os.path.dirname(sys.argv[0]) + fullpath = os.path.abspath(pathname) + srcpath = os.path.normpath(os.path.join(fullpath, '..')) + + test_mode = '' + timeout = 30000 + if mode == 'Debug': + test_mode = '--debug' + timeout = 60000 + + show_results = '' + if not options.show_results: + show_results = '--no-show-results' + + host_os = utils.guessOS() + if options.is_win_ninja: + host_os = 'win-ninja' + build_root, drt_path, dartium_path, dart_path = { + 'mac': ( + 'out', + os.path.join('Content Shell.app', 'Contents', 'MacOS', 'Content Shell'), + os.path.join('Chromium.app', 'Contents', 'MacOS', 'Chromium'), + 'dart', + ), + 'linux': ('out', 'content_shell', 'chrome', 'dart'), + 'win': ('build', 'content_shell.exe', 'chrome.exe', 'dart.exe'), + 'win-ninja': ('out', 'content_shell.exe', 'chrome.exe', 'dart.exe'), + }[host_os] + + build_dir = os.path.join(srcpath, build_root, mode) + + executable_map = { + 'mode': mode.lower(), + 'build_dir': os.path.relpath(build_dir), + 'drt': os.path.join(build_dir, drt_path), + 'dartium': os.path.join(build_dir, dartium_path), + 'dart': os.path.join(build_dir, dart_path), + 'arch': options.arch, + } + + test_script = os.path.join(srcpath, 'webkit', 'tools', 'layout_tests', + 'run_webkit_tests.py') + + errors = False + for component in components: + for checkmode in checkmodes: + # Capture errors and report at the end. + try: + if ('layout' in suites and + 'layout' in TEST_INFO[component] and + checkmode in TEST_INFO[component]['layout']): + # Run layout tests in this mode + dart_flags = ' '.join(TEST_INFO[component]['layout'][checkmode]) + + if options.layout_test: + test = os.path.join(DART_TEST_DIR, options.layout_test) + else: + test = DART_TEST_DIR + package_root = os.path.join(build_dir, 'packages') + utils.runCommand(['python', + test_script, + test_mode, + show_results, + '--time-out-ms', str(timeout), + # Temporary hack to fix issue with svn vs. svn.bat. 
+ '--builder-name', 'BuildBot', + '--additional-env-var', + 'DART_FLAGS=%s' % dart_flags, + '--additional-env-var', + 'DART_PACKAGE_ROOT=file://%s' % package_root, + test]) + + # Run core dart tests + if ('core' in suites and + 'core' in TEST_INFO[component] and + checkmode in TEST_INFO[component]['core']): + core_flags = TEST_INFO[component]['core'][checkmode] + core_flags = map(lambda flag: flag % executable_map, core_flags) + if options.buildbot: + core_flags = ['--progress=buildbot'] + core_flags + tester = os.path.join(srcpath, 'dart', 'tools', 'test.py') + test_filter = [options.test_filter] if options.test_filter else [] + utils.runCommand(['python', tester] + core_flags + test_filter) + except (StandardError, Exception) as e: + print 'Fail: ' + str(e) + errors = True + + if errors: + return 1 + else: + return 0 + +if __name__ == '__main__': + try: + sys.exit(main()) + except StandardError as e: + print 'Fail: ' + str(e) + sys.exit(1) diff --git a/dartium_tools/update_deps.py b/dartium_tools/update_deps.py new file mode 100755 index 0000000..3cc81d5 --- /dev/null +++ b/dartium_tools/update_deps.py @@ -0,0 +1,226 @@ +#!/usr/bin/python + +# Update Dartium DEPS automatically. + +from datetime import datetime, timedelta +import optparse +import os +import re +from subprocess import Popen, PIPE +import sys +from time import strptime + +# Instructions: +# +# To run locally: +# (a) Create and change to a directory to run the updater in: +# > mkdir /usr/local/google/home/$USER/dartium_deps_updater +# > cd /usr/local/google/home/$USER/dartium_deps_updater +# +# (b) Checkout a copy of the DEPS for the updater to process / update: +# > svn co https://dart.googlecode.com/svn/branches/bleeding_edge/deps/dartium.deps +# +# (c) Checkout dartium_tools (with this script) using the current branch instead of 1650: +# > svn co svn://svn.chromium.org/chrome/branches/dart/1650/src/dartium_tools +# +# (d) If your home directory is remote, consider redefining it for this shell/script: +# > cp -R $HOME/.subversion /usr/local/google/home/$USER +# > export HOME=/usr/local/google/home/$USER +# +# (e) Test by running (Ctrl-C to quit): +# > ./dartium_tools/update_deps.py +# +# (f) Run periodical update: +# > while true; do ./dartium_tools/update_deps.py --force ; sleep 300 ; done + +######################################################################## +# Repositories to auto-update +######################################################################## + +# Each element in this map represents a repository to update. Entries +# take the form: +# (repo_tag: (svn_url, view_url)) +# +# The repo_tag must match the DEPS revision entry. I.e, there must be +# an entry of the form: +# 'dartium_%s_revision' % repo_tag +# to roll forward. +# +# The view_url should be parameterized by revision number. This is +# used to generated the commit message. 
+REPOSITORY_INFO = { + 'webkit': ( + 'http://src.chromium.org/blink/branches/dart/1650', + 'http://src.chromium.org/viewvc/blink/branches/dart/1650?view=rev&revision=%s'), + 'chromium': ( + 'http://src.chromium.org/chrome/branches/dart/1650/src', + 'http://src.chromium.org/viewvc/chrome/branches/dart/1650/src?view=rev&revision=%s'), +} + +REPOSITORIES = REPOSITORY_INFO.keys() + +######################################################################## +# Actions +######################################################################## + +def write_file(filename, content): + f = open(filename, "w") + f.write(content) + f.close() + +def run_cmd(cmd): + print "\n[%s]\n$ %s" % (os.getcwd(), " ".join(cmd)) + pipe = Popen(cmd, stdout=PIPE, stderr=PIPE) + output = pipe.communicate() + if pipe.returncode == 0: + return output[0] + else: + print output[1] + print "FAILED. RET_CODE=%d" % pipe.returncode + sys.exit(pipe.returncode) + +def parse_iso_time(s): + pair = s.rsplit(' ', 1) + d = datetime.strptime(pair[0], '%Y-%m-%d %H:%M:%S') + offset = timedelta(hours=int(pair[1][0:3])) + return d - offset + +def parse_git_log(output, repo): + if len(output) < 4: + return [] + lst = output.split(os.linesep) + lst = [s.strip('\'') for s in lst] + lst = [s.split(',', 3) for s in lst] + lst = [{'repo': repo, + 'rev': s[0], + 'isotime':s[1], + 'author': s[2], + 'utctime': parse_iso_time(s[1]), + 'info': s[3]} for s in lst] + return lst + +def parse_svn_log(output, repo): + lst = output.split(os.linesep) + lst = [s.strip('\'') for s in lst] + output = '_LINESEP_'.join(lst) + lst = output.split('------------------------------------------------------------------------') + lst = [s.replace('_LINESEP_', '\n') for s in lst] + lst = [s.strip('\n') for s in lst] + lst = [s.strip(' ') for s in lst] + lst = [s for s in lst if len(s) > 0] + pattern = re.compile(' \| (\d+) line(s|)') + lst = [pattern.sub(' | ', s) for s in lst] + lst = [s.split(' | ', 3) for s in lst] + lst = [{'repo': repo, + 'rev': s[0].replace('r', ''), + 'author': s[1], + 'isotime':s[2][0:25], + 'utctime': parse_iso_time(s[2][0:25]), + 'info': s[3].split('\n')[2]} for s in lst] + return lst + +def commit_url(repo, rev): + numrev = rev.replace('r', '') + if repo in REPOSITORIES: + (_, view_url) = REPOSITORY_INFO[repo] + return view_url % numrev + else: + raise Exception('Unknown repo'); + +def find_max(revs): + max_time = None + max_position = None + for i, rev in enumerate(revs): + if rev == []: + continue + if max_time is None or rev[0]['utctime'] > max_time: + max_time = rev[0]['utctime'] + max_position = i + return max_position + +def merge_revs(revs): + position = find_max(revs) + if position is None: + return [] + item = revs[position][0] + revs[position] = revs[position][1:] + return [item] + merge_revs(revs) + +def main(): + option_parser = optparse.OptionParser() + option_parser.add_option('', '--force', help="Push DEPS update to server without prompting", action="store_true", dest="force") + options, args = option_parser.parse_args() + + src_dir = "/usr/local/google/home/%s/dartium_deps_updater/dartium.deps" % os.environ["USER"] + os.putenv("GIT_PAGER", "") + + if not os.path.exists(src_dir): + print "Error: prior to running this script, you need to check out a Dartium source tree at" + print " %s" % src_dir + print "Please reserve the above directory for this script and do not use it for other purposes." 
+ sys.exit(1) + + os.chdir(src_dir) + + # parse DEPS + deps = run_cmd(['svn', 'cat', 'https://dart.googlecode.com/svn/branches/bleeding_edge/deps/dartium.deps/DEPS']) + rev_num = {} + for repo in REPOSITORIES: + revision = 'dartium_%s_revision":\s*"(.+)"' % repo + rev_num[repo] = re.search(revision, deps).group(1) + + # update repos + all_revs = [] + for repo, (svn_url, _) in REPOSITORY_INFO.items(): + output = run_cmd(["svn", "log", "-r", "HEAD:%s" % rev_num[repo], svn_url]) + revs = parse_svn_log(output, repo) + if revs and revs[-1]['rev'] == rev_num[repo]: + revs.pop() + all_revs.append(revs) + + pending_updates = merge_revs(all_revs) + pending_updates.reverse() + + print + print "Current DEPS revisions:" + for repo in REPOSITORIES: + print ' dartium_%s_revision=%s' % (repo, rev_num[repo]) + + if len(pending_updates) == 0: + print "DEPS is up-to-date." + sys.exit(0) + else: + print "Pending DEPS updates:" + for s in pending_updates: + print " %s to %s (%s) %s" % (s['repo'], s['rev'], s['isotime'], s['info']) + + # make the next DEPS update + os.chdir(src_dir) + run_cmd(['rm', 'DEPS']) + print run_cmd(['svn', 'update']) + s = pending_updates[0] + + pattern = re.compile('dartium_' + s['repo'] + '_revision":\s*"(.+)"') + new_deps = pattern.sub('dartium_' + s['repo'] + '_revision": "' + s['rev'] + '"', deps) + write_file('DEPS', new_deps) + + commit_log = 'DEPS AutoUpdate: %s to %s (%s) %s\n' % (s['repo'], s['rev'], s['isotime'], s['author']) + commit_log += s['info'] + '\n' + commit_url(s['repo'], s['rev']) + + write_file('commit_log.txt', commit_log) + print run_cmd(['svn', 'diff']) + print + print "Commit log:" + print "---------------------------------------------" + print commit_log + print "---------------------------------------------" + + if not options.force: + print "Ready to push; press Enter to continue or Control-C to abort..." + sys.stdin.readline() + print run_cmd(['svn', 'commit', '--file', 'commit_log.txt']) + print "Done." + + +if '__main__' == __name__: + main() diff --git a/dartium_tools/update_patched_files.py b/dartium_tools/update_patched_files.py new file mode 100755 index 0000000..37e9543 --- /dev/null +++ b/dartium_tools/update_patched_files.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# +# Copyright 2012 Google Inc. All Rights Reserved. + +import overrides_database +import shutil +import subprocess +import sys + + +def svn_update(path, rev): + subprocess.call(['svn', 'up', '-r', str(rev), path]) + + +def update_overridden_files(old_rev, new_rev): + assert old_rev < new_rev + for override in overrides_database.OVERRIDDEN_FILES: + patched = override['modified'] + orig = override['original'] + svn_update(orig, old_rev) + shutil.copyfile(patched, orig) + svn_update(orig, new_rev) + shutil.copyfile(orig, patched) + + +if __name__ == '__main__': + update_overridden_files(int(sys.argv[1]), int(sys.argv[2])) diff --git a/dartium_tools/update_version.py b/dartium_tools/update_version.py new file mode 100755 index 0000000..020eea3 --- /dev/null +++ b/dartium_tools/update_version.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# +# Copyright 2012 Google Inc. All Rights Reserved. + +import subprocess +import sys + +def FetchSVNRevision(): + try: + proc = subprocess.Popen(['svn', 'info'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd='src/dart', + shell=(sys.platform=='win32')) + except OSError: + # command is apparently either not installed or not executable. 
+ return None + if not proc: + return None + + for line in proc.stdout: + line = line.strip() + if not line: + continue + key, val = line.split(': ', 1) + if key == 'Revision': + return val + + return None + + +def main(): + revision = FetchSVNRevision() + path = 'src/chrome/VERSION' + text = file(path).readlines() + text[2] = 'BUILD=d%s\n' % revision + file(path, 'w').writelines(text) + +if __name__ == '__main__': + main() diff --git a/dartium_tools/utils.py b/dartium_tools/utils.py new file mode 100755 index 0000000..e2a50ed --- /dev/null +++ b/dartium_tools/utils.py @@ -0,0 +1,123 @@ +# Copyright 2010 Google Inc. All Rights Reserved. + +# This file contains a set of utilities functions used +# by both SConstruct and other Python-based scripts. + +import commands +import os +import platform +import re +import subprocess + +class ChangedWorkingDirectory(object): + def __init__(self, new_dir): + self._new_dir = new_dir + + def __enter__(self): + self._old_dir = os.getcwd() + os.chdir(self._new_dir) + return self._new_dir + + def __exit__(self, *_): + os.chdir(self._old_dir) + +# Try to guess the host operating system. +def guessOS(): + id = platform.system() + if id == "Linux": + return "linux" + elif id == "Darwin": + return "mac" + elif id == "Windows" or id == "Microsoft": + # On Windows Vista platform.system() can return "Microsoft" with some + # versions of Python, see http://bugs.python.org/issue1082 for details. + return "win" + else: + return None + + +# Try to guess the host architecture. +def guessArchitecture(): + id = platform.machine() + if id.startswith('arm'): + return 'arm' + elif (not id) or (not re.match('(x|i[3-6])86', id) is None): + return 'x86' + elif id == 'i86pc': + return 'x86' + else: + return None + + +# Try to guess the number of cpus on this machine. +def guessCpus(): + if os.path.exists("/proc/cpuinfo"): + return int(commands.getoutput("grep -E '^processor' /proc/cpuinfo | wc -l")) + if os.path.exists("/usr/bin/hostinfo"): + return int(commands.getoutput('/usr/bin/hostinfo | grep "processors are logically available." | awk "{ print \$1 }"')) + win_cpu_count = os.getenv("NUMBER_OF_PROCESSORS") + if win_cpu_count: + return int(win_cpu_count) + return int(os.getenv("PARFAIT_NUMBER_OF_CORES", 2)) + + +# Returns true if we're running under Windows. +def isWindows(): + return guessOS() == 'win32' + +# Reads a text file into an array of strings - one for each +# line. Strips comments in the process. +def readLinesFrom(name): + result = [] + for line in open(name): + if '#' in line: + line = line[:line.find('#')] + line = line.strip() + if len(line) == 0: + continue + result.append(line) + return result + +def listArgCallback(option, opt_str, value, parser): + if value is None: + value = [] + + for arg in parser.rargs: + if arg[:2].startswith('--'): + break + value.append(arg) + + del parser.rargs[:len(value)] + setattr(parser.values, option.dest, value) + + +def getCommandOutput(cmd): + print cmd + pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + output = pipe.communicate() + if pipe.returncode == 0: + return output[0] + else: + print output[1] + raise Exception('Failed to run command. 
return code=%s' % pipe.returncode) + +def runCommand(cmd, env_update=None): + if env_update is None: + env_update = {} + print 'Running: ' + ' '.join(["%s='%s'" % (k, v) for k, v in env_update.iteritems()]) + ' ' + ' '.join(cmd) + env_copy = dict(os.environ.items()) + env_copy.update(env_update) + p = subprocess.Popen(cmd, env=env_copy) + if p.wait() != 0: + raise Exception('Failed to run command. return code=%s' % p.returncode) + +def main(argv): + print "GuessOS() -> ", guessOS() + print "GuessArchitecture() -> ", guessArchitecture() + print "GuessCpus() -> ", guessCpus() + print "IsWindows() -> ", isWindows() + + +if __name__ == "__main__": + import sys + main(sys.argv) diff --git a/net/base/mime_util.cc b/net/base/mime_util.cc index 4679dcc..c0b8d36 100644 --- a/net/base/mime_util.cc +++ b/net/base/mime_util.cc @@ -63,6 +63,7 @@ class MimeUtil : public PlatformMimeUtil { bool IsSupportedNonImageMimeType(const std::string& mime_type) const; bool IsUnsupportedTextMimeType(const std::string& mime_type) const; bool IsSupportedJavascriptMimeType(const std::string& mime_type) const; + bool IsSupportedDartMimeType(const std::string& mime_type) const; bool IsSupportedMimeType(const std::string& mime_type) const; @@ -109,6 +110,7 @@ class MimeUtil : public PlatformMimeUtil { MimeMappings non_image_map_; MimeMappings unsupported_text_map_; MimeMappings javascript_map_; + MimeMappings dart_map_; MimeMappings codecs_map_; StrictMappings strict_format_map_; @@ -150,6 +152,7 @@ static const MimeInfo secondary_mappings[] = { { "application/pdf", "pdf" }, { "application/postscript", "ps,eps,ai" }, { "application/javascript", "js" }, + { "application/dart", "dart" }, { "application/font-woff", "woff" }, { "image/bmp", "bmp" }, { "image/x-icon", "ico" }, @@ -426,6 +429,10 @@ static bool IsCodecSupportedOnAndroid(const std::string& codec) { } #endif +static const char* const supported_dart_types[] = { + "application/dart", +}; + struct MediaFormatStrict { const char* mime_type; const char* codecs_list; @@ -468,6 +475,8 @@ void MimeUtil::InitializeMimeTypeMaps() { unsupported_text_map_.insert(unsupported_text_types[i]); for (size_t i = 0; i < arraysize(supported_javascript_types); ++i) non_image_map_.insert(supported_javascript_types[i]); + for (size_t i = 0; i < arraysize(supported_dart_types); ++i) + non_image_map_.insert(supported_dart_types[i]); for (size_t i = 0; i < arraysize(common_media_types); ++i) non_image_map_.insert(common_media_types[i]); #if defined(USE_PROPRIETARY_CODECS) @@ -485,6 +494,8 @@ void MimeUtil::InitializeMimeTypeMaps() { for (size_t i = 0; i < arraysize(supported_javascript_types); ++i) javascript_map_.insert(supported_javascript_types[i]); + for (size_t i = 0; i < arraysize(supported_dart_types); ++i) + dart_map_.insert(supported_dart_types[i]); for (size_t i = 0; i < arraysize(common_media_codecs); ++i) { #if defined(OS_ANDROID) @@ -542,6 +553,11 @@ bool MimeUtil::IsSupportedJavascriptMimeType( return javascript_map_.find(mime_type) != javascript_map_.end(); } +bool MimeUtil::IsSupportedDartMimeType( + const std::string& mime_type) const { + return dart_map_.find(mime_type) != dart_map_.end(); +} + // Mirrors WebViewImpl::CanShowMIMEType() bool MimeUtil::IsSupportedMimeType(const std::string& mime_type) const { return (mime_type.compare(0, 6, "image/") == 0 && @@ -764,6 +780,10 @@ bool IsSupportedJavascriptMimeType(const std::string& mime_type) { return g_mime_util.Get().IsSupportedJavascriptMimeType(mime_type); } +bool IsSupportedDartMimeType(const std::string& mime_type) { 
+ return g_mime_util.Get().IsSupportedDartMimeType(mime_type); +} + bool IsSupportedMimeType(const std::string& mime_type) { return g_mime_util.Get().IsSupportedMimeType(mime_type); } diff --git a/net/base/mime_util.h b/net/base/mime_util.h index 9662e96..d5e15af 100644 --- a/net/base/mime_util.h +++ b/net/base/mime_util.h @@ -44,6 +44,7 @@ NET_EXPORT bool IsSupportedMediaMimeType(const std::string& mime_type); NET_EXPORT bool IsSupportedNonImageMimeType(const std::string& mime_type); NET_EXPORT bool IsUnsupportedTextMimeType(const std::string& mime_type); NET_EXPORT bool IsSupportedJavascriptMimeType(const std::string& mime_type); +NET_EXPORT bool IsSupportedDartMimeType(const std::string& mime_type); NET_EXPORT bool IsSupportedCertificateMimeType(const std::string& mime_type); // Convenience function. diff --git a/net/base/network_change_notifier_win.cc b/net/base/network_change_notifier_win.cc index 77a72b0..135905e 100644 --- a/net/base/network_change_notifier_win.cc +++ b/net/base/network_change_notifier_win.cc @@ -54,6 +54,7 @@ class NetworkChangeNotifierWin::DnsConfigServiceThread : public base::Thread { NetworkChangeNotifierWin::NetworkChangeNotifierWin() : NetworkChangeNotifier(NetworkChangeCalculatorParamsWin()), is_watching_(false), + network_change_event_handle_(NULL), sequential_failures_(0), weak_factory_(this), dns_config_service_thread_(new DnsConfigServiceThread()), @@ -222,10 +223,15 @@ void NetworkChangeNotifierWin::OnObjectSignaled(HANDLE object) { DCHECK(is_watching_); is_watching_ = false; + DWORD bytes; + BOOL network_changed = GetOverlappedResult(network_change_event_handle_, &addr_overlapped_, &bytes, TRUE); + // Start watching for the next address change. WatchForAddressChange(); - NotifyObservers(); + // If network_changed is 0 an error occured (e.g. GetLastError() = 995 = ERROR_OPERATION_ABORTED). + if (network_changed != 0) + NotifyObservers(); } void NetworkChangeNotifierWin::NotifyObservers() { @@ -295,8 +301,8 @@ bool NetworkChangeNotifierWin::WatchForAddressChangeInternal() { base::Thread::Options(base::MessageLoop::TYPE_IO, 0)); } - HANDLE handle = NULL; - DWORD ret = NotifyAddrChange(&handle, &addr_overlapped_); + + DWORD ret = NotifyAddrChange(&network_change_event_handle_, &addr_overlapped_); if (ret != ERROR_IO_PENDING) return false; diff --git a/net/base/network_change_notifier_win.h b/net/base/network_change_notifier_win.h index 7b75c15..e32b950 100644 --- a/net/base/network_change_notifier_win.h +++ b/net/base/network_change_notifier_win.h @@ -90,6 +90,10 @@ class NET_EXPORT_PRIVATE NetworkChangeNotifierWin base::win::ObjectWatcher addr_watcher_; OVERLAPPED addr_overlapped_; + // This file handle receives network change notifications and is used for calling + // GetOverlappedResult(). + HANDLE network_change_event_handle_; + base::OneShotTimer<NetworkChangeNotifierWin> timer_; // Number of times WatchForAddressChange has failed in a row. |
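Beyond the MIME-type and network-notifier changes above, the patch also surfaces the Dart VM version on about:version: dartium_tools/generate_dart_vm_version.py writes a one-line dartvm_revision.h header, which chrome/browser/ui/webui/version_ui.cc includes and publishes as the "dart_version" string rendered by about_version.html. Below is a small sketch of that stamping step, not part of the patch; the output path and the version value are illustrative, and the real script reads the version via dart/tools/utils.py.

```python
# Sketch only: mirrors updateFile()/main() in dartium_tools/generate_dart_vm_version.py.
import os

def update_file(filename, content):
    # Leave the file untouched if it is already current, so an unchanged
    # version does not force a rebuild of everything that includes the header.
    if os.path.exists(filename) and open(filename).read() == content:
        return
    d = os.path.dirname(filename)
    if d and not os.path.exists(d):
        os.makedirs(d)
    with open(filename, 'w') as f:
        f.write(content)

dart_version = '1.3.0-dev.7.2'  # illustrative value
update_file('dartvm_revision.h',
            '#define DART_VM_REVISION "%s"\n' % dart_version.strip())
```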