author     jamescook@chromium.org <jamescook@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-17 16:50:56 +0000
committer  jamescook@chromium.org <jamescook@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-17 16:50:56 +0000
commit     9a5bb05bd1e76b7923382dc058ca958818febce3 (patch)
tree       a192c51fde864453816cdd9465331a57e1c66443
parent     b8faca2407cf7e3692d7ce9ff0a7aafc9a76cae1 (diff)
Revert accidental dartium code push
This reverts r264517 to r264538 from terry@google.com

BUG=none
TEST=none
NOTRY=true
NOTREECHECKS=true
TBR=terry@google.com

Review URL: https://codereview.chromium.org/239993009

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@264545 0039d316-1c4b-4281-b951-d872f2087c98
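The message above describes backing out a contiguous range of revisions (r264517 through r264538) as a single change. As an illustration only — not the actual commands used for this CL — such a sweep is commonly done by reverting the whole range in one working tree and landing it as one commit. A minimal sketch with plain git, assuming the bad range maps to local commits FIRST..LAST (placeholder hashes, not taken from this change):

# Illustrative only: back out a contiguous range of commits as a single change.
# FIRST and LAST are placeholder hashes (assumptions, not from this CL).
FIRST=1111111   # oldest commit being reverted
LAST=2222222    # newest commit being reverted

# Stage the inverse of every commit in FIRST..LAST without committing each one,
# then land the whole backout as a single commit.
git revert --no-commit "${FIRST}^..${LAST}"
git commit -m "Revert accidental dartium code push"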
-rw-r--r-- | OWNERS | 11
-rw-r--r-- | android_webview/common/aw_content_client.cc | 2
-rw-r--r-- | build/all.gyp | 12
-rw-r--r-- | build/common.gypi | 5
-rw-r--r-- | chrome/browser/resources/about_version.html | 3
-rw-r--r-- | chrome/browser/ui/webui/devtools_ui.cc | 19
-rw-r--r-- | chrome/browser/ui/webui/version_ui.cc | 3
-rw-r--r-- | chrome/common/chrome_content_client.cc | 9
-rwxr-xr-x | chrome/tools/build/mac/verify_order | 2
-rw-r--r-- | codereview.settings | 12
-rw-r--r-- | content/browser/browser_main_loop.cc | 2
-rw-r--r-- | content/browser/webui/url_data_manager_backend.cc | 4
-rw-r--r-- | content/child/simple_webmimeregistry_impl.cc | 7
-rw-r--r-- | content/child/simple_webmimeregistry_impl.h | 2
-rw-r--r-- | content/shell/common/shell_content_client.cc | 1
-rw-r--r-- | content/worker/worker_webkitplatformsupport_impl.cc | 6
-rw-r--r-- | content/worker/worker_webkitplatformsupport_impl.h | 2
-rwxr-xr-x | dartium_tools/archive.py | 204
-rwxr-xr-x | dartium_tools/build.py | 57
-rwxr-xr-x | dartium_tools/buildbot_annotated_steps.py | 374
-rwxr-xr-x | dartium_tools/export_overrides.py | 6
-rwxr-xr-x | dartium_tools/fetch_reference_build.py | 48
-rwxr-xr-x | dartium_tools/generate_dart_vm_version.py | 34
-rwxr-xr-x | dartium_tools/get_chromium_build.py | 171
-rwxr-xr-x | dartium_tools/print_dart_version.sh | 28
-rwxr-xr-x | dartium_tools/roll_forward.py | 84
-rwxr-xr-x | dartium_tools/roll_webkit.sh | 30
-rwxr-xr-x | dartium_tools/set_reference_build_revision.py | 29
-rw-r--r-- | dartium_tools/supplement.gypi | 9
-rwxr-xr-x | dartium_tools/test.py | 242
-rwxr-xr-x | dartium_tools/update_deps.py | 226
-rwxr-xr-x | dartium_tools/update_patched_files.py | 27
-rwxr-xr-x | dartium_tools/update_version.py | 40
-rwxr-xr-x | dartium_tools/utils.py | 123
-rw-r--r-- | net/base/mime_util.cc | 20
-rw-r--r-- | net/base/mime_util.h | 1
-rw-r--r-- | net/base/network_change_notifier_win.cc | 12
-rw-r--r-- | net/base/network_change_notifier_win.h | 4
38 files changed, 20 insertions, 1851 deletions
diff --git a/OWNERS b/OWNERS
index b20a5f88..ace39e2 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,12 +1,5 @@
-asiva@google.com
-blois@google.com
-iposva@google.com
-jacobr@google.com
-kustermann@google.com
-ricow@google.com
-vsm@google.com
-whesse@google.com
-zra@google.com
+darin@chromium.org
+ben@chromium.org
per-file .gitignore=*
per-file .gn=brettw@chromium.org
per-file BUILD.gn=brettw@chromium.org
diff --git a/android_webview/common/aw_content_client.cc b/android_webview/common/aw_content_client.cc
index 90a91e0..b1071c1 100644
--- a/android_webview/common/aw_content_client.cc
+++ b/android_webview/common/aw_content_client.cc
@@ -27,7 +27,7 @@ namespace android_webview {
std::string GetUserAgent() {
// "Version/4.0" had been hardcoded in the legacy WebView.
- std::string product = "Version/4.0 (Dart) " + GetProduct();
+ std::string product = "Version/4.0 " + GetProduct();
if (CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseMobileUserAgent)) {
product += " Mobile";
diff --git a/build/all.gyp b/build/all.gyp
index f95401a..651982f 100644
--- a/build/all.gyp
+++ b/build/all.gyp
@@ -243,18 +243,6 @@
],
}, # target_name: All_syzygy
{
- 'target_name': 'dartium_builder',
- 'type': 'none',
- 'dependencies': [
- '../dart/pkg/pkg.gyp:pkg_packages',
- # '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
- '../chrome/chrome.gyp:chrome',
- '../chrome/chrome.gyp:chromedriver',
- '../content/content_shell_and_tests.gyp:content_shell',
- 'blink_tests',
- ],
- },
- {
# Note: Android uses android_builder_tests below.
# TODO: Consider merging that with this target.
'target_name': 'chromium_builder_tests',
diff --git a/build/common.gypi b/build/common.gypi
index d156799..ebec615 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -420,7 +420,7 @@
'enable_extensions%': 1,
# Enable Google Now.
- 'enable_google_now%': 0,
+ 'enable_google_now%': 1,
# Enable printing support and UI. This variable is used to configure
# which parts of printing will be built. 0 disables printing completely,
@@ -1148,7 +1148,7 @@
# flag allows us to have warnings as errors in general to prevent
# regressions in most modules, while working on the bits that are
# remaining.
- 'win_third_party_warn_as_error%': 'false',
+ 'win_third_party_warn_as_error%': 'true',
# Clang stuff.
'clang%': '<(clang)',
@@ -2727,7 +2727,6 @@
}],
[ 'OS=="mac" or OS=="ios"', {
'xcode_settings': {
- 'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',
'WARNING_CFLAGS!': ['-Wall', '-Wextra'],
},
'conditions': [
diff --git a/chrome/browser/resources/about_version.html b/chrome/browser/resources/about_version.html
index a5c1cb0..4acece3 100644
--- a/chrome/browser/resources/about_version.html
+++ b/chrome/browser/resources/about_version.html
@@ -74,9 +74,6 @@ about:version template page
<span i18n-content="js_version"></span>
</td>
</tr>
- <tr><td class="label">Dart</td>
- <td class="version" id="dart_version" i18n-content="dart_version"></td>
- </tr>
<if expr="not is_android">
<tr><td class="label" i18n-content="flash_plugin"></td>
<td class="version" id="flash_version" i18n-content="flash_version"></td>
diff --git a/chrome/browser/ui/webui/devtools_ui.cc b/chrome/browser/ui/webui/devtools_ui.cc
index 5792f2e..d3cff30 100644
--- a/chrome/browser/ui/webui/devtools_ui.cc
+++ b/chrome/browser/ui/webui/devtools_ui.cc
@@ -95,12 +95,6 @@ std::string GetMimeTypeForPath(const std::string& path) {
return "image/png";
} else if (EndsWith(filename, ".gif", false)) {
return "image/gif";
- } else if (EndsWith(filename, ".svg", false)) {
- return "image/svg+xml";
- } else if (EndsWith(filename, ".ttf", false)) {
- return "application/octet-stream";
- } else if (EndsWith(filename, ".woff", false)) {
- return "application/font-woff";
} else if (EndsWith(filename, ".manifest", false)) {
return "text/cache-manifest";
}
@@ -186,14 +180,6 @@ class DevToolsDataSource : public content::URLDataSource {
}
virtual bool ShouldAddContentSecurityPolicy() const OVERRIDE {
- // Required as the Dart Observatory is bundled as its own iframe hosted on
- // chrome-devtools.
- return true;
- }
-
- virtual bool ShouldDenyXFrameOptions() const OVERRIDE {
- // Required as the Dart Observatory is bundled as its own iframe hosted on
- // chrome-devtools.
return false;
}
@@ -201,11 +187,6 @@ class DevToolsDataSource : public content::URLDataSource {
return true;
}
- virtual std::string GetContentSecurityPolicyFrameSrc() const OVERRIDE {
- // The Dart Observatory is bundled as its own iframe.
- return "frame-src chrome-devtools://devtools/bundled/Observatory/index_devtools.html;";
- }
-
private:
virtual ~DevToolsDataSource() {}
scoped_refptr<net::URLRequestContextGetter> request_context_;
diff --git a/chrome/browser/ui/webui/version_ui.cc b/chrome/browser/ui/webui/version_ui.cc
index a5b790f..e07eb52 100644
--- a/chrome/browser/ui/webui/version_ui.cc
+++ b/chrome/browser/ui/webui/version_ui.cc
@@ -36,8 +36,6 @@
#include "chrome/browser/ui/webui/version_handler_chromeos.h"
#endif
-#include "dartvm_revision.h" // NOLINT
-
namespace {
content::WebUIDataSource* CreateVersionUIDataSource(Profile* profile) {
@@ -56,7 +54,6 @@ content::WebUIDataSource* CreateVersionUIDataSource(Profile* profile) {
html_source->AddString("blink_version", content::GetWebKitVersion());
html_source->AddString("js_engine", "V8");
html_source->AddString("js_version", v8::V8::GetVersion());
- html_source->AddString("dart_version", DART_VM_REVISION);
#if defined(OS_ANDROID)
html_source->AddLocalizedString("application_label",
diff --git a/chrome/common/chrome_content_client.cc b/chrome/common/chrome_content_client.cc
index fc9149b..e245a0e 100644
--- a/chrome/common/chrome_content_client.cc
+++ b/chrome/common/chrome_content_client.cc
@@ -504,14 +504,7 @@ std::string ChromeContentClient::GetProduct() const {
}
std::string ChromeContentClient::GetUserAgent() const {
- std::string product = GetProduct();
- product += " (Dart)";
-#if defined(OS_ANDROID)
- CommandLine* command_line = CommandLine::ForCurrentProcess();
- if (command_line->HasSwitch(switches::kUseMobileUserAgent))
- product += " Mobile";
-#endif
- return webkit_glue::BuildUserAgentFromProduct(product);
+ return ::GetUserAgent();
}
base::string16 ChromeContentClient::GetLocalizedString(int message_id) const {
diff --git a/chrome/tools/build/mac/verify_order b/chrome/tools/build/mac/verify_order
index bd432ba..3d5d644 100755
--- a/chrome/tools/build/mac/verify_order
+++ b/chrome/tools/build/mac/verify_order
@@ -13,8 +13,6 @@
# This script can be used to verify that all of the global text symbols in
# a Mach-O file are accounted for in an order file.
-exit 0
-
if [ ${#} -ne 2 ] ; then
echo "usage: ${0} LAST_SYMBOL MACH_O_FILE" >& 2
exit 1
diff --git a/codereview.settings b/codereview.settings
index ab89519..93335d3 100644
--- a/codereview.settings
+++ b/codereview.settings
@@ -1,4 +1,10 @@
# This file is used by gcl to get repository specific information.
-CODE_REVIEW_SERVER: http://codereview.chromium.org
-CC_LIST: reviews+dom@dartlang.org
-VIEW_VC: http://src.chromium.org/viewvc/multivm/branches/1650/chrome?view=rev&revision=
+CODE_REVIEW_SERVER: codereview.chromium.org
+CC_LIST: chromium-reviews@chromium.org
+VIEW_VC: https://src.chromium.org/viewvc/chrome?view=rev&revision=
+STATUS: http://chromium-status.appspot.com/status
+TRY_ON_UPLOAD: True
+TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try
+GITCL_PREUPLOAD: http://src.chromium.org/viewvc/trunk/tools/depot_tools/git-cl-upload-hook?revision=HEAD&root=chrome
+GITCL_PREDCOMMIT: http://src.chromium.org/viewvc/trunk/tools/depot_tools/git-cl-upload-hook?revision=HEAD&root=chrome
+LINT_IGNORE_REGEX: webkit/api/.*
diff --git a/content/browser/browser_main_loop.cc b/content/browser/browser_main_loop.cc
index 4357be0..9659231 100644
--- a/content/browser/browser_main_loop.cc
+++ b/content/browser/browser_main_loop.cc
@@ -133,7 +133,7 @@ void SetupSandbox(const CommandLine& parsed_command_line) {
scoped_ptr<sandbox::SetuidSandboxClient> setuid_sandbox_client(
sandbox::SetuidSandboxClient::Create());
- const bool want_setuid_sandbox = false &&
+ const bool want_setuid_sandbox =
!parsed_command_line.HasSwitch(switches::kNoSandbox) &&
!parsed_command_line.HasSwitch(switches::kDisableSetuidSandbox) &&
!setuid_sandbox_client->IsDisabledViaEnvironment();
diff --git a/content/browser/webui/url_data_manager_backend.cc b/content/browser/webui/url_data_manager_backend.cc
index a4d1ff7..f2ccafb 100644
--- a/content/browser/webui/url_data_manager_backend.cc
+++ b/content/browser/webui/url_data_manager_backend.cc
@@ -49,12 +49,10 @@ namespace content {
namespace {
-// TODO(jacobr) remove https://www.google.com when the dependency on the
-// Google Charts API is removed from the Dart Observatory.
// TODO(tsepez) remove unsafe-eval when bidichecker_packaged.js fixed.
const char kChromeURLContentSecurityPolicyHeaderBase[] =
"Content-Security-Policy: script-src chrome://resources "
- "'self' https://www.google.com 'unsafe-eval'; ";
+ "'self' 'unsafe-eval'; ";
const char kChromeURLXFrameOptionsHeader[] = "X-Frame-Options: DENY";
diff --git a/content/child/simple_webmimeregistry_impl.cc b/content/child/simple_webmimeregistry_impl.cc
index 205f08a..3b3ebd6 100644
--- a/content/child/simple_webmimeregistry_impl.cc
+++ b/content/child/simple_webmimeregistry_impl.cc
@@ -40,13 +40,6 @@ WebMimeRegistry::SupportsType
WebMimeRegistry::IsSupported : WebMimeRegistry::IsNotSupported;
}
-WebMimeRegistry::SupportsType
- SimpleWebMimeRegistryImpl::supportsDartMIMEType(
- const WebString& mime_type) {
- return net::IsSupportedDartMimeType(ToASCIIOrEmpty(mime_type)) ?
- WebMimeRegistry::IsSupported : WebMimeRegistry::IsNotSupported;
-}
-
// When debugging layout tests failures in the test shell,
// see TestShellWebMimeRegistryImpl.
WebMimeRegistry::SupportsType SimpleWebMimeRegistryImpl::supportsMediaMIMEType(
diff --git a/content/child/simple_webmimeregistry_impl.h b/content/child/simple_webmimeregistry_impl.h
index 2474ea3..4f88015 100644
--- a/content/child/simple_webmimeregistry_impl.h
+++ b/content/child/simple_webmimeregistry_impl.h
@@ -30,8 +30,6 @@ class CONTENT_EXPORT SimpleWebMimeRegistryImpl :
const blink::WebString&);
virtual blink::WebMimeRegistry::SupportsType supportsJavaScriptMIMEType(
const blink::WebString&);
- virtual blink::WebMimeRegistry::SupportsType supportsDartMIMEType(
- const blink::WebString&);
virtual blink::WebMimeRegistry::SupportsType supportsMediaMIMEType(
const blink::WebString&,
const blink::WebString&,
diff --git a/content/shell/common/shell_content_client.cc b/content/shell/common/shell_content_client.cc
index a56bc53..f3bfc52 100644
--- a/content/shell/common/shell_content_client.cc
+++ b/content/shell/common/shell_content_client.cc
@@ -23,7 +23,6 @@ ShellContentClient::~ShellContentClient() {
std::string ShellContentClient::GetUserAgent() const {
std::string product = "Chrome/" CONTENT_SHELL_VERSION;
- product += " (Dart)";
CommandLine* command_line = CommandLine::ForCurrentProcess();
if (command_line->HasSwitch(switches::kUseMobileUserAgent))
product += " Mobile";
diff --git a/content/worker/worker_webkitplatformsupport_impl.cc b/content/worker/worker_webkitplatformsupport_impl.cc
index e078b2a..3a57f94 100644
--- a/content/worker/worker_webkitplatformsupport_impl.cc
+++ b/content/worker/worker_webkitplatformsupport_impl.cc
@@ -242,12 +242,6 @@ WorkerWebKitPlatformSupportImpl::supportsJavaScriptMIMEType(const WebString&) {
}
WebMimeRegistry::SupportsType
-WorkerWebKitPlatformSupportImpl::supportsDartMIMEType(const WebString&) {
- NOTREACHED();
- return WebMimeRegistry::IsSupported;
-}
-
-WebMimeRegistry::SupportsType
WorkerWebKitPlatformSupportImpl::supportsMediaMIMEType(
const WebString&, const WebString&, const WebString&) {
NOTREACHED();
diff --git a/content/worker/worker_webkitplatformsupport_impl.h b/content/worker/worker_webkitplatformsupport_impl.h
index cb66a3f..9fa56d4 100644
--- a/content/worker/worker_webkitplatformsupport_impl.h
+++ b/content/worker/worker_webkitplatformsupport_impl.h
@@ -83,8 +83,6 @@ class WorkerWebKitPlatformSupportImpl : public BlinkPlatformImpl,
const blink::WebString&);
virtual blink::WebMimeRegistry::SupportsType supportsJavaScriptMIMEType(
const blink::WebString&);
- virtual blink::WebMimeRegistry::SupportsType supportsDartMIMEType(
- const blink::WebString&);
virtual blink::WebMimeRegistry::SupportsType supportsMediaMIMEType(
const blink::WebString&,
const blink::WebString&,
diff --git a/dartium_tools/archive.py b/dartium_tools/archive.py
deleted file mode 100755
index 268a264..0000000
--- a/dartium_tools/archive.py
+++ /dev/null
@@ -1,204 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import optparse
-import os
-import shutil
-import subprocess
-import sys
-import utils
-
-HOST_OS = utils.guessOS()
-
-if HOST_OS == 'mac':
- VERSION_FILE = 'Chromium.app/Contents/MacOS/VERSION'
- CONTENTSHELL_FILES = ['Content Shell.app', 'ffmpegsumo.so', 'osmesa.so',
- 'lib']
- CHROMEDRIVER_FILES = ['chromedriver']
-elif HOST_OS == 'linux':
- VERSION_FILE = 'VERSION'
- CONTENTSHELL_FILES = ['content_shell', 'content_shell.pak', 'fonts.conf',
- 'libffmpegsumo.so', 'libosmesa.so', 'lib',
- 'icudtl.dat']
- CHROMEDRIVER_FILES = ['chromedriver']
-elif HOST_OS == 'win':
- VERSION_FILE = 'VERSION'
- # TODO: provide proper list.
- CONTENTSHELL_FILES = ['content_shell.exe', 'AHEM____.ttf']
- CHROMEDRIVER_FILES = ['chromedriver.exe']
-else:
- raise Exception('Unsupported platform')
-
-# Append a file with size of the snapshot.
-CONTENTSHELL_FILES.append('snapshot-size.txt')
-
-
-def GenerateVersionFile():
- # TODO: fix it.
- if HOST_OS == 'win': return
- versionInfo = utils.getCommandOutput(os.path.join('..', '..',
- 'dartium_tools',
- 'print_dart_version.sh'))
- file = open(VERSION_FILE, 'w')
- file.write(versionInfo)
- file.close()
-
-
-def GenerateDartiumFileList(mode, srcpath):
- def blacklisted(name):
- # We include everything if this is a debug build.
- if mode.lower() == 'debug':
- return True
- else:
- # We don't include .debug/.pdb files if this is a release build.
- if name.endswith('.debug') or name.endswith('.pdb'):
- return False
- return True
-
- configFile = os.path.join(srcpath, 'chrome', 'tools', 'build', HOST_OS,
- 'FILES.cfg')
- configNamespace = {}
- execfile(configFile, configNamespace)
- fileList = [file['filename'] for file in configNamespace['FILES']]
-
- # The debug version of dartium on our bots build with
- # 'component=shared_library', so we need to include all libraries
- # (i.e. 'lib/*.so) as we do on the CONTENTSHELL_FILES list above.
- if HOST_OS == 'linux' and mode.lower() == 'debug':
- fileList.append('lib')
-
- # Filter out files we've blacklisted and don't want to include.
- fileList = filter(blacklisted, fileList)
- return fileList
-
-
-def GenerateContentShellFileList(srcpath):
- return CONTENTSHELL_FILES
-
-
-def GenerateChromeDriverFileList(srcpath):
- return CHROMEDRIVER_FILES
-
-
-def ZipDir(zipFile, directory):
- if HOST_OS == 'win':
- cmd = os.path.normpath(os.path.join(
- os.path.dirname(__file__),
- '../third_party/lzma_sdk/Executable/7za.exe'))
- options = ['a', '-r', '-tzip']
- else:
- cmd = 'zip'
- options = ['-yr']
- utils.runCommand([cmd] + options + [zipFile, directory])
-
-
-def GenerateZipFile(zipFile, stageDir, fileList):
- # Stage files.
- for fileName in fileList:
- fileName = fileName.rstrip(os.linesep)
- targetName = os.path.join(stageDir, fileName)
- try:
- targetDir = os.path.dirname(targetName)
- if not os.path.exists(targetDir):
- os.makedirs(targetDir)
- if os.path.isdir(fileName):
- # TODO: This is a hack to handle duplicates on the fileList of the
- # form: [ 'lib/foo.so', 'lib' ]
- if os.path.exists(targetName) and os.path.isdir(targetName):
- shutil.rmtree(targetName)
- shutil.copytree(fileName, targetName)
- elif os.path.exists(fileName):
- shutil.copy2(fileName, targetName)
- except:
- import traceback
- print 'Troubles processing %s [cwd=%s]: %s' % (fileName, os.getcwd(), traceback.format_exc())
-
- ZipDir(zipFile, stageDir)
-
-
-def StageAndZip(fileList, target):
- if not target:
- return None
-
- stageDir = target
- zipFile = stageDir + '.zip'
-
- # Cleanup old files.
- if os.path.exists(stageDir):
- shutil.rmtree(stageDir)
- os.mkdir(stageDir)
- oldFiles = glob.glob(target.split('-')[0] + '*.zip')
- for oldFile in oldFiles:
- os.remove(oldFile)
-
- GenerateVersionFile()
- GenerateZipFile(zipFile, stageDir, fileList)
- print 'last change: %s' % (zipFile)
-
- # Clean up. Buildbot disk space is limited.
- shutil.rmtree(stageDir)
-
- return zipFile
-
-
-def Archive(srcpath, mode, dartium_target, contentshell_target,
- chromedriver_target, is_win_ninja=False):
- # We currently build using ninja on mac debug.
- if HOST_OS == 'mac':
- releaseDir = os.path.join(srcpath, 'out', mode)
- # Also package dynamic libraries.
- extra_files = [file for file in os.listdir(releaseDir) if file.endswith('.dylib')]
- elif HOST_OS == 'linux':
- releaseDir = os.path.join(srcpath, 'out', mode)
- extra_files = []
- elif HOST_OS == 'win':
- if is_win_ninja:
- releaseDir = os.path.join(srcpath, 'out', mode)
- else:
- releaseDir = os.path.join(srcpath, 'out', mode)
- # issue(16760) - we _need_ to fix our parsing of the FILES.cfg
- extra_files = [file for file in os.listdir(releaseDir) if file.endswith('manifest')]
- else:
- raise Exception('Unsupported platform')
- os.chdir(releaseDir)
-
- dartium_zip = StageAndZip(
- GenerateDartiumFileList(mode, srcpath) + extra_files, dartium_target)
- contentshell_zip = StageAndZip(GenerateContentShellFileList(srcpath) + extra_files,
- contentshell_target)
- chromedriver_zip = StageAndZip(GenerateChromeDriverFileList(srcpath) + extra_files,
- chromedriver_target)
- return (dartium_zip, contentshell_zip, chromedriver_zip)
-
-
-def main():
- pathname = os.path.dirname(sys.argv[0])
- fullpath = os.path.abspath(pathname)
- srcpath = os.path.join(fullpath, '..')
-
- parser = optparse.OptionParser()
- parser.add_option('--dartium', dest='dartium',
- action='store', type='string',
- help='dartium archive name')
- parser.add_option('--contentshell', dest='contentshell',
- action='store', type='string',
- help='content shell archive name')
- parser.add_option('--chromedriver', dest='chromedriver',
- action='store', type='string',
- help='chromedriver archive name')
- parser.add_option('--mode', dest='mode',
- default='Release',
- action='store', type='string',
- help='(Release|Debug)')
- (options, args) = parser.parse_args()
- Archive(srcpath, options.mode, options.dartium, options.contentshell,
- options.chromedriver)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/dartium_tools/build.py b/dartium_tools/build.py
deleted file mode 100755
index df511b0..0000000
--- a/dartium_tools/build.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc. All Rights Reserved.
-
-# This file is used by the buildbot.
-
-import optparse
-import os.path
-import utils
-
-ALL_TARGETS = [
- 'content_shell',
- 'chrome',
- 'blink_tests',
- 'pkg_packages',
-]
-
-def main():
- parser = optparse.OptionParser()
- parser.add_option('--target', dest='target',
- default='all',
- action='store', type='string',
- help='Target (%s)' % ', '.join(ALL_TARGETS))
- parser.add_option('--mode', dest='mode',
- action='store', type='string',
- help='Build mode (Debug or Release)')
- parser.add_option('--clobber', dest='clobber',
- action='store_true',
- help='Clobber the output directory')
- parser.add_option('-j', '--jobs', dest='jobs',
- action='store',
- help='Number of jobs')
- (options, args) = parser.parse_args()
- mode = options.mode
- if options.jobs:
- jobs = options.jobs
- else:
- jobs = utils.guessCpus()
- if not (mode in ['Debug', 'Release']):
- raise Exception('Invalid build mode')
-
- if options.target == 'all':
- targets = ALL_TARGETS
- else:
- targets = [options.target]
-
- if options.clobber:
- utils.runCommand(['rm', '-rf', 'out'])
-
- utils.runCommand(['ninja',
- '-j%s' % jobs,
- '-C',
- os.path.join('out', mode)]
- + targets)
-
-if __name__ == '__main__':
- main()
diff --git a/dartium_tools/buildbot_annotated_steps.py b/dartium_tools/buildbot_annotated_steps.py
deleted file mode 100755
index ed090c0..0000000
--- a/dartium_tools/buildbot_annotated_steps.py
+++ /dev/null
@@ -1,374 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Chromium buildbot steps
-
-Run the Dart layout tests.
-"""
-
-import os
-import platform
-import re
-import shutil
-import socket
-import subprocess
-import sys
-import imp
-
-BUILDER_NAME = 'BUILDBOT_BUILDERNAME'
-REVISION = 'BUILDBOT_REVISION'
-BUILDER_PATTERN = (r'^dartium-(mac|lucid64|lucid32|win)'
- r'-(full|inc|debug)(-ninja)?(-(be|dev|stable|integration))?$')
-
-if platform.system() == 'Windows':
- GSUTIL = 'e:/b/build/scripts/slave/gsutil.bat'
-else:
- GSUTIL = '/b/build/scripts/slave/gsutil'
-ACL = 'public-read'
-GS_SITE = 'gs://'
-GS_URL = 'https://sandbox.google.com/storage/'
-GS_DIR = 'dartium-archive'
-LATEST = 'latest'
-CONTINUOUS = 'continuous'
-
-REVISION_FILE = 'chrome/browser/ui/webui/dartvm_revision.h'
-
-# Add dartium tools and build/util to python path.
-SRC_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-TOOLS_PATH = os.path.join(SRC_PATH, 'dartium_tools')
-DART_PATH = os.path.join(SRC_PATH, 'dart')
-BUILD_UTIL_PATH = os.path.join(SRC_PATH, 'build/util')
-# We limit testing on drt since it takes a long time to run
-DRT_FILTER = 'html'
-
-
-sys.path.extend([TOOLS_PATH, BUILD_UTIL_PATH])
-import archive
-import utils
-
-bot_utils = imp.load_source('bot_utils',
- os.path.join(DART_PATH, 'tools', 'bots', 'bot_utils.py'))
-
-def DartArchiveFile(local_path, remote_path, create_md5sum=False):
- # Copy it to the new unified gs://dart-archive bucket
- # TODO(kustermann/ricow): Remove all the old archiving code, once everything
- # points to the new location
- gsutil = bot_utils.GSUtil()
- gsutil.upload(local_path, remote_path, public=True)
- if create_md5sum:
- # 'local_path' may have a different filename than 'remote_path'. So we need
- # to make sure the *.md5sum file contains the correct name.
- assert '/' in remote_path and not remote_path.endswith('/')
- mangled_filename = remote_path[remote_path.rfind('/') + 1:]
- local_md5sum = bot_utils.CreateChecksumFile(local_path, mangled_filename)
- gsutil.upload(local_md5sum, remote_path + '.md5sum', public=True)
-
-def UploadDartiumVariant(revision, name, channel, arch, mode, zip_file):
- name = name.replace('drt', 'content_shell')
- system = sys.platform
-
- namer = bot_utils.GCSNamer(channel, bot_utils.ReleaseType.RAW)
- remote_path = namer.dartium_variant_zipfilepath(revision, name, system, arch,
- mode)
- DartArchiveFile(zip_file, remote_path, create_md5sum=True)
- return remote_path
-
-def ExecuteCommand(cmd):
- """Execute a command in a subprocess.
- """
- print 'Executing: ' + ' '.join(cmd)
- try:
- pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (output, error) = pipe.communicate()
- if pipe.returncode != 0:
- print 'Execution failed: ' + str(error)
- return (pipe.returncode, output)
- except:
- import traceback
- print 'Execution raised exception:', traceback.format_exc()
- return (-1, '')
-
-
-# TODO: Instead of returning a tuple we should make a class with these fields.
-def GetBuildInfo():
- """Returns a tuple (name, dart_revision, version, mode, arch, channel,
- is_full) where:
- - name: A name for the build - the buildbot host if a buildbot.
- - dart_revision: The dart revision.
- - version: A version string corresponding to this build.
- - mode: 'Debug' or 'Release'
- - arch: target architecture
- - channel: the channel this build is happening on
- - is_full: True if this is a full build.
- """
- os.chdir(SRC_PATH)
-
- name = None
- version = None
- mode = 'Release'
-
- # Populate via builder environment variables.
- name = os.environ[BUILDER_NAME]
-
- # We need to chdir() to src/dart in order to get the correct revision number.
- with utils.ChangedWorkingDirectory(DART_PATH):
- dart_tools_utils = imp.load_source('dart_tools_utils',
- os.path.join('tools', 'utils.py'))
- dart_revision = dart_tools_utils.GetSVNRevision()
-
- version = dart_revision + '.0'
- is_incremental = '-inc' in name
- is_win_ninja = 'win-inc-ninja' in name
- is_full = False
-
- pattern = re.match(BUILDER_PATTERN, name)
- assert pattern
- arch = 'x64' if pattern.group(1) == 'lucid64' else 'ia32'
- if pattern.group(2) == 'debug':
- mode = 'Debug'
- is_full = pattern.group(2) == 'full'
- channel = pattern.group(5)
- if not channel:
- channel = 'be'
-
- # Fall back if not on builder.
- if not name:
- name = socket.gethostname().split('.')[0]
-
- return (name, dart_revision, version, mode, arch, channel, is_full,
- is_incremental, is_win_ninja)
-
-
-def RunDartTests(mode, component, suite, arch, checked, test_filter=None,
- is_win_ninja=False):
- """Runs the Dart WebKit Layout tests.
- """
- cmd = [sys.executable]
- script = os.path.join(TOOLS_PATH, 'test.py')
- cmd.append(script)
- cmd.append('--buildbot')
- cmd.append('--mode=' + mode)
- cmd.append('--component=' + component)
- cmd.append('--suite=' + suite)
- cmd.append('--arch=' + arch)
- cmd.append('--' + checked)
- cmd.append('--no-show-results')
-
- if is_win_ninja:
- cmd.append('--win-ninja-build')
-
- if test_filter:
- cmd.append('--test-filter=' + test_filter)
-
- status = subprocess.call(cmd)
- if status != 0:
- print '@@@STEP_FAILURE@@@'
- return status
-
-
-def UploadDartTestsResults(layout_test_results_dir, name, version,
- component, checked):
- """Uploads test results to google storage.
- """
- print ('@@@BUILD_STEP archive %s_layout_%s_tests results@@@' %
- (component, checked))
- dir_name = os.path.dirname(layout_test_results_dir)
- base_name = os.path.basename(layout_test_results_dir)
- cwd = os.getcwd()
- os.chdir(dir_name)
-
- archive_name = 'layout_test_results.zip'
- archive.ZipDir(archive_name, base_name)
-
- target = '/'.join([GS_DIR, 'layout-test-results', name, component + '-' +
- checked + '-' + version + '.zip'])
- status = UploadArchive(os.path.abspath(archive_name), GS_SITE + target)
- os.remove(archive_name)
- if status == 0:
- print ('@@@STEP_LINK@download@' + GS_URL + target + '@@@')
- else:
- print '@@@STEP_FAILURE@@@'
- os.chdir(cwd)
-
-
-def ListArchives(pattern):
- """List the contents in Google storage matching the file pattern.
- """
- cmd = [GSUTIL, 'ls', pattern]
- (status, output) = ExecuteCommand(cmd)
- if status != 0:
- return []
- return output.split(os.linesep)
-
-
-def RemoveArchives(archives):
- """Remove the list of archives in Google storage.
- """
- for archive in archives:
- if archive.find(GS_SITE) == 0:
- cmd = [GSUTIL, 'rm', archive.rstrip()]
- (status, _) = ExecuteCommand(cmd)
- if status != 0:
- return status
- return 0
-
-
-def UploadArchive(source, target):
- """Upload an archive zip file to Google storage.
- """
-
- # Upload file.
- cmd = [GSUTIL, 'cp', source, target]
- (status, output) = ExecuteCommand(cmd)
- if status != 0:
- return status
- print 'Uploaded: ' + output
-
- # Set ACL.
- if ACL is not None:
- cmd = [GSUTIL, 'setacl', ACL, target]
- (status, output) = ExecuteCommand(cmd)
- return status
-
-
-def main():
- (dartium_bucket, dart_revision, version, mode, arch, channel,
- is_full, is_incremental, is_win_ninja) = GetBuildInfo()
- drt_bucket = dartium_bucket.replace('dartium', 'drt')
- chromedriver_bucket = dartium_bucket.replace('dartium', 'chromedriver')
-
- def archiveAndUpload(archive_latest=False):
- print '@@@BUILD_STEP dartium_generate_archive@@@'
- cwd = os.getcwd()
- dartium_archive = dartium_bucket + '-' + version
- drt_archive = drt_bucket + '-' + version
- chromedriver_archive = chromedriver_bucket + '-' + version
- dartium_zip, drt_zip, chromedriver_zip = \
- archive.Archive(SRC_PATH, mode, dartium_archive,
- drt_archive, chromedriver_archive,
- is_win_ninja=is_win_ninja)
- status = upload('dartium', dartium_bucket, os.path.abspath(dartium_zip),
- archive_latest=archive_latest)
- if status == 0:
- status = upload('drt', drt_bucket, os.path.abspath(drt_zip),
- archive_latest=archive_latest)
- if status == 0:
- status = upload('chromedriver', chromedriver_bucket,
- os.path.abspath(chromedriver_zip),
- archive_latest=archive_latest)
- os.chdir(cwd)
- if status != 0:
- print '@@@STEP_FAILURE@@@'
- return status
-
- def upload(module, bucket, zip_file, archive_latest=False):
- status = 0
-
- # We archive to the new location on all builders except for -inc builders.
- if not is_incremental:
- print '@@@BUILD_STEP %s_upload_archive_new @@@' % module
- # We archive the full builds to gs://dart-archive/
- revision = 'latest' if archive_latest else dart_revision
- remote_path = UploadDartiumVariant(revision, module, channel, arch,
- mode.lower(), zip_file)
- print '@@@STEP_LINK@download@' + remote_path + '@@@'
-
- # We archive to the old locations only for bleeding_edge builders
- if channel == 'be':
- _, filename = os.path.split(zip_file)
- if not archive_latest:
- target = '/'.join([GS_DIR, bucket, filename])
- print '@@@BUILD_STEP %s_upload_archive@@@' % module
- status = UploadArchive(zip_file, GS_SITE + target)
- print '@@@STEP_LINK@download@' + GS_URL + target + '@@@'
- else:
- print '@@@BUILD_STEP %s_upload_latest@@@' % module
- # Clear latest for this build type.
- old = '/'.join([GS_DIR, LATEST, bucket + '-*'])
- old_archives = ListArchives(GS_SITE + old)
-
- # Upload the new latest and remove unnecessary old ones.
- target = GS_SITE + '/'.join([GS_DIR, LATEST, filename])
- status = UploadArchive(zip_file, target)
- if status == 0:
- RemoveArchives(
- [iarch for iarch in old_archives if iarch != target])
- else:
- print 'Upload failed'
-
- # Upload unversioned name to continuous site for incremental
- # builds.
- if '-inc' in bucket:
- continuous_name = bucket[:bucket.find('-inc')]
- target = GS_SITE + '/'.join([GS_DIR, CONTINUOUS,
- continuous_name + '.zip'])
- status = UploadArchive(zip_file, target)
-
- print ('@@@BUILD_STEP %s_upload_archive is over (status = %s)@@@' %
- (module, status))
-
- return status
-
- def test(component, suite, checked, test_filter=None):
- """Test a particular component (e.g., dartium or frog).
- """
- print '@@@BUILD_STEP %s_%s_%s_tests@@@' % (component, suite, checked)
- sys.stdout.flush()
- layout_test_results_dir = os.path.join(SRC_PATH, 'webkit', mode,
- 'layout-test-results')
- shutil.rmtree(layout_test_results_dir, ignore_errors=True)
- status = RunDartTests(mode, component, suite, arch, checked,
- test_filter=test_filter, is_win_ninja=is_win_ninja)
-
- if suite == 'layout' and status != 0:
- UploadDartTestsResults(layout_test_results_dir, dartium_bucket, version,
- component, checked)
- return status
-
- result = 0
-
- # Archive to the revision bucket unless integration build
- if channel != 'integration':
- result = archiveAndUpload(archive_latest=False)
-
- # On dev/stable we archive to the latest bucket as well
- if channel != 'be':
- result = archiveAndUpload(archive_latest=True) or result
-
- # Run layout tests
- if mode == 'Release' or platform.system() != 'Darwin':
- result = test('drt', 'layout', 'unchecked') or result
- result = test('drt', 'layout', 'checked') or result
-
- # Run dartium tests
- result = test('dartium', 'core', 'unchecked') or result
- result = test('dartium', 'core', 'checked') or result
-
- # Run ContentShell tests
- # NOTE: We don't run ContentShell tests on dartium-*-inc builders to keep
- # cycle times down.
- if not is_incremental:
- # If we run all checked tests on dartium, we restrict the number of
- # unchecked tests on drt to DRT_FILTER
- result = test('drt', 'core', 'unchecked', test_filter=DRT_FILTER) or result
- result = test('drt', 'core', 'checked') or result
-
- # On the 'be' channel, we only archive to the latest bucket if all tests ran
- # successfull.
- if result == 0 and channel == 'be':
- result = archiveAndUpload(archive_latest=True) or result
-
- # BIG HACK
- # Normal ninja clobbering does not work due to symlinks/python on windows
- # Full clobbering before building does not work since it will destroy
- # the ninja build files
- # So we basically clobber at the end here
- if is_full and platform.system() == 'Windows':
- print '@@@BUILD_STEP Dartium hackish clobber@@@'
- shutil.rmtree(os.path.join(SRC_PATH, 'out'), ignore_errors=True)
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/dartium_tools/export_overrides.py b/dartium_tools/export_overrides.py
deleted file mode 100755
index 0c046bf..0000000
--- a/dartium_tools/export_overrides.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2012 Google Inc. All Rights Reserved.
-
-# TODO(vsm): Remove this file once we remove the reference from
-# dartium.deps/DEPS.
diff --git a/dartium_tools/fetch_reference_build.py b/dartium_tools/fetch_reference_build.py
deleted file mode 100755
index 26a5c54..0000000
--- a/dartium_tools/fetch_reference_build.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Fetches an archived chromium build into
- src/chrome/tools/test/reference_build unless
- src/chrome/tools/test/reference_build/REQUESTED_REVISION is the same as
- src/chrome/tools/test/reference_build/CURRENT_REVISION.
- Must be run from the root of a Dartium or multivm checkout.
-
-Usage:
- $ ./src/dartium_tools/fetch_reference_build_revision.py
-"""
-
-import os
-import subprocess
-import sys
-
-def main(argv):
- dirname = os.path.join('src', 'chrome', 'tools',
- 'test', 'reference_build')
- request = os.path.join(dirname, 'REQUESTED_REVISION')
- found = os.path.join(dirname, 'CURRENT_REVISION')
- if not os.path.exists(request):
- return
- with file(request, 'r') as f:
- request_revision = f.read()
-
- if os.path.exists(found):
- with file(found, 'r') as f:
- found_revision = f.read()
- if found_revision == request_revision:
- return
-
- get_script = os.path.join('src', 'dartium_tools', 'get_chromium_build.py')
- get_script = os.path.abspath(get_script)
- exit_code = subprocess.call(['python', get_script,
- '-r', request_revision,
- '-t', dirname])
- if exit_code == 0:
- with file(found, 'w') as f:
- f.write(request_revision)
- return exit_code
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv))
diff --git a/dartium_tools/generate_dart_vm_version.py b/dartium_tools/generate_dart_vm_version.py
deleted file mode 100755
index d5183e26..0000000
--- a/dartium_tools/generate_dart_vm_version.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import datetime
-import imp
-import os
-import subprocess
-import sys
-import time
-
-utils = imp.load_source('utils', 'src/dart/tools/utils.py')
-
-
-REVISION_FILE = 'src/chrome/browser/ui/webui/dartvm_revision.h'
-EXPIRATION_FILE = 'src/third_party/WebKit/Source/bindings/dart/ExpirationTimeSecsSinceEpoch.time_t'
-
-def updateFile(filename, content):
- if os.path.exists(filename):
- if file(filename, 'r').read() == content:
- return
- else:
- dir = os.path.dirname(filename)
- if not os.path.exists(dir):
- os.makedirs(dir)
- file(filename, 'w').write(content)
-
-def main():
- dart_version = utils.GetVersion()
- version_string = '#define DART_VM_REVISION "%s"\n' % dart_version.strip()
-
- updateFile(REVISION_FILE, version_string)
-
- expiration_date = datetime.date.today() + datetime.timedelta(weeks=12)
- updateFile(EXPIRATION_FILE, "%dLL\n" % time.mktime(expiration_date.timetuple()))
-
-if __name__ == '__main__':
- main()
diff --git a/dartium_tools/get_chromium_build.py b/dartium_tools/get_chromium_build.py
deleted file mode 100755
index edb8bac..0000000
--- a/dartium_tools/get_chromium_build.py
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Gets a Chromium archived build, and unpacks it
- into a target directory.
-
- Use -r option to specify the revison number
- Use -t option to specify the directory to unzip the build into.
-
-Usage:
- $ /path/to/get_chromium_build.py -r <revision> -t <target>
-"""
-
-import logging
-import optparse
-import os
-import platform
-import shutil
-import subprocess
-import sys
-import time
-import urllib
-import urllib2
-import zipfile
-
-# Example chromium build location:
-# gs://chromium-browser-snapshots/Linux_x64/228977/chrome-linux.zip
-CHROMIUM_URL_FMT = ('http://commondatastorage.googleapis.com/'
- 'chromium-browser-snapshots/%s/%s/%s')
-
-class BuildUpdater(object):
- _PLATFORM_PATHS_MAP = {
- 'Linux': { 'zipfiles': ['chrome-linux.zip'],
- 'folder': 'chrome_linux',
- 'archive_path': 'Linux_x64'},
- 'Darwin': {'zipfiles': ['chrome-mac.zip'],
- 'folder': 'chrome_mac',
- 'archive_path': 'Mac'},
- 'Windows': {'zipfiles': ['chrome-win32.zip',
- 'chrome-win32-syms.zip'],
- 'folder': 'chrome_win',
- 'archive_path': 'Win'}}
-
- def __init__(self, options):
- platform_data = BuildUpdater._PLATFORM_PATHS_MAP[platform.system()]
- self._zipfiles = platform_data['zipfiles']
- self._folder = platform_data['folder']
- self._archive_path = platform_data['archive_path']
- self._revision = int(options.revision)
- self._target_dir = options.target_dir
- self._download_dir = os.path.join(self._target_dir, 'downloads')
-
- def _GetBuildUrl(self, revision, filename):
- return CHROMIUM_URL_FMT % (self._archive_path, revision, filename)
-
- def _FindBuildRevision(self, revision, filename):
- MAX_REVISIONS_PER_BUILD = 100
- for revision_guess in xrange(revision, revision + MAX_REVISIONS_PER_BUILD):
- if self._DoesBuildExist(revision_guess, filename):
- return revision_guess
- else:
- time.sleep(.1)
- return None
-
- def _DoesBuildExist(self, revision_guess, filename):
- url = self._GetBuildUrl(revision_guess, filename)
-
- r = urllib2.Request(url)
- r.get_method = lambda: 'HEAD'
- try:
- urllib2.urlopen(r)
- return True
- except urllib2.HTTPError, err:
- if err.code == 404:
- return False
-
- def _DownloadBuild(self):
- if not os.path.exists(self._download_dir):
- os.makedirs(self._download_dir)
- for zipfile in self._zipfiles:
- build_revision = self._FindBuildRevision(self._revision, zipfile)
- if not build_revision:
- logging.critical('Failed to find %s build for r%s\n',
- self._archive_path,
- self._revision)
- sys.exit(1)
- url = self._GetBuildUrl(build_revision, zipfile)
- logging.info('Downloading %s', url)
- r = urllib2.urlopen(url)
- with file(os.path.join(self._download_dir, zipfile), 'wb') as f:
- f.write(r.read())
-
- def _UnzipFile(self, dl_file, dest_dir):
- if not zipfile.is_zipfile(dl_file):
- return False
- logging.info('Unzipping %s', dl_file)
- with zipfile.ZipFile(dl_file, 'r') as z:
- for content in z.namelist():
- dest = os.path.join(dest_dir, content[content.find('/')+1:])
- # Create dest parent dir if it does not exist.
- if not os.path.isdir(os.path.dirname(dest)):
- logging.info('Making %s', dest)
- os.makedirs(os.path.dirname(dest))
- # If dest is just a dir listing, do nothing.
- if not os.path.basename(dest):
- continue
- with z.open(content) as unzipped_content:
- logging.info('Extracting %s to %s (%s)', content, dest, dl_file)
- with file(dest, 'wb') as dest_file:
- dest_file.write(unzipped_content.read())
- permissions = z.getinfo(content).external_attr >> 16
- if permissions:
- os.chmod(dest, permissions)
- return True
-
- def _ClearDir(self, dir):
- """Clears all files in |dir| except for hidden files and folders."""
- for root, dirs, files in os.walk(dir):
- # Skip hidden files and folders (like .svn and .git).
- files = [f for f in files if f[0] != '.']
- dirs[:] = [d for d in dirs if d[0] != '.']
-
- for f in files:
- os.remove(os.path.join(root, f))
-
- def _ExtractBuild(self):
- dest_dir = os.path.join(self._target_dir, self._folder)
- self._ClearDir(dest_dir)
- for root, _, dl_files in os.walk(os.path.join(self._download_dir)):
- for dl_file in dl_files:
- dl_file = os.path.join(root, dl_file)
- if not self._UnzipFile(dl_file, dest_dir):
- logging.info('Copying %s to %s', dl_file, dest_dir)
- shutil.copy(dl_file, dest_dir)
- shutil.rmtree(self._download_dir)
-
- def DownloadAndUpdateBuild(self):
- self._DownloadBuild()
- self._ExtractBuild()
-
-
-def ParseOptions(argv):
- parser = optparse.OptionParser()
- usage = 'usage: %prog <options>'
- parser.set_usage(usage)
- parser.add_option('-r', dest='revision',
- help='Revision to download.')
- parser.add_option('-t', dest='target_dir',
- help='Target directory for unzipped Chromium.')
-
- (options, _) = parser.parse_args(argv)
- if not options.revision:
- logging.critical('Must specify -r.\n')
- sys.exit(1)
- if not options.target_dir:
- logging.critical('Must specify -t.\n')
- sys.exit(1)
- return options
-
-def main(argv):
- logging.getLogger().setLevel(logging.DEBUG)
- options = ParseOptions(argv)
- b = BuildUpdater(options)
- b.DownloadAndUpdateBuild()
- logging.info('Successfully got archived Chromium build.')
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv))
diff --git a/dartium_tools/print_dart_version.sh b/dartium_tools/print_dart_version.sh
deleted file mode 100755
index 273e0d5..0000000
--- a/dartium_tools/print_dart_version.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-
-# Print svn revisions for Dartium internal repositories. The output
-# is included in each Dartium archive build / release.
-#
-# This script is necessary because Dartium maintains its own branches
-# of Chromium and WebKit. This script is for temporary use only; it
-# will not be integrated back into Chromium.
-
-function version() {
- if [ $(svnversion) == exported ]
- then
- # git-svn
- git svn info | grep Revision | cut -c 11-
- else
- # svn
- echo $(svnversion)
- fi
-}
-
-root_dir=$(dirname $0)/../..
-pushd ${root_dir} > /dev/null
-echo dartium-chromium: $(version)
-cd third_party/WebKit
-echo dartium-webkit: $(version)
-cd ../../dart/runtime
-echo dartium-runtime: $(version)
-popd > /dev/null
diff --git a/dartium_tools/roll_forward.py b/dartium_tools/roll_forward.py
deleted file mode 100755
index de3bc4a..0000000
--- a/dartium_tools/roll_forward.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2012 Google Inc. All Rights Reserved.
-
-import os
-import re
-import shutil
-import subprocess
-import sys
-import update_patched_files
-import urllib
-
-
-def GetLkgr():
- f = urllib.urlopen('http://chromium-status.appspot.com/lkgr')
- try:
- return int(f.read())
- finally:
- f.close()
-
-
-def ReadDepsVars(path):
- exec_globals = {
- 'Var': lambda name: exec_globals['vars'][name],
- }
- execfile(path, exec_globals)
- return exec_globals['vars']
-
-
-def GetRevision(path, name):
- return int(ReadDepsVars(path)[name])
-
-
-def main(argv):
- CHROMIUM_DEPS_FILE = 'DEPS'
- DARTIUM_DEPS_FILE = '../dartium.deps/DEPS'
- CHROMIUM_DEPS_COPY = '../dartium.deps/DEPS.chromium'
- REV_PATTERN = '"chromium_revision": "(\d+)",'
-
- deps = file(DARTIUM_DEPS_FILE).read()
- current_chrome_rev = int(re.search(REV_PATTERN, deps).group(1))
-
- if len(argv) < 2:
- next_chrome_rev = GetLkgr()
- else:
- next_chrome_rev = int(argv[1])
-
- print 'Chromium roll: %d -> %d' % (current_chrome_rev, next_chrome_rev)
-
- if current_chrome_rev == next_chrome_rev:
- return
-
- # Update patched files.
- os.chdir('..')
- update_patched_files.update_overridden_files(current_chrome_rev, next_chrome_rev)
- os.chdir('src')
-
- # Update DEPS.
- subprocess.check_call(['svn', 'up', '-r', str(current_chrome_rev), CHROMIUM_DEPS_FILE])
- current_webkit_rev = GetRevision(CHROMIUM_DEPS_FILE, 'webkit_revision')
- subprocess.check_call(['svn', 'up', '-r', str(next_chrome_rev), CHROMIUM_DEPS_FILE])
- next_webkit_rev = GetRevision(CHROMIUM_DEPS_FILE, 'webkit_revision')
-
- shutil.copyfile(CHROMIUM_DEPS_FILE, CHROMIUM_DEPS_COPY)
- deps = deps.replace('"chromium_revision": "%d",' % current_chrome_rev, '"chromium_revision": "%d",' % next_chrome_rev)
- file(DARTIUM_DEPS_FILE, 'w').write(deps)
-
- # Do webkit roll.
- WEBKIT_DIR = 'third_party/WebKit'
- subprocess.check_call(['git', 'svn', 'rebase'], cwd=WEBKIT_DIR)
- print 'WebKit roll: %d -> %d' % (current_webkit_rev, next_webkit_rev)
-
- if current_webkit_rev < next_webkit_rev:
- subprocess.check_call(['bash',
- '../../dartium_tools/roll_webkit.sh',
- str(current_webkit_rev), str(next_webkit_rev)], cwd=WEBKIT_DIR)
-
- # Update the checkout.
- subprocess.check_call(['gclient', 'sync', '-j17'])
-
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv))
diff --git a/dartium_tools/roll_webkit.sh b/dartium_tools/roll_webkit.sh
deleted file mode 100755
index 48077f3..0000000
--- a/dartium_tools/roll_webkit.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash -e
-
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This script does WebKit roll provided old and new svn revisions.
-
-gitSha() {
- git log --format=%h --grep "git-svn-id: svn://svn.chromium.org/blink/trunk@${1}" blink/master
-}
-
-git checkout master
-git svn rebase
-git fetch blink
-
-old_svn_rev=$1
-new_svn_rev=$2
-
-old_rev="$(gitSha $old_svn_rev)"
-new_rev="$(gitSha $new_svn_rev)"
-
-merge_branch_name="merge-${old_svn_rev}-${new_svn_rev}"
-
-git checkout -b ${merge_branch_name} ${old_rev}
-git diff ${old_rev} ${new_rev} --binary | git apply --binary --index
-# git cherry-pick --no-commit ${old_rev}..${new_rev}
-git commit -m "MERGE: ${old_svn_rev}-${new_svn_rev}."
-git rebase --onto master ${merge_branch_name}~1 ${merge_branch_name}
- \ No newline at end of file
diff --git a/dartium_tools/set_reference_build_revision.py b/dartium_tools/set_reference_build_revision.py
deleted file mode 100755
index e27c1e6..0000000
--- a/dartium_tools/set_reference_build_revision.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Writes a revision number into src/chrome/tools/test/reference_build/REVISON
- Must be run from the root of a Dartium or multivm checkout.
-
-Usage:
- $ ./src/dartium_tools/set_reference_build_revision.py <revision>
-"""
-
-import os
-import sys
-
-def main(argv):
- revision = argv[1]
- output = os.path.join('src', 'chrome', 'tools',
- 'test', 'reference_build',
- 'REQUESTED_REVISION')
- dirname = os.path.dirname(output)
- if dirname and not os.path.exists(dirname):
- os.makedirs(dirname)
- with file(output, 'w') as f:
- f.write(revision)
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv))
diff --git a/dartium_tools/supplement.gypi b/dartium_tools/supplement.gypi
deleted file mode 100644
index a0e5306..0000000
--- a/dartium_tools/supplement.gypi
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- 'includes': [
- '../third_party/WebKit/Source/bindings/dart/gyp/overrides.gypi',
- ],
- 'variables': {
- # Fixes mysterious forge issue.
- 'use_custom_freetype': 0,
- },
-}
diff --git a/dartium_tools/test.py b/dartium_tools/test.py
deleted file mode 100755
index 75b85d5..0000000
--- a/dartium_tools/test.py
+++ /dev/null
@@ -1,242 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011 Google Inc. All Rights Reserved.
-
-import fnmatch
-import optparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import urllib
-import utils
-
-SCRIPT_TAG = '<script type="application/%s" src="%s"></script>\n'
-
-DART_TEST_DIR = os.path.join('dart')
-
-DART_VM_FLAGS = [
- ]
-DART_VM_CHECKED_FLAGS = DART_VM_FLAGS + [
- '--enable_type_checks',
- '--warning_as_error',
- ]
-
-TEST_DRT_FLAGS = [
- '--compiler=none',
- '--runtime=drt',
- '--drt=%(drt)s',
- '--mode=%(mode)s',
- '--arch=%(arch)s',
- '--build-directory=%(build_dir)s',
- '--report',
- '--time',
- ]
-
-TEST_DRT_CHECKED_FLAGS = TEST_DRT_FLAGS + [
- '--checked',
- ]
-
-TEST_DARTIUM_FLAGS = [
- '--compiler=none',
- '--runtime=dartium',
- '--dartium=%(dartium)s',
- '--mode=%(mode)s',
- '--build-directory=%(build_dir)s',
- '--report',
- '--time',
- ]
-
-TEST_DARTIUM_CHECKED_FLAGS = TEST_DARTIUM_FLAGS + [
- '--checked',
- ]
-
-TEST_INFO = {
- 'dartium': {
- 'core': {
- 'checked': TEST_DARTIUM_CHECKED_FLAGS,
- 'unchecked': TEST_DARTIUM_FLAGS,
- },
- },
- 'drt': {
- 'layout': {
- 'checked': DART_VM_CHECKED_FLAGS,
- 'unchecked': DART_VM_FLAGS,
- },
- 'core': {
- 'checked': TEST_DRT_CHECKED_FLAGS,
- 'unchecked': TEST_DRT_FLAGS,
- },
- },
-}
-
-COMPONENTS = TEST_INFO.keys()
-SUITES = [ 'layout', 'core' ]
-
-def main():
- parser = optparse.OptionParser()
- parser.add_option('--mode', dest='mode',
- action='store', type='string',
- help='Test mode (Debug or Release)')
- parser.add_option('--component', dest='component',
- default='drt',
- action='store', type='string',
- help='Execution mode (dartium, drt or all)')
- parser.add_option('--suite', dest='suite',
- default='all',
- action='store', type='string',
- help='Test suite (layout, core, or all)')
- parser.add_option('--arch', dest='arch',
- default='ia32',
- action='store', type='string',
- help='Target architecture')
- parser.add_option('--no-show-results', action='store_false',
- default=True, dest='show_results',
- help='Don\'t launch a browser with results '
- 'after the tests are done')
- parser.add_option('--checked', action='store_true',
- default=False, dest='checked',
- help='Run Dart code in checked mode')
- parser.add_option('--unchecked', action='store_true',
- default=False, dest='unchecked',
- help='Run Dart code in unchecked mode')
- parser.add_option('--buildbot', action='store_true',
- default=False, dest='buildbot',
- help='Print results in buildbot format')
- parser.add_option('--layout-test', dest='layout_test',
- default=None,
- action='store', type='string',
- help='Single layout test to run if set')
- parser.add_option('--test-filter', dest='test_filter',
- default=None,
- action='store', type='string',
- help='Test filter for core tests')
- parser.add_option('--win-ninja-build', action='store_true',
- default=False, dest='is_win_ninja',
- help='We are on windows and use ninja for building.')
-
- (options, args) = parser.parse_args()
- mode = options.mode
- if not (mode in ['Debug', 'Release']):
- raise Exception('Invalid test mode')
-
- if options.component == 'all':
- components = COMPONENTS
- elif not (options.component in COMPONENTS):
- raise Exception('Invalid component %s' % options.component)
- else:
- components = [ options.component ]
-
- if options.suite == 'all':
- suites = SUITES
- elif not (options.suite in SUITES):
- raise Exception('Invalid suite %s' % options.suite)
- else:
- suites = [ options.suite ]
-
- # If --checked or --unchecked not present, run with both.
- checkmodes = ['unchecked', 'checked']
- if options.checked or options.unchecked:
- checkmodes = []
- if options.unchecked: checkmodes.append('unchecked')
- if options.checked: checkmodes.append('checked')
-
- pathname = os.path.dirname(sys.argv[0])
- fullpath = os.path.abspath(pathname)
- srcpath = os.path.normpath(os.path.join(fullpath, '..'))
-
- test_mode = ''
- timeout = 30000
- if mode == 'Debug':
- test_mode = '--debug'
- timeout = 60000
-
- show_results = ''
- if not options.show_results:
- show_results = '--no-show-results'
-
- host_os = utils.guessOS()
- if options.is_win_ninja:
- host_os = 'win-ninja'
- build_root, drt_path, dartium_path, dart_path = {
- 'mac': (
- 'out',
- os.path.join('Content Shell.app', 'Contents', 'MacOS', 'Content Shell'),
- os.path.join('Chromium.app', 'Contents', 'MacOS', 'Chromium'),
- 'dart',
- ),
- 'linux': ('out', 'content_shell', 'chrome', 'dart'),
- 'win': ('out', 'content_shell.exe', 'chrome.exe', 'dart.exe'),
- 'win-ninja': ('out', 'content_shell.exe', 'chrome.exe', 'dart.exe'),
- }[host_os]
-
- build_dir = os.path.join(srcpath, build_root, mode)
-
- executable_map = {
- 'mode': mode.lower(),
- 'build_dir': os.path.relpath(build_dir),
- 'drt': os.path.join(build_dir, drt_path),
- 'dartium': os.path.join(build_dir, dartium_path),
- 'dart': os.path.join(build_dir, dart_path),
- 'arch': options.arch,
- }
-
- test_script = os.path.join(srcpath, 'webkit', 'tools', 'layout_tests',
- 'run_webkit_tests.py')
-
- errors = False
- for component in components:
- for checkmode in checkmodes:
- # Capture errors and report at the end.
- try:
- if ('layout' in suites and
- 'layout' in TEST_INFO[component] and
- checkmode in TEST_INFO[component]['layout']):
- # Run layout tests in this mode
- dart_flags = ' '.join(TEST_INFO[component]['layout'][checkmode])
-
- if options.layout_test:
- test = os.path.join(DART_TEST_DIR, options.layout_test)
- else:
- test = DART_TEST_DIR
- package_root = os.path.join(build_dir, 'packages')
- utils.runCommand(['python',
- test_script,
- test_mode,
- show_results,
- '--time-out-ms', str(timeout),
- # Temporary hack to fix issue with svn vs. svn.bat.
- '--builder-name', 'BuildBot',
- '--additional-env-var',
- 'DART_FLAGS=%s' % dart_flags,
- '--additional-env-var',
- 'DART_PACKAGE_ROOT=file://%s' % package_root,
- test])
-
- # Run core dart tests
- if ('core' in suites and
- 'core' in TEST_INFO[component] and
- checkmode in TEST_INFO[component]['core']):
- core_flags = TEST_INFO[component]['core'][checkmode]
- core_flags = map(lambda flag: flag % executable_map, core_flags)
- if options.buildbot:
- core_flags = ['--progress=buildbot'] + core_flags
- tester = os.path.join(srcpath, 'dart', 'tools', 'test.py')
- test_filter = [options.test_filter] if options.test_filter else []
- utils.runCommand(['python', tester] + core_flags + test_filter)
- except (StandardError, Exception) as e:
- print 'Fail: ' + str(e)
- errors = True
-
- if errors:
- return 1
- else:
- return 0
-
-if __name__ == '__main__':
- try:
- sys.exit(main())
- except StandardError as e:
- print 'Fail: ' + str(e)
- sys.exit(1)
diff --git a/dartium_tools/update_deps.py b/dartium_tools/update_deps.py
deleted file mode 100755
index 0b675a1..0000000
--- a/dartium_tools/update_deps.py
+++ /dev/null
@@ -1,226 +0,0 @@
-#!/usr/bin/python
-
-# Update Dartium DEPS automatically.
-
-from datetime import datetime, timedelta
-import optparse
-import os
-import re
-from subprocess import Popen, PIPE
-import sys
-from time import strptime
-
-# Instructions:
-#
-# To run locally:
-# (a) Create and change to a directory to run the updater in:
-# > mkdir /usr/local/google/home/$USER/dartium_deps_updater
-# > cd /usr/local/google/home/$USER/dartium_deps_updater
-#
-# (b) Checkout a copy of the DEPS for the updater to process / update:
-# > svn co https://dart.googlecode.com/svn/branches/bleeding_edge/deps/dartium.deps
-#
-# (c) Checkout dartium_tools (with this script) using the current branch instead of 1750:
-# > svn co svn://svn.chromium.org/chrome/branches/dart/1750/src/dartium_tools
-#
-# (d) If your home directory is remote, consider redefining it for this shell/script:
-# > cp -R $HOME/.subversion /usr/local/google/home/$USER
-# > export HOME=/usr/local/google/home/$USER
-#
-# (e) Test by running (Ctrl-C to quit):
-# > ./dartium_tools/update_deps.py
-#
-# (f) Run the periodic update:
-# > while true; do ./dartium_tools/update_deps.py --force ; sleep 300 ; done
-
-########################################################################
-# Repositories to auto-update
-########################################################################
-
-# Each element in this map represents a repository to update. Entries
-# take the form:
-# (repo_tag: (svn_url, view_url))
-#
-# The repo_tag must match the DEPS revision entry. I.e., there must be
-# an entry of the form:
-# 'dartium_%s_revision' % repo_tag
-# to roll forward.
-#
-# The view_url should be parameterized by revision number. This is
-# used to generate the commit message.
-REPOSITORY_INFO = {
- 'webkit': (
- 'http://src.chromium.org/blink/branches/dart/1750',
- 'http://src.chromium.org/viewvc/blink/branches/dart/1750?view=rev&revision=%s'),
- 'chromium': (
- 'http://src.chromium.org/chrome/branches/dart/1750/src',
- 'http://src.chromium.org/viewvc/chrome/branches/dart/1750/src?view=rev&revision=%s'),
-}
-
-REPOSITORIES = REPOSITORY_INFO.keys()
-
-########################################################################
-# Actions
-########################################################################
-
-def write_file(filename, content):
- f = open(filename, "w")
- f.write(content)
- f.close()
-
-def run_cmd(cmd):
- print "\n[%s]\n$ %s" % (os.getcwd(), " ".join(cmd))
- pipe = Popen(cmd, stdout=PIPE, stderr=PIPE)
- output = pipe.communicate()
- if pipe.returncode == 0:
- return output[0]
- else:
- print output[1]
- print "FAILED. RET_CODE=%d" % pipe.returncode
- sys.exit(pipe.returncode)
-
-def parse_iso_time(s):
- pair = s.rsplit(' ', 1)
- d = datetime.strptime(pair[0], '%Y-%m-%d %H:%M:%S')
- offset = timedelta(hours=int(pair[1][0:3]))
- return d - offset
-
-def parse_git_log(output, repo):
- if len(output) < 4:
- return []
- lst = output.split(os.linesep)
- lst = [s.strip('\'') for s in lst]
- lst = [s.split(',', 3) for s in lst]
- lst = [{'repo': repo,
- 'rev': s[0],
- 'isotime':s[1],
- 'author': s[2],
- 'utctime': parse_iso_time(s[1]),
- 'info': s[3]} for s in lst]
- return lst
-
-def parse_svn_log(output, repo):
- lst = output.split(os.linesep)
- lst = [s.strip('\'') for s in lst]
- output = '_LINESEP_'.join(lst)
- lst = output.split('------------------------------------------------------------------------')
- lst = [s.replace('_LINESEP_', '\n') for s in lst]
- lst = [s.strip('\n') for s in lst]
- lst = [s.strip(' ') for s in lst]
- lst = [s for s in lst if len(s) > 0]
- pattern = re.compile(' \| (\d+) line(s|)')
- lst = [pattern.sub(' | ', s) for s in lst]
- lst = [s.split(' | ', 3) for s in lst]
- lst = [{'repo': repo,
- 'rev': s[0].replace('r', ''),
- 'author': s[1],
- 'isotime':s[2][0:25],
- 'utctime': parse_iso_time(s[2][0:25]),
- 'info': s[3].split('\n')[2]} for s in lst]
- return lst
-
-def commit_url(repo, rev):
- numrev = rev.replace('r', '')
- if repo in REPOSITORIES:
- (_, view_url) = REPOSITORY_INFO[repo]
- return view_url % numrev
- else:
- raise Exception('Unknown repo')
-
-def find_max(revs):
- max_time = None
- max_position = None
- for i, rev in enumerate(revs):
- if rev == []:
- continue
- if max_time is None or rev[0]['utctime'] > max_time:
- max_time = rev[0]['utctime']
- max_position = i
- return max_position
-
-def merge_revs(revs):
- position = find_max(revs)
- if position is None:
- return []
- item = revs[position][0]
- revs[position] = revs[position][1:]
- return [item] + merge_revs(revs)
-
-def main():
- option_parser = optparse.OptionParser()
- option_parser.add_option('', '--force', help="Push DEPS update to server without prompting", action="store_true", dest="force")
- options, args = option_parser.parse_args()
-
- src_dir = "/usr/local/google/home/%s/dartium_deps_updater/dartium.deps" % os.environ["USER"]
- os.putenv("GIT_PAGER", "")
-
- if not os.path.exists(src_dir):
- print "Error: prior to running this script, you need to check out a Dartium source tree at"
- print " %s" % src_dir
- print "Please reserve the above directory for this script and do not use it for other purposes."
- sys.exit(1)
-
- os.chdir(src_dir)
-
- # parse DEPS
- deps = run_cmd(['svn', 'cat', 'https://dart.googlecode.com/svn/branches/bleeding_edge/deps/dartium.deps/DEPS'])
- rev_num = {}
- for repo in REPOSITORIES:
- revision = 'dartium_%s_revision":\s*"(.+)"' % repo
- rev_num[repo] = re.search(revision, deps).group(1)
-
- # update repos
- all_revs = []
- for repo, (svn_url, _) in REPOSITORY_INFO.items():
- output = run_cmd(["svn", "log", "-r", "HEAD:%s" % rev_num[repo], svn_url])
- revs = parse_svn_log(output, repo)
- if revs and revs[-1]['rev'] == rev_num[repo]:
- revs.pop()
- all_revs.append(revs)
-
- pending_updates = merge_revs(all_revs)
- pending_updates.reverse()
-
- print
- print "Current DEPS revisions:"
- for repo in REPOSITORIES:
- print ' dartium_%s_revision=%s' % (repo, rev_num[repo])
-
- if len(pending_updates) == 0:
- print "DEPS is up-to-date."
- sys.exit(0)
- else:
- print "Pending DEPS updates:"
- for s in pending_updates:
- print " %s to %s (%s) %s" % (s['repo'], s['rev'], s['isotime'], s['info'])
-
- # make the next DEPS update
- os.chdir(src_dir)
- run_cmd(['rm', 'DEPS'])
- print run_cmd(['svn', 'update'])
- s = pending_updates[0]
-
- pattern = re.compile('dartium_' + s['repo'] + '_revision":\s*"(.+)"')
- new_deps = pattern.sub('dartium_' + s['repo'] + '_revision": "' + s['rev'] + '"', deps)
- write_file('DEPS', new_deps)
-
- commit_log = 'DEPS AutoUpdate: %s to %s (%s) %s\n' % (s['repo'], s['rev'], s['isotime'], s['author'])
- commit_log += s['info'] + '\n' + commit_url(s['repo'], s['rev'])
-
- write_file('commit_log.txt', commit_log)
- print run_cmd(['svn', 'diff'])
- print
- print "Commit log:"
- print "---------------------------------------------"
- print commit_log
- print "---------------------------------------------"
-
- if not options.force:
- print "Ready to push; press Enter to continue or Control-C to abort..."
- sys.stdin.readline()
- print run_cmd(['svn', 'commit', '--file', 'commit_log.txt'])
- print "Done."
-
-
-if '__main__' == __name__:
- main()
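
As the instructions at the top of the deleted update_deps.py describe, each repository is pinned through a dartium_<repo>_revision entry in DEPS, and the script rolls one repository forward per run by rewriting that entry with a regex. A small, self-contained sketch of the rewrite step; the DEPS snippet and revision numbers below are invented example data.

    # Sketch of the DEPS rewrite step from the deleted update_deps.py.
    import re

    deps = '"dartium_webkit_revision": "151000",\n"dartium_chromium_revision": "264000",'
    repo, new_rev = 'webkit', '151042'

    pattern = re.compile(r'dartium_%s_revision":\s*"(.+)"' % repo)
    old_rev = pattern.search(deps).group(1)      # revision currently pinned in DEPS
    new_deps = pattern.sub('dartium_%s_revision": "%s"' % (repo, new_rev), deps)

    print('%s: %s -> %s' % (repo, old_rev, new_rev))
    assert '"151042"' in new_deps
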
diff --git a/dartium_tools/update_patched_files.py b/dartium_tools/update_patched_files.py
deleted file mode 100755
index 37e9543..0000000
--- a/dartium_tools/update_patched_files.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2012 Google Inc. All Rights Reserved.
-
-import overrides_database
-import shutil
-import subprocess
-import sys
-
-
-def svn_update(path, rev):
- subprocess.call(['svn', 'up', '-r', str(rev), path])
-
-
-def update_overridden_files(old_rev, new_rev):
- assert old_rev < new_rev
- for override in overrides_database.OVERRIDDEN_FILES:
- patched = override['modified']
- orig = override['original']
- svn_update(orig, old_rev)
- shutil.copyfile(patched, orig)
- svn_update(orig, new_rev)
- shutil.copyfile(orig, patched)
-
-
-if __name__ == '__main__':
- update_overridden_files(int(sys.argv[1]), int(sys.argv[2]))
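
The deleted update_patched_files.py rebases locally patched copies of upstream files across a revision bump: revert the original to the old revision, overlay the patched copy, let svn update merge the upstream changes into it, then copy the merged result back over the patch. A hedged sketch of that round trip for a single file; the paths and revisions are placeholders, and the real file list came from overrides_database.OVERRIDDEN_FILES.

    # Sketch of the per-file round trip from the deleted update_patched_files.py.
    import shutil
    import subprocess

    def rebase_override(orig, patched, old_rev, new_rev):
        assert old_rev < new_rev
        subprocess.call(['svn', 'up', '-r', str(old_rev), orig])  # back to the old base
        shutil.copyfile(patched, orig)        # overlay the local modifications
        subprocess.call(['svn', 'up', '-r', str(new_rev), orig])  # svn merges upstream changes in
        shutil.copyfile(orig, patched)        # keep the merged result as the new patched copy

    # Example (hypothetical paths): rebase_override('third_party/foo.cc', 'patches/foo.cc', 1000, 1042)
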
diff --git a/dartium_tools/update_version.py b/dartium_tools/update_version.py
deleted file mode 100755
index 020eea3..0000000
--- a/dartium_tools/update_version.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2012 Google Inc. All Rights Reserved.
-
-import subprocess
-import sys
-
-def FetchSVNRevision():
- try:
- proc = subprocess.Popen(['svn', 'info'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd='src/dart',
- shell=(sys.platform=='win32'))
- except OSError:
- # command is apparently either not installed or not executable.
- return None
- if not proc:
- return None
-
- for line in proc.stdout:
- line = line.strip()
- if not line:
- continue
- key, val = line.split(': ', 1)
- if key == 'Revision':
- return val
-
- return None
-
-
-def main():
- revision = FetchSVNRevision()
- path = 'src/chrome/VERSION'
- text = file(path).readlines()
- text[2] = 'BUILD=d%s\n' % revision
- file(path, 'w').writelines(text)
-
-if __name__ == '__main__':
- main()
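
The deleted update_version.py stamps the Dart checkout's SVN revision into the Chrome version number: it reads the 'Revision' field from svn info in src/dart and rewrites the third line of src/chrome/VERSION as BUILD=d<revision>. A minimal sketch of just the VERSION rewrite, using a temporary path and made-up version values for illustration:

    # Sketch of the VERSION rewrite from the deleted update_version.py.
    path = '/tmp/VERSION'      # placeholder for src/chrome/VERSION
    revision = '12345'         # placeholder for the svn revision

    with open(path, 'w') as f:                      # fabricate an example VERSION file
        f.write('MAJOR=36\nMINOR=0\nBUILD=1985\nPATCH=0\n')

    with open(path) as f:
        lines = f.readlines()
    lines[2] = 'BUILD=d%s\n' % revision             # third line carries the build number
    with open(path, 'w') as f:
        f.writelines(lines)
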
diff --git a/dartium_tools/utils.py b/dartium_tools/utils.py
deleted file mode 100755
index e2a50ed..0000000
--- a/dartium_tools/utils.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright 2010 Google Inc. All Rights Reserved.
-
-# This file contains a set of utility functions used
-# by both SConstruct and other Python-based scripts.
-
-import commands
-import os
-import platform
-import re
-import subprocess
-
-class ChangedWorkingDirectory(object):
- def __init__(self, new_dir):
- self._new_dir = new_dir
-
- def __enter__(self):
- self._old_dir = os.getcwd()
- os.chdir(self._new_dir)
- return self._new_dir
-
- def __exit__(self, *_):
- os.chdir(self._old_dir)
-
-# Try to guess the host operating system.
-def guessOS():
- id = platform.system()
- if id == "Linux":
- return "linux"
- elif id == "Darwin":
- return "mac"
- elif id == "Windows" or id == "Microsoft":
- # On Windows Vista platform.system() can return "Microsoft" with some
- # versions of Python, see http://bugs.python.org/issue1082 for details.
- return "win"
- else:
- return None
-
-
-# Try to guess the host architecture.
-def guessArchitecture():
- id = platform.machine()
- if id.startswith('arm'):
- return 'arm'
- elif (not id) or (re.match('(x|i[3-6])86', id) is not None):
- return 'x86'
- elif id == 'i86pc':
- return 'x86'
- else:
- return None
-
-
-# Try to guess the number of cpus on this machine.
-def guessCpus():
- if os.path.exists("/proc/cpuinfo"):
- return int(commands.getoutput("grep -E '^processor' /proc/cpuinfo | wc -l"))
- if os.path.exists("/usr/bin/hostinfo"):
- return int(commands.getoutput('/usr/bin/hostinfo | grep "processors are logically available." | awk "{ print \$1 }"'))
- win_cpu_count = os.getenv("NUMBER_OF_PROCESSORS")
- if win_cpu_count:
- return int(win_cpu_count)
- return int(os.getenv("PARFAIT_NUMBER_OF_CORES", 2))
-
-
-# Returns true if we're running under Windows.
-def isWindows():
- return guessOS() == 'win'
-
-# Reads a text file into an array of strings - one for each
-# line. Strips comments in the process.
-def readLinesFrom(name):
- result = []
- for line in open(name):
- if '#' in line:
- line = line[:line.find('#')]
- line = line.strip()
- if len(line) == 0:
- continue
- result.append(line)
- return result
-
-def listArgCallback(option, opt_str, value, parser):
- if value is None:
- value = []
-
- for arg in parser.rargs:
- if arg[:2].startswith('--'):
- break
- value.append(arg)
-
- del parser.rargs[:len(value)]
- setattr(parser.values, option.dest, value)
-
-
-def getCommandOutput(cmd):
- print cmd
- pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- output = pipe.communicate()
- if pipe.returncode == 0:
- return output[0]
- else:
- print output[1]
- raise Exception('Failed to run command. return code=%s' % pipe.returncode)
-
-def runCommand(cmd, env_update=None):
- if env_update is None:
- env_update = {}
- print 'Running: ' + ' '.join(["%s='%s'" % (k, v) for k, v in env_update.iteritems()]) + ' ' + ' '.join(cmd)
- env_copy = dict(os.environ.items())
- env_copy.update(env_update)
- p = subprocess.Popen(cmd, env=env_copy)
- if p.wait() != 0:
- raise Exception('Failed to run command. return code=%s' % p.returncode)
-
-def main(argv):
- print "GuessOS() -> ", guessOS()
- print "GuessArchitecture() -> ", guessArchitecture()
- print "GuessCpus() -> ", guessCpus()
- print "IsWindows() -> ", isWindows()
-
-
-if __name__ == "__main__":
- import sys
- main(sys.argv)
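
The deleted utils.py is the grab bag of helpers shared by the other dartium_tools scripts: OS/architecture/CPU-count guessing, a ChangedWorkingDirectory context manager, and subprocess wrappers that either capture output (getCommandOutput) or merge extra variables into the environment (runCommand). A short, hypothetical usage sketch, assuming the module were still importable as utils:

    # Hypothetical usage of the deleted dartium_tools/utils.py helpers.
    import utils

    print(utils.guessOS())     # 'linux', 'mac', 'win', or None
    print(utils.guessCpus())   # parallelism hint for builds

    # Run a command from another directory with an augmented environment.
    with utils.ChangedWorkingDirectory('/tmp'):
        utils.runCommand(['ls'], env_update={'DART_FLAGS': '--checked'})
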
diff --git a/net/base/mime_util.cc b/net/base/mime_util.cc
index c0b8d36..4679dcc 100644
--- a/net/base/mime_util.cc
+++ b/net/base/mime_util.cc
@@ -63,7 +63,6 @@ class MimeUtil : public PlatformMimeUtil {
bool IsSupportedNonImageMimeType(const std::string& mime_type) const;
bool IsUnsupportedTextMimeType(const std::string& mime_type) const;
bool IsSupportedJavascriptMimeType(const std::string& mime_type) const;
- bool IsSupportedDartMimeType(const std::string& mime_type) const;
bool IsSupportedMimeType(const std::string& mime_type) const;
@@ -110,7 +109,6 @@ class MimeUtil : public PlatformMimeUtil {
MimeMappings non_image_map_;
MimeMappings unsupported_text_map_;
MimeMappings javascript_map_;
- MimeMappings dart_map_;
MimeMappings codecs_map_;
StrictMappings strict_format_map_;
@@ -152,7 +150,6 @@ static const MimeInfo secondary_mappings[] = {
{ "application/pdf", "pdf" },
{ "application/postscript", "ps,eps,ai" },
{ "application/javascript", "js" },
- { "application/dart", "dart" },
{ "application/font-woff", "woff" },
{ "image/bmp", "bmp" },
{ "image/x-icon", "ico" },
@@ -429,10 +426,6 @@ static bool IsCodecSupportedOnAndroid(const std::string& codec) {
}
#endif
-static const char* const supported_dart_types[] = {
- "application/dart",
-};
-
struct MediaFormatStrict {
const char* mime_type;
const char* codecs_list;
@@ -475,8 +468,6 @@ void MimeUtil::InitializeMimeTypeMaps() {
unsupported_text_map_.insert(unsupported_text_types[i]);
for (size_t i = 0; i < arraysize(supported_javascript_types); ++i)
non_image_map_.insert(supported_javascript_types[i]);
- for (size_t i = 0; i < arraysize(supported_dart_types); ++i)
- non_image_map_.insert(supported_dart_types[i]);
for (size_t i = 0; i < arraysize(common_media_types); ++i)
non_image_map_.insert(common_media_types[i]);
#if defined(USE_PROPRIETARY_CODECS)
@@ -494,8 +485,6 @@ void MimeUtil::InitializeMimeTypeMaps() {
for (size_t i = 0; i < arraysize(supported_javascript_types); ++i)
javascript_map_.insert(supported_javascript_types[i]);
- for (size_t i = 0; i < arraysize(supported_dart_types); ++i)
- dart_map_.insert(supported_dart_types[i]);
for (size_t i = 0; i < arraysize(common_media_codecs); ++i) {
#if defined(OS_ANDROID)
@@ -553,11 +542,6 @@ bool MimeUtil::IsSupportedJavascriptMimeType(
return javascript_map_.find(mime_type) != javascript_map_.end();
}
-bool MimeUtil::IsSupportedDartMimeType(
- const std::string& mime_type) const {
- return dart_map_.find(mime_type) != dart_map_.end();
-}
-
// Mirrors WebViewImpl::CanShowMIMEType()
bool MimeUtil::IsSupportedMimeType(const std::string& mime_type) const {
return (mime_type.compare(0, 6, "image/") == 0 &&
@@ -780,10 +764,6 @@ bool IsSupportedJavascriptMimeType(const std::string& mime_type) {
return g_mime_util.Get().IsSupportedJavascriptMimeType(mime_type);
}
-bool IsSupportedDartMimeType(const std::string& mime_type) {
- return g_mime_util.Get().IsSupportedDartMimeType(mime_type);
-}
-
bool IsSupportedMimeType(const std::string& mime_type) {
return g_mime_util.Get().IsSupportedMimeType(mime_type);
}
diff --git a/net/base/mime_util.h b/net/base/mime_util.h
index d5e15af..9662e96 100644
--- a/net/base/mime_util.h
+++ b/net/base/mime_util.h
@@ -44,7 +44,6 @@ NET_EXPORT bool IsSupportedMediaMimeType(const std::string& mime_type);
NET_EXPORT bool IsSupportedNonImageMimeType(const std::string& mime_type);
NET_EXPORT bool IsUnsupportedTextMimeType(const std::string& mime_type);
NET_EXPORT bool IsSupportedJavascriptMimeType(const std::string& mime_type);
-NET_EXPORT bool IsSupportedDartMimeType(const std::string& mime_type);
NET_EXPORT bool IsSupportedCertificateMimeType(const std::string& mime_type);
// Convenience function.
diff --git a/net/base/network_change_notifier_win.cc b/net/base/network_change_notifier_win.cc
index 135905e..77a72b0 100644
--- a/net/base/network_change_notifier_win.cc
+++ b/net/base/network_change_notifier_win.cc
@@ -54,7 +54,6 @@ class NetworkChangeNotifierWin::DnsConfigServiceThread : public base::Thread {
NetworkChangeNotifierWin::NetworkChangeNotifierWin()
: NetworkChangeNotifier(NetworkChangeCalculatorParamsWin()),
is_watching_(false),
- network_change_event_handle_(NULL),
sequential_failures_(0),
weak_factory_(this),
dns_config_service_thread_(new DnsConfigServiceThread()),
@@ -223,15 +222,10 @@ void NetworkChangeNotifierWin::OnObjectSignaled(HANDLE object) {
DCHECK(is_watching_);
is_watching_ = false;
- DWORD bytes;
- BOOL network_changed = GetOverlappedResult(network_change_event_handle_, &addr_overlapped_, &bytes, TRUE);
-
// Start watching for the next address change.
WatchForAddressChange();
- // If network_changed is 0, an error occurred (e.g. GetLastError() = 995 = ERROR_OPERATION_ABORTED).
- if (network_changed != 0)
- NotifyObservers();
+ NotifyObservers();
}
void NetworkChangeNotifierWin::NotifyObservers() {
@@ -301,8 +295,8 @@ bool NetworkChangeNotifierWin::WatchForAddressChangeInternal() {
base::Thread::Options(base::MessageLoop::TYPE_IO, 0));
}
-
- DWORD ret = NotifyAddrChange(&network_change_event_handle_, &addr_overlapped_);
+ HANDLE handle = NULL;
+ DWORD ret = NotifyAddrChange(&handle, &addr_overlapped_);
if (ret != ERROR_IO_PENDING)
return false;
diff --git a/net/base/network_change_notifier_win.h b/net/base/network_change_notifier_win.h
index e32b950..7b75c15 100644
--- a/net/base/network_change_notifier_win.h
+++ b/net/base/network_change_notifier_win.h
@@ -90,10 +90,6 @@ class NET_EXPORT_PRIVATE NetworkChangeNotifierWin
base::win::ObjectWatcher addr_watcher_;
OVERLAPPED addr_overlapped_;
- // This file handle receives network change notifications and is used for calling
- // GetOverlappedResult().
- HANDLE network_change_event_handle_;
-
base::OneShotTimer<NetworkChangeNotifierWin> timer_;
// Number of times WatchForAddressChange has failed in a row.