author     Wolfgang Wiedmeyer <wolfgit@wiedmeyer.de>    2016-03-25 18:25:16 +0100
committer  Wolfgang Wiedmeyer <wolfgit@wiedmeyer.de>    2016-03-25 18:25:16 +0100
commit     7f1f0f9f88dce6c9444c1692957af14adcc520f2
tree       30a96a7434ac98cdcf306cae9d3cfced827764f5
parent     489641af5ca5f929f014ebb0af8b32b04bdb68ca
remove proprietary Google Play dependencies (replicant-6.0)
Signed-off-by: Wolfgang Wiedmeyer <wolfgit@wiedmeyer.de>
-rw-r--r--  DEPS                                                                |  11
-rw-r--r--  build/android/PRESUBMIT.py                                          |   1
-rw-r--r--  build/android/play_services/LICENSE.sha1                            |   1
-rw-r--r--  build/android/play_services/__init__.py                             |   3
-rw-r--r--  build/android/play_services/config.json                             |   4
-rw-r--r--  build/android/play_services/google_play_services_library.zip.sha1   |   1
-rwxr-xr-x  build/android/play_services/preprocess.py                           | 263
-rwxr-xr-x  build/android/play_services/update.py                               | 515
-rwxr-xr-x  build/android/play_services/update_test.py                          | 416
-rw-r--r--  build/android/play_services/utils.py                                | 161
-rw-r--r--  build/config/android/config.gni                                     |  10
-rw-r--r--  build/secondary/third_party/android_tools/BUILD.gn                  |  17
-rw-r--r--  chrome/android/BUILD.gn                                             |   4
-rw-r--r--  chrome/android/chrome_apk.gyp                                       |   1
-rw-r--r--  chrome/chrome.gyp                                                   |   1
-rw-r--r--  chrome/chrome_tests.gypi                                            |   1
-rw-r--r--  remoting/remoting_android.gypi                                      |   4
-rw-r--r--  sync/android/BUILD.gn                                               |   1
-rw-r--r--  sync/sync_android.gypi                                              |   1
-rw-r--r--  third_party/cacheinvalidation/cacheinvalidation.gyp                 |   1
20 files changed, 0 insertions(+), 1417 deletions(-)
diff --git a/DEPS b/DEPS
index 0c7ab7c..a654d36 100644
--- a/DEPS
+++ b/DEPS
@@ -326,17 +326,6 @@ hooks = [
{
'action': [
'python',
- 'src/build/android/play_services/update.py',
- 'download'
- ],
- 'pattern':
- '.',
- 'name':
- 'sdkextras'
- },
- {
- 'action': [
- 'python',
'src/build/linux/sysroot_scripts/install-sysroot.py',
'--running-as-hook'
],
diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py
index 8cfe59c..5ceb6d8 100644
--- a/build/android/PRESUBMIT.py
+++ b/build/android/PRESUBMIT.py
@@ -51,7 +51,6 @@ def CommonChecks(input_api, output_api):
unit_tests=[
J('.', 'emma_coverage_stats_test.py'),
J('gyp', 'util', 'md5_check_test.py'),
- J('play_services', 'update_test.py'),
J('pylib', 'base', 'test_dispatcher_unittest.py'),
J('pylib', 'gtest', 'gtest_test_instance_test.py'),
J('pylib', 'instrumentation',
diff --git a/build/android/play_services/LICENSE.sha1 b/build/android/play_services/LICENSE.sha1
deleted file mode 100644
index 8e606a7..0000000
--- a/build/android/play_services/LICENSE.sha1
+++ /dev/null
@@ -1 +0,0 @@
-11cc73d4b7fa82560fbf5bbc1095dbac30308e7c \ No newline at end of file
diff --git a/build/android/play_services/__init__.py b/build/android/play_services/__init__.py
deleted file mode 100644
index 50b23df..0000000
--- a/build/android/play_services/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
diff --git a/build/android/play_services/config.json b/build/android/play_services/config.json
deleted file mode 100644
index 4a9a9d0..0000000
--- a/build/android/play_services/config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "version_number": 8298000,
- "version_xml_path": "res/values/version.xml"
-}
diff --git a/build/android/play_services/google_play_services_library.zip.sha1 b/build/android/play_services/google_play_services_library.zip.sha1
deleted file mode 100644
index 113d55a..0000000
--- a/build/android/play_services/google_play_services_library.zip.sha1
+++ /dev/null
@@ -1 +0,0 @@
-07308d03b3a83f2985c52e5cfe2764220e19e223 \ No newline at end of file
diff --git a/build/android/play_services/preprocess.py b/build/android/play_services/preprocess.py
deleted file mode 100755
index 0705729..0000000
--- a/build/android/play_services/preprocess.py
+++ /dev/null
@@ -1,263 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-'''Prepares the Google Play services split client libraries before usage by
-Chrome's build system.
-
-We need to preprocess Google Play services before using it in Chrome
-builds for 2 main reasons:
-
-- Getting rid of unused resources: unsupported languages, unused
-drawables, etc.
-
-- Merging the differents jars so that it can be proguarded more
-easily. This is necessary since debug and test apks get very close
-to the dex limit.
-
-The script is supposed to be used with the maven repository that can be
-obtained by downloading the "extra-google-m2repository" from the Android SDK
-Manager. It also supports importing from already extracted AAR files using the
---is-extracted-repo flag. The expected directory structure in that case would
-look like:
-
- REPOSITORY_DIR
- +-- CLIENT_1
- | +-- <content of the first AAR file>
- +-- CLIENT_2
- +-- etc.
-
-The output is a directory with the following structure:
-
- OUT_DIR
- +-- google-play-services.jar
- +-- res
- | +-- CLIENT_1
- | | +-- color
- | | +-- values
- | | +-- etc.
- | +-- CLIENT_2
- | +-- ...
- +-- stub
- +-- res/[.git-keep-directory]
- +-- src/android/UnusedStub.java
-
-Requires the `jar` utility in the path.
-
-'''
-
-import argparse
-import glob
-import itertools
-import os
-import shutil
-import stat
-import sys
-import tempfile
-import zipfile
-
-from datetime import datetime
-
-sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
-import devil_chromium
-from devil.utils import cmd_helper
-from play_services import utils
-from pylib.utils import argparse_utils
-
-
-M2_PKG_PATH = os.path.join('com', 'google', 'android', 'gms')
-
-
-def main():
- parser = argparse.ArgumentParser(description=(
- "Prepares the Google Play services split client libraries before usage "
- "by Chrome's build system. See the script's documentation for more a "
- "detailed help."))
- argparse_utils.CustomHelpAction.EnableFor(parser)
- required_args = parser.add_argument_group('required named arguments')
- required_args.add_argument('-r',
- '--repository',
- help=('the Google Play services repository '
- 'location'),
- required=True,
- metavar='FILE')
- required_args.add_argument('-o',
- '--out-dir',
- help='the output directory',
- required=True,
- metavar='FILE')
- required_args.add_argument('-c',
- '--config-file',
- help='the config file path',
- required=True,
- metavar='FILE')
- parser.add_argument('-x',
- '--is-extracted-repo',
- action='store_true',
- help='the provided repository is not made of AAR files')
- parser.add_argument('--config-help',
- action='custom_help',
- custom_help_text=utils.ConfigParser.__doc__,
- help='show the configuration file format help')
-
- args = parser.parse_args()
-
- devil_chromium.Initialize()
-
- return ProcessGooglePlayServices(args.repository,
- args.out_dir,
- args.config_file,
- args.is_extracted_repo)
-
-
-def ProcessGooglePlayServices(repo, out_dir, config_path, is_extracted_repo):
- config = utils.ConfigParser(config_path)
-
- tmp_root = tempfile.mkdtemp()
- try:
- tmp_paths = _SetupTempDir(tmp_root)
-
- if is_extracted_repo:
- _ImportFromExtractedRepo(config, tmp_paths, repo)
- else:
- _ImportFromAars(config, tmp_paths, repo)
-
- _GenerateCombinedJar(tmp_paths)
- _ProcessResources(config, tmp_paths)
- _BuildOutput(config, tmp_paths, out_dir)
- finally:
- shutil.rmtree(tmp_root)
-
- return 0
-
-
-def _SetupTempDir(tmp_root):
- tmp_paths = {
- 'root': tmp_root,
- 'imported_clients': os.path.join(tmp_root, 'imported_clients'),
- 'extracted_jars': os.path.join(tmp_root, 'jar'),
- 'combined_jar': os.path.join(tmp_root, 'google-play-services.jar'),
- }
- os.mkdir(tmp_paths['imported_clients'])
- os.mkdir(tmp_paths['extracted_jars'])
-
- return tmp_paths
-
-
-def _SetupOutputDir(out_dir):
- out_paths = {
- 'root': out_dir,
- 'res': os.path.join(out_dir, 'res'),
- 'jar': os.path.join(out_dir, 'google-play-services.jar'),
- 'stub': os.path.join(out_dir, 'stub'),
- }
-
- shutil.rmtree(out_paths['jar'], ignore_errors=True)
- shutil.rmtree(out_paths['res'], ignore_errors=True)
- shutil.rmtree(out_paths['stub'], ignore_errors=True)
-
- return out_paths
-
-
-def _MakeWritable(dir_path):
- for root, dirs, files in os.walk(dir_path):
- for path in itertools.chain(dirs, files):
- st = os.stat(os.path.join(root, path))
- os.chmod(os.path.join(root, path), st.st_mode | stat.S_IWUSR)
-
-
-def _ImportFromAars(config, tmp_paths, repo):
- for client in config.clients:
- aar_name = '%s-%s.aar' % (client, config.sdk_version)
- aar_path = os.path.join(repo, M2_PKG_PATH, client,
- config.sdk_version, aar_name)
- aar_out_path = os.path.join(tmp_paths['imported_clients'], client)
- _ExtractAll(aar_path, aar_out_path)
-
- client_jar_path = os.path.join(aar_out_path, 'classes.jar')
- _ExtractAll(client_jar_path, tmp_paths['extracted_jars'])
-
-
-def _ImportFromExtractedRepo(config, tmp_paths, repo):
- # Import the clients
- try:
- for client in config.clients:
- client_out_dir = os.path.join(tmp_paths['imported_clients'], client)
- shutil.copytree(os.path.join(repo, client), client_out_dir)
-
- client_jar_path = os.path.join(client_out_dir, 'classes.jar')
- _ExtractAll(client_jar_path, tmp_paths['extracted_jars'])
- finally:
- _MakeWritable(tmp_paths['imported_clients'])
-
-
-def _GenerateCombinedJar(tmp_paths):
- out_file_name = tmp_paths['combined_jar']
- working_dir = tmp_paths['extracted_jars']
- cmd_helper.Call(['jar', '-cf', out_file_name, '-C', working_dir, '.'])
-
-
-def _ProcessResources(config, tmp_paths):
- LOCALIZED_VALUES_BASE_NAME = 'values-'
- locale_whitelist = set(config.locale_whitelist)
-
- glob_pattern = os.path.join(tmp_paths['imported_clients'], '*', 'res', '*')
- for res_dir in glob.glob(glob_pattern):
- dir_name = os.path.basename(res_dir)
-
- if dir_name.startswith('drawable'):
- shutil.rmtree(res_dir)
- continue
-
- if dir_name.startswith(LOCALIZED_VALUES_BASE_NAME):
- dir_locale = dir_name[len(LOCALIZED_VALUES_BASE_NAME):]
- if dir_locale not in locale_whitelist:
- shutil.rmtree(res_dir)
-
-
-def _BuildOutput(config, tmp_paths, out_dir):
- generation_date = datetime.utcnow()
- version_xml_path = os.path.join(tmp_paths['imported_clients'],
- config.version_xml_path)
- play_services_full_version = utils.GetVersionNumberFromLibraryResources(
- version_xml_path)
-
- out_paths = _SetupOutputDir(out_dir)
-
- # Copy the resources to the output dir
- for client in config.clients:
- res_in_tmp_dir = os.path.join(tmp_paths['imported_clients'], client, 'res')
- if os.path.isdir(res_in_tmp_dir) and os.listdir(res_in_tmp_dir):
- res_in_final_dir = os.path.join(out_paths['res'], client)
- shutil.copytree(res_in_tmp_dir, res_in_final_dir)
-
- # Copy the jar
- shutil.copyfile(tmp_paths['combined_jar'], out_paths['jar'])
-
- # Write the java dummy stub. Needed for gyp to create the resource jar
- stub_location = os.path.join(out_paths['stub'], 'src', 'android')
- os.makedirs(stub_location)
- with open(os.path.join(stub_location, 'UnusedStub.java'), 'w') as stub:
- stub.write('package android;'
- 'public final class UnusedStub {'
- ' private UnusedStub() {}'
- '}')
-
- # Create the main res directory. It is needed by gyp
- stub_res_location = os.path.join(out_paths['stub'], 'res')
- os.makedirs(stub_res_location)
- with open(os.path.join(stub_res_location, '.res-stamp'), 'w') as stamp:
- content_str = 'google_play_services_version: %s\nutc_date: %s\n'
- stamp.write(content_str % (play_services_full_version, generation_date))
-
- config.UpdateVersionNumber(play_services_full_version)
-
-
-def _ExtractAll(zip_path, out_path):
- with zipfile.ZipFile(zip_path, 'r') as zip_file:
- zip_file.extractall(out_path)
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/android/play_services/update.py b/build/android/play_services/update.py
deleted file mode 100755
index 8a70325..0000000
--- a/build/android/play_services/update.py
+++ /dev/null
@@ -1,515 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-'''
-Script to help uploading and downloading the Google Play services library to
-and from a Google Cloud storage.
-'''
-
-import argparse
-import logging
-import os
-import re
-import shutil
-import sys
-import tempfile
-import zipfile
-
-sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
-import devil_chromium
-from devil.utils import cmd_helper
-from play_services import utils
-from pylib import constants
-from pylib.constants import host_paths
-from pylib.utils import logging_utils
-
-sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build'))
-import find_depot_tools # pylint: disable=import-error,unused-import
-import breakpad
-import download_from_google_storage
-import upload_to_google_storage
-
-
-# Directory where the SHA1 files for the zip and the license are stored
-# It should be managed by git to provided information about new versions.
-SHA1_DIRECTORY = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'android',
- 'play_services')
-
-# Default bucket used for storing the files.
-GMS_CLOUD_STORAGE = 'chromium-android-tools/play-services'
-
-# Path to the default configuration file. It exposes the currently installed
-# version of the library in a human readable way.
-CONFIG_DEFAULT_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build',
- 'android', 'play_services', 'config.json')
-
-LICENSE_FILE_NAME = 'LICENSE'
-ZIP_FILE_NAME = 'google_play_services_library.zip'
-GMS_PACKAGE_ID = 'extra-google-google_play_services' # used by sdk manager
-
-LICENSE_PATTERN = re.compile(r'^Pkg\.License=(?P<text>.*)$', re.MULTILINE)
-
-
-def main(raw_args):
- parser = argparse.ArgumentParser(
- description=__doc__ + 'Please see the subcommand help for more details.',
- formatter_class=utils.DefaultsRawHelpFormatter)
- subparsers = parser.add_subparsers(title='commands')
-
- # Download arguments
- parser_download = subparsers.add_parser(
- 'download',
- help='download the library from the cloud storage',
- description=Download.__doc__,
- formatter_class=utils.DefaultsRawHelpFormatter)
- parser_download.set_defaults(func=Download)
- AddBasicArguments(parser_download)
- AddBucketArguments(parser_download)
-
- # SDK Update arguments
- parser_sdk = subparsers.add_parser(
- 'sdk',
- help='get the latest Google Play services SDK using Android SDK Manager',
- description=UpdateSdk.__doc__,
- formatter_class=utils.DefaultsRawHelpFormatter)
- parser_sdk.set_defaults(func=UpdateSdk)
- AddBasicArguments(parser_sdk)
-
- # Upload arguments
- parser_upload = subparsers.add_parser(
- 'upload',
- help='upload the library to the cloud storage',
- description=Upload.__doc__,
- formatter_class=utils.DefaultsRawHelpFormatter)
-
- parser_upload.add_argument('--skip-git',
- action='store_true',
- help="don't commit the changes at the end")
- parser_upload.set_defaults(func=Upload)
- AddBasicArguments(parser_upload)
- AddBucketArguments(parser_upload)
-
- args = parser.parse_args(raw_args)
- if args.verbose:
- logging.basicConfig(level=logging.DEBUG)
- logging_utils.ColorStreamHandler.MakeDefault(not _IsBotEnvironment())
- devil_chromium.Initialize()
- return args.func(args)
-
-
-def AddBasicArguments(parser):
- '''
- Defines the common arguments on subparser rather than the main one. This
- allows to put arguments after the command: `foo.py upload --debug --force`
- instead of `foo.py --debug upload --force`
- '''
-
- parser.add_argument('--sdk-root',
- help='base path to the Android SDK tools root',
- default=constants.ANDROID_SDK_ROOT)
-
- parser.add_argument('-v', '--verbose',
- action='store_true',
- help='print debug information')
-
-
-def AddBucketArguments(parser):
- parser.add_argument('--bucket',
- help='name of the bucket where the files are stored',
- default=GMS_CLOUD_STORAGE)
-
- parser.add_argument('--config',
- help='JSON Configuration file',
- default=CONFIG_DEFAULT_PATH)
-
- parser.add_argument('--dry-run',
- action='store_true',
- help=('run the script in dry run mode. Files will be '
- 'copied to a local directory instead of the '
- 'cloud storage. The bucket name will be as path '
- 'to that directory relative to the repository '
- 'root.'))
-
- parser.add_argument('-f', '--force',
- action='store_true',
- help='run even if the library is already up to date')
-
-
-def Download(args):
- '''
- Downloads the Google Play services library from a Google Cloud Storage bucket
- and installs it to
- //third_party/android_tools/sdk/extras/google/google_play_services.
-
- A license check will be made, and the user might have to accept the license
- if that has not been done before.
- '''
-
- if not os.path.isdir(args.sdk_root):
- logging.debug('Did not find the Android SDK root directory at "%s".',
- args.sdk_root)
- if not args.force:
- logging.info('Skipping, not on an android checkout.')
- return 0
-
- config = utils.ConfigParser(args.config)
- paths = PlayServicesPaths(args.sdk_root, config.version_xml_path)
-
- if os.path.isdir(paths.package) and not os.access(paths.package, os.W_OK):
- logging.error('Failed updating the Google Play Services library. '
- 'The location is not writable. Please remove the '
- 'directory (%s) and try again.', paths.package)
- return -2
-
- new_lib_zip_sha1 = os.path.join(SHA1_DIRECTORY, ZIP_FILE_NAME + '.sha1')
-
- logging.debug('Comparing zip hashes: %s and %s', new_lib_zip_sha1,
- paths.lib_zip_sha1)
- if utils.FileEquals(new_lib_zip_sha1, paths.lib_zip_sha1) and not args.force:
- logging.info('Skipping, the Google Play services library is up to date.')
- return 0
-
- bucket_path = _VerifyBucketPathFormat(args.bucket,
- config.version_number,
- args.dry_run)
-
- tmp_root = tempfile.mkdtemp()
- try:
- # setup the destination directory
- if not os.path.isdir(paths.package):
- os.makedirs(paths.package)
-
- # download license file from bucket/{version_number}/license.sha1
- new_license = os.path.join(tmp_root, LICENSE_FILE_NAME)
-
- license_sha1 = os.path.join(SHA1_DIRECTORY, LICENSE_FILE_NAME + '.sha1')
- _DownloadFromBucket(bucket_path, license_sha1, new_license,
- args.verbose, args.dry_run)
-
- if (not _IsBotEnvironment() and
- not _CheckLicenseAgreement(new_license, paths.license,
- config.version_number)):
- logging.warning('Your version of the Google Play services library is '
- 'not up to date. You might run into issues building '
- 'or running the app. Please run `%s download` to '
- 'retry downloading it.', __file__)
- return 0
-
- new_lib_zip = os.path.join(tmp_root, ZIP_FILE_NAME)
- _DownloadFromBucket(bucket_path, new_lib_zip_sha1, new_lib_zip,
- args.verbose, args.dry_run)
-
- try:
- # We remove the current version of the Google Play services SDK.
- if os.path.exists(paths.package):
- shutil.rmtree(paths.package)
- os.makedirs(paths.package)
-
- logging.debug('Extracting the library to %s', paths.lib)
- with zipfile.ZipFile(new_lib_zip, "r") as new_lib_zip_file:
- new_lib_zip_file.extractall(paths.lib)
-
- logging.debug('Copying %s to %s', new_license, paths.license)
- shutil.copy(new_license, paths.license)
-
- logging.debug('Copying %s to %s', new_lib_zip_sha1, paths.lib_zip_sha1)
- shutil.copy(new_lib_zip_sha1, paths.lib_zip_sha1)
-
- logging.info('Update complete.')
-
- except Exception as e: # pylint: disable=broad-except
- logging.error('Failed updating the Google Play Services library. '
- 'An error occurred while installing the new version in '
- 'the SDK directory: %s ', e)
- return -3
- finally:
- shutil.rmtree(tmp_root)
-
- return 0
-
-
-def UpdateSdk(args):
- '''
- Uses the Android SDK Manager to download the latest Google Play services SDK
- locally. Its usual installation path is
- //third_party/android_tools/sdk/extras/google/google_play_services
- '''
-
- # This should function should not run on bots and could fail for many user
- # and setup related reasons. Also, exceptions here are not caught, so we
- # disable breakpad to avoid spamming the logs.
- breakpad.IS_ENABLED = False
-
- sdk_manager = os.path.join(args.sdk_root, 'tools', 'android')
- cmd = [sdk_manager, 'update', 'sdk', '--no-ui', '--filter', GMS_PACKAGE_ID]
- cmd_helper.Call(cmd)
- # If no update is needed, it still returns successfully so we just do nothing
-
- return 0
-
-
-def Upload(args):
- '''
- Uploads the library from the local Google Play services SDK to a Google Cloud
- storage bucket.
-
- By default, a local commit will be made at the end of the operation.
- '''
-
- # This should function should not run on bots and could fail for many user
- # and setup related reasons. Also, exceptions here are not caught, so we
- # disable breakpad to avoid spamming the logs.
- breakpad.IS_ENABLED = False
-
- config = utils.ConfigParser(args.config)
- paths = PlayServicesPaths(args.sdk_root, config.version_xml_path)
-
- if not args.skip_git and utils.IsRepoDirty(host_paths.DIR_SOURCE_ROOT):
- logging.error('The repo is dirty. Please commit or stash your changes.')
- return -1
-
- new_version_number = utils.GetVersionNumberFromLibraryResources(
- paths.version_xml)
- logging.debug('comparing versions: new=%d, old=%s',
- new_version_number, config.version_number)
- if new_version_number <= config.version_number and not args.force:
- logging.info('The checked in version of the library is already the latest '
- 'one. No update is needed. Please rerun with --force to skip '
- 'this check.')
- return 0
-
- tmp_root = tempfile.mkdtemp()
- try:
- new_lib_zip = os.path.join(tmp_root, ZIP_FILE_NAME)
- new_license = os.path.join(tmp_root, LICENSE_FILE_NAME)
-
- # need to strip '.zip' from the file name here
- shutil.make_archive(new_lib_zip[:-4], 'zip', paths.lib)
- _ExtractLicenseFile(new_license, paths.source_prop)
-
- bucket_path = _VerifyBucketPathFormat(args.bucket, new_version_number,
- args.dry_run)
- files_to_upload = [new_lib_zip, new_license]
- logging.debug('Uploading %s to %s', files_to_upload, bucket_path)
- _UploadToBucket(bucket_path, files_to_upload, args.dry_run)
-
- new_lib_zip_sha1 = os.path.join(SHA1_DIRECTORY,
- ZIP_FILE_NAME + '.sha1')
- new_license_sha1 = os.path.join(SHA1_DIRECTORY,
- LICENSE_FILE_NAME + '.sha1')
- shutil.copy(new_lib_zip + '.sha1', new_lib_zip_sha1)
- shutil.copy(new_license + '.sha1', new_license_sha1)
- finally:
- shutil.rmtree(tmp_root)
-
- config.UpdateVersionNumber(new_version_number)
-
- if not args.skip_git:
- commit_message = ('Update the Google Play services dependency to %s\n'
- '\n') % new_version_number
- utils.MakeLocalCommit(host_paths.DIR_SOURCE_ROOT,
- [new_lib_zip_sha1, new_license_sha1, config.path],
- commit_message)
-
- return 0
-
-
-def _DownloadFromBucket(bucket_path, sha1_file, destination, verbose,
- is_dry_run):
- '''Downloads the file designated by the provided sha1 from a cloud bucket.'''
-
- download_from_google_storage.download_from_google_storage(
- input_filename=sha1_file,
- base_url=bucket_path,
- gsutil=_InitGsutil(is_dry_run),
- num_threads=1,
- directory=None,
- recursive=False,
- force=False,
- output=destination,
- ignore_errors=False,
- sha1_file=sha1_file,
- verbose=verbose,
- auto_platform=True,
- extract=False)
-
-
-def _UploadToBucket(bucket_path, files_to_upload, is_dry_run):
- '''Uploads the files designated by the provided paths to a cloud bucket. '''
-
- upload_to_google_storage.upload_to_google_storage(
- input_filenames=files_to_upload,
- base_url=bucket_path,
- gsutil=_InitGsutil(is_dry_run),
- force=False,
- use_md5=False,
- num_threads=1,
- skip_hashing=False,
- gzip=None)
-
-
-def _InitGsutil(is_dry_run):
- '''Initialize the Gsutil object as regular or dummy version for dry runs. '''
-
- if is_dry_run:
- return DummyGsutil()
- else:
- return download_from_google_storage.Gsutil(
- download_from_google_storage.GSUTIL_DEFAULT_PATH)
-
-
-def _ExtractLicenseFile(license_path, prop_file_path):
- with open(prop_file_path, 'r') as prop_file:
- prop_file_content = prop_file.read()
-
- match = LICENSE_PATTERN.search(prop_file_content)
- if not match:
- raise AttributeError('The license was not found in ' +
- os.path.abspath(prop_file_path))
-
- with open(license_path, 'w') as license_file:
- license_file.write(match.group('text'))
-
-
-def _CheckLicenseAgreement(expected_license_path, actual_license_path,
- version_number):
- '''
- Checks that the new license is the one already accepted by the user. If it
- isn't, it prompts the user to accept it. Returns whether the expected license
- has been accepted.
- '''
-
- if utils.FileEquals(expected_license_path, actual_license_path):
- return True
-
- with open(expected_license_path) as license_file:
- # Uses plain print rather than logging to make sure this is not formatted
- # by the logger.
- print ('Updating the Google Play services SDK to '
- 'version %d.' % version_number)
-
- # The output is buffered when running as part of gclient hooks. We split
- # the text here and flush is explicitly to avoid having part of it dropped
- # out.
- # Note: text contains *escaped* new lines, so we split by '\\n', not '\n'.
- for license_part in license_file.read().split('\\n'):
- print license_part
- sys.stdout.flush()
-
- # Need to put the prompt on a separate line otherwise the gclient hook buffer
- # only prints it after we received an input.
- print 'Do you accept the license? [y/n]: '
- sys.stdout.flush()
- return raw_input('> ') in ('Y', 'y')
-
-
-def _IsBotEnvironment():
- return bool(os.environ.get('CHROME_HEADLESS'))
-
-
-def _VerifyBucketPathFormat(bucket_name, version_number, is_dry_run):
- '''
- Formats and checks the download/upload path depending on whether we are
- running in dry run mode or not. Returns a supposedly safe path to use with
- Gsutil.
- '''
-
- if is_dry_run:
- bucket_path = os.path.abspath(os.path.join(bucket_name,
- str(version_number)))
- if not os.path.isdir(bucket_path):
- os.makedirs(bucket_path)
- else:
- if bucket_name.startswith('gs://'):
- # We enforce the syntax without gs:// for consistency with the standalone
- # download/upload scripts and to make dry run transition easier.
- raise AttributeError('Please provide the bucket name without the gs:// '
- 'prefix (e.g. %s)' % GMS_CLOUD_STORAGE)
- bucket_path = 'gs://%s/%d' % (bucket_name, version_number)
-
- return bucket_path
-
-
-class PlayServicesPaths(object):
- '''
- Describes the different paths to be used in the update process.
-
- Filesystem hierarchy | Exposed property / notes
- ---------------------------------------------------|-------------------------
- [sdk_root] | sdk_root / (1)
- +- extras |
- +- google |
- +- google_play_services | package / (2)
- +- source.properties | source_prop / (3)
- +- LICENSE | license / (4)
- +- google_play_services_library.zip.sha1 | lib_zip_sha1 / (5)
- +- libproject |
- +- google-play-services_lib | lib / (6)
- +- res |
- +- values |
- +- version.xml | version_xml (7)
-
- Notes:
-
- 1. sdk_root: Path provided as a parameter to the script (--sdk_root)
- 2. package: This directory contains the Google Play services SDK itself.
- When downloaded via the Android SDK manager, it will contain,
- documentation, samples and other files in addition to the library. When
- the update script downloads the library from our cloud storage, it is
- cleared.
- 3. source_prop: File created by the Android SDK manager that contains
- the package information, such as the version info and the license.
- 4. license: File created by the update script. Contains the license accepted
- by the user.
- 5. lib_zip_sha1: sha1 of the library zip that has been installed by the
- update script. It is compared with the one required by the config file to
- check if an update is necessary.
- 6. lib: Contains the library itself: jar and resources. This is what is
- downloaded from the cloud storage.
- 7. version_xml: File that contains the exact Google Play services library
- version, the one that we track. The version looks like 811500, is used in
- the code and the on-device APK, as opposed to the SDK package version
- which looks like 27.0.0 and is used only by the Android SDK manager.
-
- '''
-
- def __init__(self, sdk_root, version_xml_path):
- relative_package = os.path.join('extras', 'google', 'google_play_services')
- relative_lib = os.path.join(relative_package, 'libproject',
- 'google-play-services_lib')
- self.sdk_root = sdk_root
-
- self.package = os.path.join(sdk_root, relative_package)
- self.lib_zip_sha1 = os.path.join(self.package, ZIP_FILE_NAME + '.sha1')
- self.license = os.path.join(self.package, LICENSE_FILE_NAME)
- self.source_prop = os.path.join(self.package, 'source.properties')
-
- self.lib = os.path.join(sdk_root, relative_lib)
- self.version_xml = os.path.join(self.lib, version_xml_path)
-
-
-class DummyGsutil(download_from_google_storage.Gsutil):
- '''
- Class that replaces Gsutil to use a local directory instead of an online
- bucket. It relies on the fact that Gsutil commands are very similar to shell
- ones, so for the ones used here (ls, cp), it works to just use them with a
- local directory.
- '''
-
- def __init__(self):
- super(DummyGsutil, self).__init__(
- download_from_google_storage.GSUTIL_DEFAULT_PATH)
-
- def call(self, *args):
- logging.debug('Calling command "%s"', str(args))
- return cmd_helper.GetCmdStatusOutputAndError(args)
-
- def check_call(self, *args):
- logging.debug('Calling command "%s"', str(args))
- return cmd_helper.GetCmdStatusOutputAndError(args)
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/build/android/play_services/update_test.py b/build/android/play_services/update_test.py
deleted file mode 100755
index fd68154..0000000
--- a/build/android/play_services/update_test.py
+++ /dev/null
@@ -1,416 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-'''Unittests for update.py.
-
-They set up a temporary directory that is used to mock a bucket, the directory
-containing the configuration files and the android sdk directory.
-
-Tests run the script with various inputs and check the status of the filesystem
-'''
-
-import shutil
-import tempfile
-import unittest
-import os
-import sys
-import zipfile
-import contextlib
-
-sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
-from play_services import update
-
-
-class TestFunctions(unittest.TestCase):
- DEFAULT_CONFIG_VERSION = 42
- DEFAULT_LICENSE = 'Default License'
- DEFAULT_ZIP_SHA1 = 'zip0and0filling0to0forty0chars0000000000'
-
- def __init__(self, *args, **kwargs):
- super(TestFunctions, self).__init__(*args, **kwargs)
- self.paths = None # Initialized in SetUpWorkdir
- self.workdir = None # Initialized in setUp
-
- #override
- def setUp(self):
- self.workdir = tempfile.mkdtemp()
-
- #override
- def tearDown(self):
- shutil.rmtree(self.workdir)
- self.workdir = None
-
- def testUpload(self):
- version = 1337
- self.SetUpWorkdir(
- xml_version=version,
- gms_lib=True,
- source_prop=True)
-
- status = update.main([
- 'upload',
- '--dry-run',
- '--skip-git',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root
- ])
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # bucket should contain license, name = license.sha1
- self.assertTrue(os.path.isfile(self.paths.config_license_sha1))
- license_sha1 = _GetFileContent(self.paths.config_license_sha1)
- bucket_license = os.path.join(self.paths.bucket, str(version),
- license_sha1)
- self.assertTrue(os.path.isfile(bucket_license))
- self.assertEqual(_GetFileContent(bucket_license), self.DEFAULT_LICENSE)
-
- # bucket should contain zip, name = zip.sha1
- self.assertTrue(os.path.isfile(self.paths.config_zip_sha1))
- bucket_zip = os.path.join(self.paths.bucket, str(version),
- _GetFileContent(self.paths.config_zip_sha1))
- self.assertTrue(os.path.isfile(bucket_zip))
-
- # unzip, should contain expected files
- with zipfile.ZipFile(bucket_zip, "r") as bucket_zip_file:
- self.assertEqual(bucket_zip_file.namelist(),
- ['dummy_file', 'res/values/version.xml'])
-
- def testUploadAlreadyLatestVersion(self):
- self.SetUpWorkdir(
- xml_version=self.DEFAULT_CONFIG_VERSION,
- gms_lib=True,
- source_prop=True)
-
- status = update.main([
- 'upload',
- '--dry-run',
- '--skip-git',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root,
- ])
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # bucket should be empty
- self.assertFalse(os.listdir(self.paths.bucket))
- self.assertFalse(os.path.isfile(self.paths.config_license_sha1))
- self.assertFalse(os.path.isfile(self.paths.config_zip_sha1))
-
- def testDownload(self):
- self.SetUpWorkdir(populate_bucket=True)
-
- with _MockedInput('y'):
- status = update.main([
- 'download',
- '--dry-run',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root,
- ])
-
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # sdk_root should contain zip contents, zip sha1, license
- self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib,
- 'dummy_file')))
- self.assertTrue(os.path.isfile(self.paths.gms_root_sha1))
- self.assertTrue(os.path.isfile(self.paths.gms_root_license))
- self.assertEquals(_GetFileContent(self.paths.gms_root_license),
- self.DEFAULT_LICENSE)
-
- def testDownloadBot(self):
- self.SetUpWorkdir(populate_bucket=True, bot_env=True)
-
- # No need to type 'y' on bots
- status = update.main([
- 'download',
- '--dry-run',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root,
- ])
-
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # sdk_root should contain zip contents, zip sha1, license
- self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib,
- 'dummy_file')))
- self.assertTrue(os.path.isfile(self.paths.gms_root_sha1))
- self.assertTrue(os.path.isfile(self.paths.gms_root_license))
- self.assertEquals(_GetFileContent(self.paths.gms_root_license),
- self.DEFAULT_LICENSE)
-
- def testDownloadAlreadyUpToDate(self):
- self.SetUpWorkdir(
- populate_bucket=True,
- existing_zip_sha1=self.DEFAULT_ZIP_SHA1)
-
- status = update.main([
- 'download',
- '--dry-run',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root,
- ])
-
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # there should not be new files downloaded to sdk_root
- self.assertFalse(os.path.isfile(os.path.join(self.paths.gms_lib,
- 'dummy_file')))
- self.assertFalse(os.path.isfile(self.paths.gms_root_license))
-
- def testDownloadAcceptedLicense(self):
- self.SetUpWorkdir(
- populate_bucket=True,
- existing_license=self.DEFAULT_LICENSE)
-
- # License already accepted, no need to type
- status = update.main([
- 'download',
- '--dry-run',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root,
- ])
-
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # sdk_root should contain zip contents, zip sha1, license
- self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib,
- 'dummy_file')))
- self.assertTrue(os.path.isfile(self.paths.gms_root_sha1))
- self.assertTrue(os.path.isfile(self.paths.gms_root_license))
- self.assertEquals(_GetFileContent(self.paths.gms_root_license),
- self.DEFAULT_LICENSE)
-
- def testDownloadNewLicense(self):
- self.SetUpWorkdir(
- populate_bucket=True,
- existing_license='Old license')
-
- with _MockedInput('y'):
- status = update.main([
- 'download',
- '--dry-run',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root,
- ])
-
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # sdk_root should contain zip contents, zip sha1, NEW license
- self.assertTrue(os.path.isfile(os.path.join(self.paths.gms_lib,
- 'dummy_file')))
- self.assertTrue(os.path.isfile(self.paths.gms_root_sha1))
- self.assertTrue(os.path.isfile(self.paths.gms_root_license))
- self.assertEquals(_GetFileContent(self.paths.gms_root_license),
- self.DEFAULT_LICENSE)
-
- def testDownloadRefusedLicense(self):
- self.SetUpWorkdir(
- populate_bucket=True,
- existing_license='Old license')
-
- with _MockedInput('n'):
- status = update.main([
- 'download',
- '--dry-run',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', self.paths.sdk_root,
- ])
-
- self.assertEqual(status, 0, 'the command should have succeeded.')
-
- # there should not be new files downloaded to sdk_root
- self.assertFalse(os.path.isfile(os.path.join(self.paths.gms_lib,
- 'dummy_file')))
- self.assertEquals(_GetFileContent(self.paths.gms_root_license),
- 'Old license')
-
- def testDownloadNoAndroidSDK(self):
- self.SetUpWorkdir(
- populate_bucket=True,
- existing_license='Old license')
-
- non_existing_sdk_root = os.path.join(self.workdir, 'non_existing_sdk_root')
- # Should not run, no typing needed
- status = update.main([
- 'download',
- '--dry-run',
- '--bucket', self.paths.bucket,
- '--config', self.paths.config_file,
- '--sdk-root', non_existing_sdk_root,
- ])
-
- self.assertEqual(status, 0, 'the command should have succeeded.')
- self.assertFalse(os.path.isdir(non_existing_sdk_root))
-
- def SetUpWorkdir(self,
- bot_env=False,
- config_version=DEFAULT_CONFIG_VERSION,
- existing_license=None,
- existing_zip_sha1=None,
- gms_lib=False,
- populate_bucket=False,
- source_prop=None,
- xml_version=None):
- '''Prepares workdir by putting it in the specified state
-
- Args:
- - general
- bot_env: sets or unsets CHROME_HEADLESS
-
- - bucket
- populate_bucket: boolean. Populate the bucket with a zip and license
- file. The sha1s will be copied to the config directory
-
- - config
- config_version: number. Version of the current SDK. Defaults to
- `self.DEFAULT_CONFIG_VERSION`
-
- - sdk_root
- existing_license: string. Create a LICENSE file setting the specified
- text as content of the currently accepted license.
- existing_zip_sha1: string. Create a sha1 file setting the specified
- hash as hash of the SDK supposed to be installed
- gms_lib: boolean. Create a dummy file in the location of the play
- services SDK.
- source_prop: boolean. Create a source.properties file that contains
- the license to upload.
- xml_version: number. Create a version.xml file with the specified
- version that is used when uploading
- '''
- self.paths = Paths(self.workdir)
-
- # Create the main directories
- _MakeDirs(self.paths.sdk_root)
- _MakeDirs(self.paths.config_dir)
- _MakeDirs(self.paths.bucket)
-
- # is not configured via argument.
- update.SHA1_DIRECTORY = self.paths.config_dir
-
- os.environ['CHROME_HEADLESS'] = '1' if bot_env else ''
-
- if config_version:
- _MakeDirs(os.path.dirname(self.paths.config_file))
- with open(self.paths.config_file, 'w') as stream:
- stream.write(('{"version_number":%d,'
- '"version_xml_path": "res/values/version.xml"}'
- '\n') % config_version)
-
- if existing_license:
- _MakeDirs(self.paths.gms_root)
- with open(self.paths.gms_root_license, 'w') as stream:
- stream.write(existing_license)
-
- if existing_zip_sha1:
- _MakeDirs(self.paths.gms_root)
- with open(self.paths.gms_root_sha1, 'w') as stream:
- stream.write(existing_zip_sha1)
-
- if gms_lib:
- _MakeDirs(self.paths.gms_lib)
- with open(os.path.join(self.paths.gms_lib, 'dummy_file'), 'w') as stream:
- stream.write('foo\n')
-
- if source_prop:
- _MakeDirs(os.path.dirname(self.paths.source_prop))
- with open(self.paths.source_prop, 'w') as stream:
- stream.write('Foo=Bar\n'
- 'Pkg.License=%s\n'
- 'Baz=Fizz\n' % self.DEFAULT_LICENSE)
-
- if populate_bucket:
- _MakeDirs(self.paths.config_dir)
- bucket_dir = os.path.join(self.paths.bucket, str(config_version))
- _MakeDirs(bucket_dir)
-
- # TODO(dgn) should we use real sha1s? comparison with the real sha1 is
- # done but does not do anything other than displaying a message.
- config_license_sha1 = 'license0and0filling0to0forty0chars000000'
- with open(self.paths.config_license_sha1, 'w') as stream:
- stream.write(config_license_sha1)
-
- with open(os.path.join(bucket_dir, config_license_sha1), 'w') as stream:
- stream.write(self.DEFAULT_LICENSE)
-
- config_zip_sha1 = self.DEFAULT_ZIP_SHA1
- with open(self.paths.config_zip_sha1, 'w') as stream:
- stream.write(config_zip_sha1)
-
- pre_zip_lib = os.path.join(self.workdir, 'pre_zip_lib')
- post_zip_lib = os.path.join(bucket_dir, config_zip_sha1)
- _MakeDirs(pre_zip_lib)
- with open(os.path.join(pre_zip_lib, 'dummy_file'), 'w') as stream:
- stream.write('foo\n')
- shutil.make_archive(post_zip_lib, 'zip', pre_zip_lib)
- # make_archive appends .zip
- shutil.move(post_zip_lib + '.zip', post_zip_lib)
-
- if xml_version:
- _MakeDirs(os.path.dirname(self.paths.xml_version))
- with open(self.paths.xml_version, 'w') as stream:
- stream.write(
- '<?xml version="1.0" encoding="utf-8"?>\n'
- '<resources>\n'
- ' <integer name="google_play_services_version">%d</integer>\n'
- '</resources>\n' % xml_version)
-
-
-class Paths(object):
- '''Declaration of the paths commonly manipulated in the tests.'''
-
- def __init__(self, workdir):
- self.bucket = os.path.join(workdir, 'bucket')
-
- self.config_dir = os.path.join(workdir, 'config')
- self.config_file = os.path.join(self.config_dir, 'config.json')
- self.config_license_sha1 = os.path.join(self.config_dir, 'LICENSE.sha1')
- self.config_zip_sha1 = os.path.join(
- self.config_dir,
- 'google_play_services_library.zip.sha1')
-
- self.sdk_root = os.path.join(workdir, 'sdk_root')
- self.gms_root = os.path.join(self.sdk_root, 'extras', 'google',
- 'google_play_services')
- self.gms_root_sha1 = os.path.join(self.gms_root,
- 'google_play_services_library.zip.sha1')
- self.gms_root_license = os.path.join(self.gms_root, 'LICENSE')
- self.source_prop = os.path.join(self.gms_root, 'source.properties')
- self.gms_lib = os.path.join(self.gms_root, 'libproject',
- 'google-play-services_lib')
- self.xml_version = os.path.join(self.gms_lib, 'res', 'values',
- 'version.xml')
-
-
-def _GetFileContent(file_path):
- with open(file_path, 'r') as stream:
- return stream.read()
-
-
-def _MakeDirs(path):
- '''Avoids having to do the error handling everywhere.'''
- if not os.path.exists(path):
- os.makedirs(path)
-
-
-@contextlib.contextmanager
-def _MockedInput(typed_string):
- '''Makes raw_input return |typed_string| while inside the context.'''
- try:
- original_raw_input = __builtins__.raw_input
- __builtins__.raw_input = lambda _: typed_string
- yield
- finally:
- __builtins__.raw_input = original_raw_input
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/build/android/play_services/utils.py b/build/android/play_services/utils.py
deleted file mode 100644
index 0e6d5a8..0000000
--- a/build/android/play_services/utils.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-'''
-Utility functions for all things related to manipulating google play services
-related files.
-'''
-
-import argparse
-import filecmp
-import json
-import logging
-import os
-import re
-import sys
-
-sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
-from devil.utils import cmd_helper
-
-
-_XML_VERSION_NUMBER_PATTERN = re.compile(
- r'<integer name="google_play_services_version">(\d+)<\/integer>')
-
-
-class DefaultsRawHelpFormatter(argparse.ArgumentDefaultsHelpFormatter,
- argparse.RawDescriptionHelpFormatter):
- '''
- Combines the features of RawDescriptionHelpFormatter and
- ArgumentDefaultsHelpFormatter, providing defaults for the arguments and raw
- text for the description.
- '''
- pass
-
-
-class ConfigParser(object):
- '''Reads and writes the configuration files for play services related scripts
-
- The configuration files are JSON files. Here is the data they are expected
- to contain:
-
- - version_number
- Number. Mirrors @integer/google_play_services_version from the library.
- Example: 815000
-
- - sdk_version
- Version of the Play Services SDK to retrieve, when preprocessing the
- library from a maven/gradle repository.
- Example: "8.1.0"
-
- - clients
- List of strings. Name of the clients (or play services modules) to
- include when preprocessing the library.
- Example: ["play-services-base", "play-services-cast"]
-
- - version_xml_path
- String. Path to the version.xml string describing the current version.
- Should be relative to the library base directory
- Example: "res/values/version.xml"
-
- - locale_whitelist
- List of strings. List of locales to keep from the resources. Can be
- obtained by generating an android build and looking at the content of
- `out/Debug/gen/chrome/java/res`; or looking at the android section in
- `//chrome/app/generated_resources.grd`
- Example: ["am", "ar", "bg", "ca", "cs"]
-
- '''
- _VERSION_NUMBER_KEY = 'version_number'
-
- def __init__(self, path):
- self.path = path
- self._data = {}
-
- with open(path, 'r') as stream:
- self._data = json.load(stream)
-
- @property
- def version_number(self):
- return self._data.get(self._VERSION_NUMBER_KEY)
-
- @property
- def sdk_version(self):
- return self._data.get('sdk_version')
-
- @property
- def clients(self):
- return self._data.get('clients') or []
-
- @property
- def version_xml_path(self):
- return self._data.get('version_xml_path')
-
- @property
- def locale_whitelist(self):
- return self._data.get('locale_whitelist') or []
-
- def UpdateVersionNumber(self, new_version_number):
- '''Updates the version number and saves it in the configuration file. '''
-
- with open(self.path, 'w') as stream:
- self._data[self._VERSION_NUMBER_KEY] = new_version_number
- stream.write(DumpTrimmedJson(self._data))
-
-
-def DumpTrimmedJson(json_data):
- '''
- Default formatting when dumping json to string has trailing spaces and lacks
- a new line at the end. This function fixes that.
- '''
-
- out = json.dumps(json_data, sort_keys=True, indent=2)
- out = out.replace(' ' + os.linesep, os.linesep)
- return out + os.linesep
-
-
-def FileEquals(expected_file, actual_file):
- '''
- Returns whether the two files are equal. Returns False if any of the files
- doesn't exist.
- '''
-
- if not os.path.isfile(actual_file) or not os.path.isfile(expected_file):
- return False
- return filecmp.cmp(expected_file, actual_file)
-
-
-def IsRepoDirty(repo_root):
- '''Returns True if there are no staged or modified files, False otherwise.'''
-
- # diff-index returns 1 if there are staged changes or modified files,
- # 0 otherwise
- cmd = ['git', 'diff-index', '--quiet', 'HEAD']
- return cmd_helper.Call(cmd, cwd=repo_root) == 1
-
-
-def GetVersionNumberFromLibraryResources(version_xml):
- '''
- Extracts a Google Play services version number from its version.xml file.
- '''
-
- with open(version_xml, 'r') as version_file:
- version_file_content = version_file.read()
-
- match = _XML_VERSION_NUMBER_PATTERN.search(version_file_content)
- if not match:
- raise AttributeError('A value for google_play_services_version was not '
- 'found in ' + version_xml)
- return int(match.group(1))
-
-
-def MakeLocalCommit(repo_root, files_to_commit, message):
- '''Makes a local git commit.'''
-
- logging.debug('Staging files (%s) for commit.', files_to_commit)
- if cmd_helper.Call(['git', 'add'] + files_to_commit, cwd=repo_root) != 0:
- raise Exception('The local commit failed.')
-
- logging.debug('Committing.')
- if cmd_helper.Call(['git', 'commit', '-m', message], cwd=repo_root) != 0:
- raise Exception('The local commit failed.')
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
index aff3b2f..bd4abeb 100644
--- a/build/config/android/config.gni
+++ b/build/config/android/config.gni
@@ -30,16 +30,6 @@ if (is_android) {
default_android_keystore_password = "chromium"
}
- if (!defined(google_play_services_library)) {
- google_play_services_library =
- "//third_party/android_tools:google_play_services_default_java"
- }
-
- if (!defined(google_play_services_resources)) {
- google_play_services_resources =
- "//third_party/android_tools:google_play_services_default_resources"
- }
-
if (!defined(webview_framework_jar)) {
webview_framework_jar =
"//third_party/android_platform/webview/frameworks_6.0.jar"
diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn
index e256258..0109611 100644
--- a/build/secondary/third_party/android_tools/BUILD.gn
+++ b/build/secondary/third_party/android_tools/BUILD.gn
@@ -101,23 +101,6 @@ android_java_prebuilt("android_support_v7_recyclerview_java") {
jar_path = "$android_sdk_root/extras/android/support/v7/recyclerview/libs/android-support-v7-recyclerview.jar"
}
-android_resources("google_play_services_default_resources") {
- v14_skip = true
- resource_dirs = [ "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/res" ]
- custom_package = "com.google.android.gms"
-}
-android_java_prebuilt("google_play_services_default_java") {
- deps = [
- ":android_support_v13_java",
- ":android_support_v7_mediarouter_java",
- ":google_play_services_default_resources",
- ":legacy_http_javalib",
- ]
- proguard_preprocess = true
- proguard_config = "//third_party/android_tools/proguard.flags"
- jar_path = "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar"
-}
-
# TODO(jbudorick): Remove this once net_java_test_support no longer needs it.
android_java_prebuilt("legacy_http_javalib") {
jar_path = "$android_sdk/optional/org.apache.http.legacy.jar"
diff --git a/chrome/android/BUILD.gn b/chrome/android/BUILD.gn
index 5e50b32..0dff611 100644
--- a/chrome/android/BUILD.gn
+++ b/chrome/android/BUILD.gn
@@ -157,7 +157,6 @@ android_library("chrome_java") {
"//third_party/jsr-305:jsr_305_javalib",
"//ui/android:ui_java",
"//ui/android:ui_java_resources",
- google_play_services_library,
]
srcjar_deps = [
@@ -286,7 +285,6 @@ junit_binary("chrome_junit_tests") {
"//third_party/android_tools:android_support_v7_mediarouter_java",
"//third_party/cacheinvalidation:cacheinvalidation_javalib",
"//third_party/junit:hamcrest",
- google_play_services_library,
]
}
@@ -361,7 +359,6 @@ android_library("chrome_shared_test_java") {
"//third_party/jsr-305:jsr_305_javalib",
"//ui/android:ui_java",
"//ui/android:ui_javatests",
- google_play_services_library,
]
}
@@ -471,7 +468,6 @@ template("chrome_public_apk_tmpl_shared") {
":chrome_public_apk_assets",
":chrome_public_apk_resources",
"//base:base_java",
- google_play_services_resources,
]
}
}
diff --git a/chrome/android/chrome_apk.gyp b/chrome/android/chrome_apk.gyp
index d6d3312..abe5871 100644
--- a/chrome/android/chrome_apk.gyp
+++ b/chrome/android/chrome_apk.gyp
@@ -301,7 +301,6 @@
'../../net/net.gyp:net_java_test_support',
'../../sync/sync.gyp:sync_java_test_support',
'../../third_party/android_tools/android_tools.gyp:android_support_v7_appcompat_javalib',
- '../../third_party/android_tools/android_tools.gyp:google_play_services_javalib',
'../../ui/android/ui_android.gyp:ui_javatests',
],
'includes': [ '../../build/java.gypi' ],
diff --git a/chrome/chrome.gyp b/chrome/chrome.gyp
index 1f5fd54..18f1604 100644
--- a/chrome/chrome.gyp
+++ b/chrome/chrome.gyp
@@ -582,7 +582,6 @@
'../third_party/android_tools/android_tools.gyp:android_support_v7_mediarouter_javalib',
'../third_party/android_tools/android_tools.gyp:android_support_v7_recyclerview_javalib',
'../third_party/android_tools/android_tools.gyp:android_support_v13_javalib',
- '../third_party/android_tools/android_tools.gyp:google_play_services_javalib',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib',
'../third_party/gif_player/gif_player.gyp:gif_player_java',
'../third_party/jsr-305/jsr-305.gyp:jsr_305_javalib',
diff --git a/chrome/chrome_tests.gypi b/chrome/chrome_tests.gypi
index 303463b..0026e14 100644
--- a/chrome/chrome_tests.gypi
+++ b/chrome/chrome_tests.gypi
@@ -3190,7 +3190,6 @@
'../base/base.gyp:base_java',
'../third_party/android_tools/android_tools.gyp:android_support_v7_appcompat_javalib',
'../third_party/android_tools/android_tools.gyp:android_support_v7_mediarouter_javalib',
- '../third_party/android_tools/android_tools.gyp:google_play_services_javalib',
],
'variables': {
'java_in_dir': '../chrome/test/android/cast_emulator',
diff --git a/remoting/remoting_android.gypi b/remoting/remoting_android.gypi
index eb0a13d..9aa1362 100644
--- a/remoting/remoting_android.gypi
+++ b/remoting/remoting_android.gypi
@@ -9,9 +9,6 @@
# These hooks allow official builds to modify the remoting_apk target:
# Official build of remoting_apk pulls in extra code.
'remoting_apk_extra_dependencies%': [],
- # A different ProGuard config for Google Play Services is needed since the one used by
- # Chromium and Google Chrome strips out code that we need.
- 'remoting_android_google_play_services_javalib%': '../third_party/android_tools/android_tools.gyp:google_play_services_javalib',
# Allows official builds to define the ApplicationContext class differently, and provide
# different implementations of parts of the product.
'remoting_apk_java_in_dir%': 'android/apk',
@@ -125,7 +122,6 @@
'../third_party/android_tools/android_tools.gyp:android_support_v7_mediarouter_javalib',
'../third_party/android_tools/android_tools.gyp:android_support_v13_javalib',
'../third_party/cardboard-java/cardboard.gyp:cardboard_jar',
- '<(remoting_android_google_play_services_javalib)',
],
'includes': [ '../build/java.gypi' ],
'conditions' : [
diff --git a/sync/android/BUILD.gn b/sync/android/BUILD.gn
index 0f6d2e4..c359695 100644
--- a/sync/android/BUILD.gn
+++ b/sync/android/BUILD.gn
@@ -13,7 +13,6 @@ android_library("sync_java") {
"//third_party/cacheinvalidation:cacheinvalidation_javalib",
"//third_party/cacheinvalidation:cacheinvalidation_proto_java",
"//third_party/jsr-305:jsr_305_javalib",
- google_play_services_library,
]
srcjar_deps = [ ":java_enums" ]
DEPRECATED_java_in_dir = "java/src"
diff --git a/sync/sync_android.gypi b/sync/sync_android.gypi
index 5ea0f3a..05b5141 100644
--- a/sync/sync_android.gypi
+++ b/sync/sync_android.gypi
@@ -19,7 +19,6 @@
'sync_jni_headers',
'../base/base.gyp:base_java',
'../net/net.gyp:net_java',
- '../third_party/android_tools/android_tools.gyp:google_play_services_javalib',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib',
'../third_party/jsr-305/jsr-305.gyp:jsr_305_javalib',
],
diff --git a/third_party/cacheinvalidation/cacheinvalidation.gyp b/third_party/cacheinvalidation/cacheinvalidation.gyp
index dea1f54..7755807 100644
--- a/third_party/cacheinvalidation/cacheinvalidation.gyp
+++ b/third_party/cacheinvalidation/cacheinvalidation.gyp
@@ -223,7 +223,6 @@
'type': 'none',
'dependencies': [
'../../third_party/android_tools/android_tools.gyp:android_gcm',
- '../../third_party/android_tools/android_tools.gyp:google_play_services_javalib',
'cacheinvalidation_proto_java',
],
'variables': {