author     phajdan.jr <phajdan.jr@chromium.org>  2015-08-07 08:00:37 -0700
committer  Commit bot <commit-bot@chromium.org>  2015-08-07 15:01:23 +0000
commit     49d89bd71ce87ebe6f504a871832fcd1e0cc5fa3 (patch)
tree       006df6d2cc1a84026b81ce1f19a5441f3a3240ff
parent     b483c2b4ad35e2119b2956ece1d4d71dd2631ed3 (diff)
download   chromium_src-49d89bd71ce87ebe6f504a871832fcd1e0cc5fa3.zip
           chromium_src-49d89bd71ce87ebe6f504a871832fcd1e0cc5fa3.tar.gz
           chromium_src-49d89bd71ce87ebe6f504a871832fcd1e0cc5fa3.tar.bz2
Remove unused code from infra/scripts/legacy
BUG=506498
Review URL: https://codereview.chromium.org/1276233002
Cr-Commit-Position: refs/heads/master@{#342354}
-rw-r--r--  infra/scripts/legacy/scripts/common/chromium_utils.py   1139
-rw-r--r--  infra/scripts/legacy/scripts/slave/build_directory.py     51
-rwxr-xr-x  infra/scripts/legacy/scripts/slave/gtest_slave_utils.py  105
-rwxr-xr-x  infra/scripts/legacy/scripts/slave/runtest.py              8
-rw-r--r--  infra/scripts/legacy/scripts/slave/slave_utils.py        498
5 files changed, 2 insertions, 1799 deletions
diff --git a/infra/scripts/legacy/scripts/common/chromium_utils.py b/infra/scripts/legacy/scripts/common/chromium_utils.py
index b41cd44..89ebf8b 100644
--- a/infra/scripts/legacy/scripts/common/chromium_utils.py
+++ b/infra/scripts/legacy/scripts/common/chromium_utils.py
@@ -6,33 +6,22 @@
from contextlib import contextmanager
import ast
-import base64
import cStringIO
import copy
import errno
import fnmatch
import glob
-import math
-import multiprocessing
+import json
import os
import re
import shutil
import socket
import stat
-import string # pylint: disable=W0402
import subprocess
import sys
import threading
import time
import traceback
-import urllib
-import zipfile
-import zlib
-
-try:
- import json # pylint: disable=F0401
-except ImportError:
- import simplejson as json
from common import env
@@ -41,59 +30,6 @@ BUILD_DIR = os.path.realpath(os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir))
-WIN_LINK_FUNC = None
-try:
- if sys.platform.startswith('win'):
- import ctypes
- # There are 4 possibilities on Windows for links:
- # 1. Symbolic file links;
- # 2. Symbolic directory links;
- # 3. Hardlinked files;
- # 4. Junctioned directories.
- # (Hardlinked directories don't really exist.)
- #
- # 7-Zip does not handle symbolic file links as we want (it puts the
- # content of the link, not what it refers to, and reports "CRC Error" on
- # extraction). It does work as expected for symbolic directory links.
- # Because the majority of the large files are in the root of the staging
- # directory, we do however need to handle file links, so we do this with
- # hardlinking. Junctioning requires a huge whack of code, so we take the
- # slightly odd tactic of using #2 and #3, but not #1 and #4. That is,
- # hardlinks for files, but symbolic links for directories.
- def _WIN_LINK_FUNC(src, dst):
- print 'linking %s -> %s' % (src, dst)
- if os.path.isdir(src):
- if not ctypes.windll.kernel32.CreateSymbolicLinkA(
- str(dst), str(os.path.abspath(src)), 1):
- raise ctypes.WinError()
- else:
- if not ctypes.windll.kernel32.CreateHardLinkA(str(dst), str(src), 0):
- raise ctypes.WinError()
- WIN_LINK_FUNC = _WIN_LINK_FUNC
-except ImportError:
- # If we don't have ctypes or aren't on Windows, leave WIN_LINK_FUNC as None.
- pass
-
-
-# Wrapper around git that enforces a timeout.
-GIT_BIN = os.path.join(BUILD_DIR, 'scripts', 'tools', 'git-with-timeout')
-
-# Wrapper around svn that enforces a timeout.
-SVN_BIN = os.path.join(BUILD_DIR, 'scripts', 'tools', 'svn-with-timeout')
-
-# The Google Storage metadata key for the full commit position
-GS_COMMIT_POSITION_KEY = 'Cr-Commit-Position'
-# The Google Storage metadata key for the commit position number
-GS_COMMIT_POSITION_NUMBER_KEY = 'Cr-Commit-Position-Number'
-# The Google Storage metadata key for the Git commit hash
-GS_GIT_COMMIT_KEY = 'Cr-Git-Commit'
-
-# Regular expression to identify a Git hash
-GIT_COMMIT_HASH_RE = re.compile(r'[a-zA-Z0-9]{40}')
-#
-# Regular expression to parse a commit position
-COMMIT_POSITION_RE = re.compile(r'([^@]+)@{#(\d+)}')
-
# Local errors.
class MissingArgument(Exception):
pass
@@ -101,8 +37,6 @@ class PathNotFound(Exception):
pass
class ExternalError(Exception):
pass
-class NoIdentifiedRevision(Exception):
- pass
def IsWindows():
return sys.platform == 'cygwin' or sys.platform.startswith('win')
@@ -113,319 +47,6 @@ def IsLinux():
def IsMac():
return sys.platform.startswith('darwin')
-# For chromeos we need to end up with a different platform name, but the
-# scripts use values like sys.platform for both the build target and
-# the running OS, so this gives us a back door that can be hit to
-# force different naming than the default for some of the chromeos build
-# steps.
-override_platform_name = None
-
-
-def OverridePlatformName(name):
- """Sets the override for PlatformName()"""
- global override_platform_name
- override_platform_name = name
-
-
-def PlatformName():
- """Return a string to be used in paths for the platform."""
- if override_platform_name:
- return override_platform_name
- if IsWindows():
- return 'win32'
- if IsLinux():
- return 'linux'
- if IsMac():
- return 'mac'
- raise NotImplementedError('Unknown platform "%s".' % sys.platform)
-
-
-# Name of the file (inside the packaged build) containing revision number
-# of that build. Also used for determining the latest packaged build.
-FULL_BUILD_REVISION_FILENAME = 'FULL_BUILD_REVISION'
-
-def IsGitCommit(value):
- """Returns: If a value is a Git commit hash.
-
- This only works on full Git commit hashes. A value qualifies as a Git commit
- hash if it only contains hexadecimal numbers and is forty characters long.
- """
- if value is None:
- return False
- return GIT_COMMIT_HASH_RE.match(str(value)) is not None
-
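# Illustrative checks (an editor's sketch, not part of the original module).
# Note that GIT_COMMIT_HASH_RE above accepts any 40 alphanumeric characters,
# so it is slightly looser than a strict hexadecimal Git hash.
assert IsGitCommit('49d89bd71ce87ebe6f504a871832fcd1e0cc5fa3')
assert not IsGitCommit('deadbeef')  # too short to be a full hash
assert not IsGitCommit(None)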
-
-# GetParentClass allows a class instance to find its parent class using Python's
-# inspect module. This allows a class instantiated from a module to access
-# its parent class's methods even after the containing module has been
-# re-imported and reloaded.
-#
-# Also see:
-# http://code.google.com/p/chromium/issues/detail?id=34089
-# http://atlee.ca/blog/2008/11/21/python-reload-danger-here-be-dragons/
-#
-def GetParentClass(obj, n=1):
- import inspect
- if inspect.isclass(obj):
- return inspect.getmro(obj)[n]
- else:
- return inspect.getmro(obj.__class__)[n]
-
-
-def MeanAndStandardDeviation(data):
- """Calculates mean and standard deviation for the values in the list.
-
- Args:
- data: list of numbers
-
- Returns:
- Mean and standard deviation for the numbers in the list.
- """
- n = len(data)
- if n == 0:
- return 0.0, 0.0
- mean = float(sum(data)) / n
- variance = sum([(element - mean)**2 for element in data]) / n
- return mean, math.sqrt(variance)
-
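# Illustrative usage (an editor's sketch, not part of the original module).
# The helper computes the population standard deviation (divides by n,
# not n - 1):
mean, stddev = MeanAndStandardDeviation([2, 4, 4, 4, 5, 5, 7, 9])
assert (mean, stddev) == (5.0, 2.0)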
-
-def FilteredMeanAndStandardDeviation(data):
- """Calculates mean and standard deviation for the values in the list
- ignoring the first occurrence of the max value (unless there was only one sample).
-
- Args:
- data: list of numbers
-
- Returns:
- Mean and standard deviation for the numbers in the list ignoring the
- first occurrence of the max value.
- """
-
- def _FilterMax(array):
- new_array = copy.copy(array) # making sure we are not creating side-effects
- if len(new_array) != 1:
- new_array.remove(max(new_array))
- return new_array
- return MeanAndStandardDeviation(_FilterMax(data))
-
-def HistogramPercentiles(histogram, percentiles):
- if not 'buckets' in histogram or not 'count' in histogram:
- return []
- computed_percentiles = _ComputePercentiles(histogram['buckets'],
- histogram['count'],
- percentiles)
- output = []
- for p in computed_percentiles:
- output.append({'percentile': p, 'value': computed_percentiles[p]})
- return output
-
-def GeomMeanAndStdDevFromHistogram(histogram):
- if not 'buckets' in histogram:
- return 0.0, 0.0
- count = 0
- sum_of_logs = 0
- for bucket in histogram['buckets']:
- if 'high' in bucket:
- bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
- else:
- bucket['mean'] = bucket['low']
- if bucket['mean'] > 0:
- sum_of_logs += math.log(bucket['mean']) * bucket['count']
- count += bucket['count']
-
- if count == 0:
- return 0.0, 0.0
-
- sum_of_squares = 0
- geom_mean = math.exp(sum_of_logs / count)
- for bucket in histogram['buckets']:
- if bucket['mean'] > 0:
- sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
- return geom_mean, math.sqrt(sum_of_squares / count)
-
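# Worked example (an editor's sketch, not part of the original module): two
# buckets with means 1.0 (low=0.5, high=1.5) and 4.0 (low=2.0, high=6.0), one
# sample each, give a geometric mean of exp((ln 1 + ln 4) / 2) == 2.0.
hist = {'buckets': [{'low': 0.5, 'high': 1.5, 'count': 1},
                    {'low': 2.0, 'high': 6.0, 'count': 1}]}
geom_mean, geom_std = GeomMeanAndStdDevFromHistogram(hist)
assert abs(geom_mean - 2.0) < 1e-9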
-def _LinearInterpolate(x0, target, x1, y0, y1):
- """Perform linear interpolation to estimate an intermediate value.
-
- We assume for some F, F(x0) == y0, and F(x1) == y1.
-
- We return an estimate for what F(target) should be, using linear
- interpolation.
-
- Args:
- x0: (Float) A location at which some function F() is known.
- target: (Float) A location at which we need to estimate F().
- x1: (Float) A second location at which F() is known.
- y0: (Float) The value of F(x0).
- y1: (Float) The value of F(x1).
-
- Returns:
- (Float) The estimated value of F(target).
- """
- if x0 == x1:
- return (y0 + y1) / 2
- return (y1 - y0) * (target - x0) / (x1 - x0) + y0
-
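# Worked example (an editor's sketch, not part of the original module): with
# F(0) == 10 and F(10) == 20, the estimate for F(4) is
# 10 + (20 - 10) * (4 - 0) / (10 - 0) == 14.
assert _LinearInterpolate(0.0, 4.0, 10.0, 10.0, 20.0) == 14.0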
-def _BucketInterpolate(last_percentage, target, next_percentage, bucket_min,
- bucket_max):
- """Estimate a minimum which should have the target % of samples below it.
-
- We do linear interpolation only if last_percentage and next_percentage are
- adjacent, and hence we are in a linear section of a histogram. Once they
- spread further apart we generally get exponentially broader buckets, and we
- need to interpolate in the log domain (and exponentiate our result).
-
- Args:
- last_percentage: (Float) This is the percentage of samples below bucket_min.
- target: (Float) A percentage for which we need an estimated bucket.
- next_percentage: (Float) This is the percentage of samples below bucket_max.
- bucket_min: (Float) This is the lower value for samples in a bucket.
- bucket_max: (Float) This exceeds the upper value for samples.
-
- Returns:
- (Float) An estimate of what bucket cutoff would have probably had the target
- percentage.
- """
- log_domain = False
- if bucket_min + 1.5 < bucket_max and bucket_min > 0:
- log_domain = True
- bucket_min = math.log(bucket_min)
- bucket_max = math.log(bucket_max)
- result = _LinearInterpolate(
- last_percentage, target, next_percentage, bucket_min, bucket_max)
- if log_domain:
- result = math.exp(result)
- return result
-
-def _ComputePercentiles(buckets, total, percentiles):
- """Compute percentiles for the given histogram.
-
- Returns estimates for the bucket cutoffs that would probably have the target
- percentiles.
-
- Args:
- buckets: (List) A list of buckets representing the histogram to analyze.
- total: (Float) The total number of samples in the histogram.
- percentiles: (Tuple) The percentiles we are interested in.
-
- Returns:
- (Dictionary) Map from percentiles to bucket cutoffs.
- """
- if not percentiles:
- return {}
- current_count = 0
- current_percentage = 0
- next_percentile_index = 0
- result = {}
- for bucket in buckets:
- if bucket['count'] > 0:
- current_count += bucket['count']
- old_percentage = current_percentage
- current_percentage = float(current_count) / total
-
- # Check whether we passed one of the percentiles we're interested in.
- while (next_percentile_index < len(percentiles) and
- current_percentage > percentiles[next_percentile_index]):
- if not 'high' in bucket:
- result[percentiles[next_percentile_index]] = bucket['low']
- else:
- result[percentiles[next_percentile_index]] = float(_BucketInterpolate(
- old_percentage, percentiles[next_percentile_index],
- current_percentage, bucket['low'], bucket['high']))
- next_percentile_index += 1
- return result
-
-class InitializePartiallyWithArguments:
- # pylint: disable=old-style-class
- """Function currying implementation.
-
- Works for constructors too. Primary use is to be able to construct a class
- with some constructor arguments being set ahead of actual initialization.
- Copy of an ASPN cookbook (#52549).
- """
-
- def __init__(self, clazz, *args, **kwargs):
- self.clazz = clazz
- self.pending = args[:]
- self.kwargs = kwargs.copy()
-
- def __call__(self, *args, **kwargs):
- if kwargs and self.kwargs:
- kw = self.kwargs.copy()
- kw.update(kwargs)
- else:
- kw = kwargs or self.kwargs
-
- return self.clazz(*(self.pending + args), **kw)
-
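# Illustrative usage of the currying helper above (an editor's sketch, not
# part of the original module): pre-bind the first constructor argument and
# supply the rest at call time.
make_point = InitializePartiallyWithArguments(complex, 1.0)
assert make_point(2.0) == complex(1.0, 2.0)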
-
-def Prepend(filepath, text):
- """ Prepends text to the file.
-
- Creates the file if it does not exist.
- """
- file_data = text
- if os.path.exists(filepath):
- file_data += open(filepath).read()
- f = open(filepath, 'w')
- f.write(file_data)
- f.close()
-
-
-def MakeWorldReadable(path):
- """Change the permissions of the given path to make it world-readable.
- This is often needed for archived files, so they can be served by web servers
- or accessed by unprivileged network users."""
-
- # No need to do anything special on Windows.
- if IsWindows():
- return
-
- perms = stat.S_IMODE(os.stat(path)[stat.ST_MODE])
- if os.path.isdir(path):
- # Directories need read and exec.
- os.chmod(path, perms | 0555)
- else:
- os.chmod(path, perms | 0444)
-
-
-def MakeParentDirectoriesWorldReadable(path):
- """Changes the permissions of the given path and its parent directories
- to make them world-readable. Stops on first directory which is
- world-readable. This is often needed for archive staging directories,
- so that they can be served by web servers or accessed by unprivileged
- network users."""
-
- # No need to do anything special on Windows.
- if IsWindows():
- return
-
- while path != os.path.dirname(path):
- current_permissions = stat.S_IMODE(os.stat(path)[stat.ST_MODE])
- if current_permissions & 0555 == 0555:
- break
- os.chmod(path, current_permissions | 0555)
- path = os.path.dirname(path)
-
-
-def MaybeMakeDirectory(*path):
- """Creates an entire path, if it doesn't already exist."""
- file_path = os.path.join(*path)
- try:
- os.makedirs(file_path)
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
-
-
-def RemovePath(*path):
- """Removes the file or directory at 'path', if it exists."""
- file_path = os.path.join(*path)
- if os.path.exists(file_path):
- if os.path.isdir(file_path):
- RemoveDirectory(file_path)
- else:
- RemoveFile(file_path)
-
def RemoveFile(*path):
"""Removes the file located at 'path', if it exists."""
@@ -437,57 +58,6 @@ def RemoveFile(*path):
raise
-def MoveFile(path, new_path):
- """Moves the file located at 'path' to 'new_path', if it exists."""
- try:
- RemoveFile(new_path)
- os.rename(path, new_path)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
-
-def LocateFiles(pattern, root=os.curdir):
- """Yeilds files matching pattern found in root and its subdirectories.
-
- An exception is thrown if root doesn't exist."""
- for path, _, files in os.walk(os.path.abspath(root)):
- for filename in fnmatch.filter(files, pattern):
- yield os.path.join(path, filename)
-
-
-def RemoveFilesWildcards(file_wildcard, root=os.curdir):
- """Removes files matching 'file_wildcard' in root and its subdirectories, if
- any exists.
-
- An exception is thrown if root doesn't exist."""
- for item in LocateFiles(file_wildcard, root):
- try:
- os.remove(item)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
-
-def RemoveGlobbedPaths(path_wildcard, root=os.curdir):
- """Removes all paths matching 'path_wildcard' beneath root.
-
- Returns the list of paths removed.
-
- An exception is thrown if root doesn't exist."""
- if not os.path.exists(root):
- raise OSError(2, 'No such file or directory', root)
-
- full_path_wildcard = os.path.join(root, path_wildcard)
- paths = glob.glob(full_path_wildcard)
- for path in paths:
- # When glob returns directories they end in "/."
- if path.endswith(os.sep + '.'):
- path = path[:-2]
- RemovePath(path)
- return paths
-
-
def RemoveDirectory(*path):
"""Recursively removes a directory, even if it's marked read-only.
@@ -573,225 +143,6 @@ def RemoveDirectory(*path):
remove_with_retry(os.rmdir, file_path)
-def CopyFileToDir(src_path, dest_dir, dest_fn=None, link_ok=False):
- """Copies the file found at src_path to the dest_dir directory, with metadata.
-
- If dest_fn is specified, the src_path is copied to that name in dest_dir,
- otherwise it is copied to a file of the same name.
-
- Raises PathNotFound if either the file or the directory is not found.
- """
- # Verify the file and directory separately so we can tell them apart and
- # raise PathNotFound rather than shutil.copyfile's IOError.
- if not os.path.isfile(src_path):
- raise PathNotFound('Unable to find file %s' % src_path)
- if not os.path.isdir(dest_dir):
- raise PathNotFound('Unable to find dir %s' % dest_dir)
- src_file = os.path.basename(src_path)
- if dest_fn:
- # If we have ctypes and the caller doesn't mind links, use that to
- # try to make the copy faster on Windows. http://crbug.com/418702.
- if link_ok and WIN_LINK_FUNC:
- WIN_LINK_FUNC(src_path, os.path.join(dest_dir, dest_fn))
- else:
- shutil.copy2(src_path, os.path.join(dest_dir, dest_fn))
- else:
- shutil.copy2(src_path, os.path.join(dest_dir, src_file))
-
-
-def MakeZip(output_dir, archive_name, file_list, file_relative_dir,
- raise_error=True, remove_archive_directory=True):
- """Packs files into a new zip archive.
-
- Files are first copied into a directory within the output_dir named for
- the archive_name, which will be created if necessary and emptied if it
- already exists. The files are then packed using archive names
- relative to the output_dir. That is, if the zipfile is unpacked in place,
- it will create a directory identical to the new archive_name directory, in
- the output_dir. The zip file will be named as the archive_name, plus
- '.zip'.
-
- Args:
- output_dir: Absolute path to the directory in which the archive is to
- be created.
- archive_name: Name of the subdirectory of output_dir holding files to be
- added to the new zipfile.
- file_list: List of paths to files or subdirectories, relative to the
- file_relative_dir.
- file_relative_dir: Absolute path to the directory containing the files
- and subdirectories in the file_list.
- raise_error: Whether to raise a PathNotFound error if one of the files in
- the list is not found.
- remove_archive_directory: Whether to remove the archive staging directory
- before copying files over to it.
-
- Returns:
- A tuple consisting of (archive_dir, zip_file_path), where archive_dir
- is the full path to the newly created archive_name subdirectory.
-
- Raises:
- PathNotFound if any of the files in the list is not found, unless
- raise_error is False, in which case the error will be ignored.
- """
-
- start_time = time.clock()
- # Collect files into the archive directory.
- archive_dir = os.path.join(output_dir, archive_name)
- print 'output_dir: %s, archive_name: %s' % (output_dir, archive_name)
- print 'archive_dir: %s, remove_archive_directory: %s, exists: %s' % (
- archive_dir, remove_archive_directory, os.path.exists(archive_dir))
- if remove_archive_directory and os.path.exists(archive_dir):
- # Move it even if it's not a directory as expected. This can happen with
- # FILES.cfg archive creation where we create an archive staging directory
- # that is the same name as the ultimate archive name.
- if not os.path.isdir(archive_dir):
- print 'Moving old "%s" file to create same name directory.' % archive_dir
- previous_archive_file = '%s.old' % archive_dir
- MoveFile(archive_dir, previous_archive_file)
- else:
- print 'Removing %s' % archive_dir
- RemoveDirectory(archive_dir)
- print 'Now, os.path.exists(%s): %s' % (
- archive_dir, os.path.exists(archive_dir))
- MaybeMakeDirectory(archive_dir)
- for needed_file in file_list:
- needed_file = needed_file.rstrip()
- # These paths are relative to the file_relative_dir. We need to copy
- # them over maintaining the relative directories, where applicable.
- src_path = os.path.join(file_relative_dir, needed_file)
- dirname, basename = os.path.split(needed_file)
- try:
- if os.path.isdir(src_path):
- if WIN_LINK_FUNC:
- WIN_LINK_FUNC(src_path, os.path.join(archive_dir, needed_file))
- else:
- shutil.copytree(src_path, os.path.join(archive_dir, needed_file),
- symlinks=True)
- elif dirname != '' and basename != '':
- dest_dir = os.path.join(archive_dir, dirname)
- MaybeMakeDirectory(dest_dir)
- CopyFileToDir(src_path, dest_dir, basename, link_ok=True)
- else:
- CopyFileToDir(src_path, archive_dir, basename, link_ok=True)
- except PathNotFound:
- if raise_error:
- raise
- end_time = time.clock()
- print 'Took %f seconds to create archive directory.' % (end_time - start_time)
-
- # Pack the zip file.
- output_file = '%s.zip' % archive_dir
- previous_file = '%s_old.zip' % archive_dir
- MoveFile(output_file, previous_file)
-
- # If we have 7z, use that as it's much faster. See http://crbug.com/418702.
- windows_zip_cmd = None
- if os.path.exists('C:\\Program Files\\7-Zip\\7z.exe'):
- windows_zip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'a', '-y', '-mx1']
-
- # On Windows we use the python zip module; on Linux and Mac, we use the zip
- # command as it will handle links and file bits (executable), which is much
- # easier than trying to do that with ZipInfo options.
- start_time = time.clock()
- if IsWindows() and not windows_zip_cmd:
- print 'Creating %s' % output_file
-
- def _Addfiles(to_zip_file, dirname, files_to_add):
- for this_file in files_to_add:
- archive_name = this_file
- this_path = os.path.join(dirname, this_file)
- if os.path.isfile(this_path):
- # Store files named relative to the outer output_dir.
- archive_name = this_path.replace(output_dir + os.sep, '')
- if os.path.getsize(this_path) == 0:
- compress_method = zipfile.ZIP_STORED
- else:
- compress_method = zipfile.ZIP_DEFLATED
- to_zip_file.write(this_path, archive_name, compress_method)
- print 'Adding %s' % archive_name
- zip_file = zipfile.ZipFile(output_file, 'w', zipfile.ZIP_DEFLATED,
- allowZip64=True)
- try:
- os.path.walk(archive_dir, _Addfiles, zip_file)
- finally:
- zip_file.close()
- else:
- if IsMac() or IsLinux():
- zip_cmd = ['zip', '-yr1']
- else:
- zip_cmd = windows_zip_cmd
- saved_dir = os.getcwd()
- os.chdir(os.path.dirname(archive_dir))
- command = zip_cmd + [output_file, os.path.basename(archive_dir)]
- result = RunCommand(command)
- os.chdir(saved_dir)
- if result and raise_error:
- raise ExternalError('zip failed: %s => %s' %
- (str(command), result))
- end_time = time.clock()
- print 'Took %f seconds to create zip.' % (end_time - start_time)
- return (archive_dir, output_file)
-
-
-def ExtractZip(filename, output_dir, verbose=True):
- """ Extract the zip archive in the output directory.
- """
- MaybeMakeDirectory(output_dir)
-
- # On Linux and Mac, we use the unzip command as it will
- # handle links and file bits (executable), which is much
- # easier than trying to do that with ZipInfo options.
- #
- # The Mac Version of unzip unfortunately does not support Zip64, whereas
- # the python module does, so we have to fallback to the python zip module
- # on Mac if the filesize is greater than 4GB.
- #
- # On Windows, try to use 7z if it is installed, otherwise fall back to python
- # zip module and pray we don't have files larger than 512MB to unzip.
- unzip_cmd = None
- if ((IsMac() and os.path.getsize(filename) < 4 * 1024 * 1024 * 1024)
- or IsLinux()):
- unzip_cmd = ['unzip', '-o']
- elif IsWindows() and os.path.exists('C:\\Program Files\\7-Zip\\7z.exe'):
- unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
-
- if unzip_cmd:
- # Make sure path is absolute before changing directories.
- filepath = os.path.abspath(filename)
- saved_dir = os.getcwd()
- os.chdir(output_dir)
- command = unzip_cmd + [filepath]
- result = RunCommand(command)
- os.chdir(saved_dir)
- if result:
- raise ExternalError('unzip failed: %s => %s' % (str(command), result))
- else:
- assert IsWindows() or IsMac()
- zf = zipfile.ZipFile(filename)
- # TODO(hinoka): This can be multiprocessed.
- for name in zf.namelist():
- if verbose:
- print 'Extracting %s' % name
- zf.extract(name, output_dir)
- if IsMac():
- # Restore permission bits.
- os.chmod(os.path.join(output_dir, name),
- zf.getinfo(name).external_attr >> 16L)
-
-
-def WindowsPath(path):
- """Returns a Windows mixed-style absolute path, given a Cygwin absolute path.
-
- The version of Python in the Chromium tree uses posixpath for os.path even
- on Windows, so we convert to a mixed Windows path (that is, a Windows path
- that uses forward slashes instead of backslashes) manually.
- """
- # TODO(pamg): make this work for other drives too.
- if path.startswith('/cygdrive/c/'):
- return path.replace('/cygdrive/c/', 'C:/')
- return path
-
-
def FindUpwardParent(start_dir, *desired_list):
"""Finds the desired object's parent, searching upward from the start_dir.
@@ -831,31 +182,6 @@ def FindUpward(start_dir, *desired_list):
return os.path.join(parent, *desired_list)
-def RunAndPrintDots(function):
- """Starts a background thread that prints dots while the function runs."""
-
- def Hook(*args, **kwargs):
- event = threading.Event()
-
- def PrintDots():
- counter = 0
- while not event.isSet():
- event.wait(5)
- sys.stdout.write('.')
- counter = (counter + 1) % 80
- if not counter:
- sys.stdout.write('\n')
- sys.stdout.flush()
- t = threading.Thread(target=PrintDots)
- t.start()
- try:
- return function(*args, **kwargs)
- finally:
- event.set()
- t.join()
- return Hook
-
-
class RunCommandFilter(object):
"""Class that should be subclassed to provide a filter for RunCommand."""
# Method could be a function
@@ -873,19 +199,6 @@ class RunCommandFilter(object):
return last_bits
-class FilterCapture(RunCommandFilter):
- """Captures the text and places it into an array."""
- def __init__(self):
- RunCommandFilter.__init__(self)
- self.text = []
-
- def FilterLine(self, line):
- self.text.append(line.rstrip())
-
- def FilterDone(self, text):
- self.text.append(text)
-
-
def RunCommand(command, parser_func=None, filter_obj=None, pipes=None,
print_cmd=True, timeout=None, max_time=None, **kwargs):
"""Runs the command list, printing its output and returning its exit status.
@@ -1159,92 +472,6 @@ def GetCommandOutput(command):
return output
-def GetGClientCommand(platform=None):
- """Returns the executable command name, depending on the platform.
- """
- if not platform:
- platform = sys.platform
- if platform.startswith('win'):
- # Windows doesn't want to depend on bash.
- return 'gclient.bat'
- else:
- return 'gclient'
-
-
-# Linux scripts use ssh to move files to the archive host.
-def SshMakeDirectory(host, dest_path):
- """Creates the entire dest_path on the remote ssh host.
- """
- command = ['ssh', host, 'mkdir', '-p', dest_path]
- result = RunCommand(command)
- if result:
- raise ExternalError('Failed to ssh mkdir "%s" on "%s" (%s)' %
- (dest_path, host, result))
-
-
-def SshMoveFile(host, src_path, dest_path):
- """Moves src_path (if it exists) to dest_path on the remote host.
- """
- command = ['ssh', host, 'test', '-e', src_path]
- result = RunCommand(command)
- if result:
- # Nothing to do if src_path doesn't exist.
- return result
-
- command = ['ssh', host, 'mv', src_path, dest_path]
- result = RunCommand(command)
- if result:
- raise ExternalError('Failed to ssh mv "%s" -> "%s" on "%s" (%s)' %
- (src_path, dest_path, host, result))
-
-
-def SshCopyFiles(srcs, host, dst):
- """Copies the srcs file(s) to dst on the remote ssh host.
- dst is expected to exist.
- """
- command = ['scp', srcs, host + ':' + dst]
- result = RunCommand(command)
- if result:
- raise ExternalError('Failed to scp "%s" to "%s" (%s)' %
- (srcs, host + ':' + dst, result))
-
-
-def SshExtractZip(host, zipname, dst):
- """extract the remote zip file to dst on the remote ssh host.
- """
- command = ['ssh', host, 'unzip', '-o', '-d', dst, zipname]
- result = RunCommand(command)
- if result:
- raise ExternalError('Failed to ssh unzip -o -d "%s" "%s" on "%s" (%s)' %
- (dst, zipname, host, result))
-
- # unzip will create directories with access 700, which is not often what we
- # need. Fix the permissions for the whole archive.
- command = ['ssh', host, 'chmod', '-R', '755', dst]
- result = RunCommand(command)
- if result:
- raise ExternalError('Failed to ssh chmod -R 755 "%s" on "%s" (%s)' %
- (dst, host, result))
-
-
-def SshCopyTree(srctree, host, dst):
- """Recursively copies the srctree to dst on the remote ssh host.
- For consistency with shutil, dst is expected to not exist.
- """
- command = ['ssh', host, '[ -d "%s" ]' % dst]
- result = RunCommand(command)
- if result:
- raise ExternalError('SshCopyTree destination directory "%s" already exists.'
- % (host + ':' + dst))
-
- SshMakeDirectory(host, os.path.dirname(dst))
- command = ['scp', '-r', '-p', srctree, host + ':' + dst]
- result = RunCommand(command)
- if result:
- raise ExternalError('Failed to scp "%s" to "%s" (%s)' %
- (srctree, host + ':' + dst, result))
-
-
def ListMasters(cue='master.cfg', include_public=True, include_internal=True):
"""Returns all the masters found."""
# Look for "internal" masters first.
@@ -1296,14 +523,6 @@ def GetAllSlaves(fail_hard=False, include_public=True, include_internal=True):
return slaves
-def GetSlavesForHost():
- """Get slaves for a host, defaulting to current host."""
- hostname = os.getenv('TESTING_SLAVENAME')
- if not hostname:
- hostname = socket.getfqdn().split('.', 1)[0].lower()
- return [s for s in GetAllSlaves() if s.get('hostname') == hostname]
-
-
def GetActiveSubdir():
"""Get current checkout's subdir, if checkout uses subdir layout."""
rootdir, subdir = os.path.split(os.path.dirname(BUILD_DIR))
@@ -1401,217 +620,6 @@ def convert_json(option, _, value, parser):
setattr(parser.values, option.dest, json.loads(value))
-def b64_gz_json_encode(obj):
- """Serialize a python object into base64."""
- # The |separators| argument is to densify the command line.
- return base64.b64encode(zlib.compress(
- json.dumps(obj or {}, sort_keys=True, separators=(',', ':')), 9))
-
-
-def convert_gz_json(option, _, value, parser):
- """Provide an OptionParser callback to unmarshal a b64 gz JSON string."""
- setattr(
- parser.values, option.dest,
- json.loads(zlib.decompress(base64.b64decode(value))))
-
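# Round-trip sketch (an editor's illustration, not part of the original
# module); decoding reverses the json -> zlib -> base64 pipeline used by
# b64_gz_json_encode:
import base64, json, zlib
packed = b64_gz_json_encode({'builder': 'linux', 'steps': 3})
assert json.loads(zlib.decompress(base64.b64decode(packed))) == (
    {'builder': 'linux', 'steps': 3})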
-
-def SafeTranslate(inputstr):
- """Convert a free form string to one that can be used in a path.
-
- This is similar to the safeTranslate function in buildbot.
- """
-
- badchars_map = string.maketrans('\t !#$%&\'()*+,./:;<=>?@[\\]^{|}~',
- '______________________________')
- if isinstance(inputstr, unicode):
- inputstr = inputstr.encode('utf8')
- return inputstr.translate(badchars_map)
-
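# Illustrative mapping (an editor's sketch, not part of the original module):
# every disallowed character becomes an underscore.
assert SafeTranslate('Web Kit (1)') == 'Web_Kit__1_'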
-
-def GetPrimaryProject(options):
- """Returns: (str) the key of the primary project, or 'None' if none exists.
- """
- # The preferred way is to reference the 'primary_project' parameter.
- result = options.build_properties.get('primary_project')
- if result:
- return result
-
- # TODO(dnj): The 'primary_repo' parameter is used by some scripts to indicate
- # the primary project name. This is not consistently used and will be
- # deprecated in favor of 'primary_project' once that is rolled out.
- result = options.build_properties.get('primary_repo')
- if result:
- # The 'primary_repo' property currently contains a trailing underscore.
- # However, this isn't an obvious thing given its name, so we'll strip it
- # here and remove that expectation.
- return result.strip('_')
- return None
-
-
-def GetBuildSortKey(options, project=None):
- """Reads a variety of sources to determine the current build revision.
-
- NOTE: Currently, the return value does not qualify branch name. This can
- present a problem with git numbering scheme, where numbers are only unique
- in the context of their respective branches. When this happens, this
- function will return a branch name as part of the sort key and its callers
- will need to adapt their naming/querying schemes to accommodate this. Until
- then, we will return 'None' as the branch name.
- (e.g., refs/foo/bar@{#12345} => ("refs/foo/bar", 12345))
-
- Args:
- options: Command-line options structure
- project: (str/None) If not None, the project to get the build sort key
- for. Otherwise, the build-wide sort key will be used.
- Returns: (branch, value) The qualified sortkey value
- branch: (str/None) The name of the branch, or 'None' if there is no branch
- context. Currently this always returns 'None'.
- value: (int) The iteration value within the specified branch
- Raises: (NoIdentifiedRevision) if no revision could be identified from the
- supplied options.
- """
- # Is there a commit position for this build key?
- try:
- return GetCommitPosition(options, project=project)
- except NoIdentifiedRevision:
- pass
-
- # Nope; derive the sort key from the 'got_[*_]revision' build properties. Note
- # that this could be a Git commit (post flag day).
- if project:
- revision_key = 'got_%s_revision' % (project,)
- else:
- revision_key = 'got_revision'
- revision = options.build_properties.get(revision_key)
- if revision and not IsGitCommit(revision):
- return None, int(revision)
- raise NoIdentifiedRevision("Unable to identify revision for revision key "
- "[%s]" % (revision_key,))
-
-
-def GetGitCommit(options, project=None):
- """Returns the 'git' commit hash for the specified repository
-
- This function uses environmental options to identify the 'git' commit hash
- for the specified repository.
-
- Args:
- options: Command-line options structure
- project: (str/None) The project key to use. If None, use the topmost
- repository identification properties.
- Raises: (NoIdentifiedRevision) if no git commit could be identified from the
- supplied options.
- """
- if project:
- git_commit_key = 'got_%s_revision_git' % (project,)
- else:
- git_commit_key = 'got_revision_git'
- commit = options.build_properties.get(git_commit_key)
- if commit:
- return commit
-
- # Is 'got_[_*]revision' itself the Git commit?
- if project:
- commit_key = 'got_%s_revision' % (project,)
- else:
- commit_key = 'got_revision'
- commit = options.build_properties.get(commit_key)
- if commit and IsGitCommit(commit):
- return commit
- raise NoIdentifiedRevision("Unable to identify commit for commit key: %s" % (
- (git_commit_key, commit_key),))
-
-
-def GetSortableUploadPathForSortKey(branch, value, delimiter=None):
- """Returns: (str) the canonical sort key path constructed from a sort key.
-
- Returns a canonical sort key path for a sort key. The result will be one of
- the following forms:
- - (Without Branch or With Branch=='refs/heads/master'): <value> (e.g., 12345)
- - (With non-Master Branch): <branch-path>-<value> (e.g.,
- "refs_my-branch-12345")
-
- When a 'branch' is supplied, it is converted to a path-suitable form. This
- conversion replaces undesirable characters ('/') with underscores.
-
- Note that when parsing the upload path, 'rsplit' should be used to isolate the
- commit position value, as the branch path may have instances of the delimiter
- in it.
-
- See 'GetBuildSortKey' for more information about sort keys.
-
- Args:
- branch: (str/None) The sort key branch, or 'None' if there is no associated
- branch.
- value: (int) The sort key value.
- delimiter: (str) The delimiter to insert in between <branch-path> and
- <value> when constructing the branch-inclusive form. If omitted
- (default), a hyphen ('-') will be used.
- """
- if branch and branch != 'refs/heads/master':
- delimiter = delimiter or '-'
- branch = branch.replace('/', '_')
- return '%s%s%s' % (branch, delimiter, value)
- return str(value)
-
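# Expected forms (an editor's sketch, not part of the original module),
# matching the docstring above:
assert GetSortableUploadPathForSortKey(None, 12345) == '12345'
assert GetSortableUploadPathForSortKey('refs/my-branch', 12345) == (
    'refs_my-branch-12345')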
-
-def ParseCommitPosition(value):
- """Returns: The (branch, value) parsed from a commit position string.
-
- Args:
- value: (str) The value to parse.
- Raises:
- ValueError: If a commit position could not be parsed from 'value'.
- """
- match = COMMIT_POSITION_RE.match(value)
- if not match:
- raise ValueError("Failed to parse commit position from '%s'" % (value,))
- return match.group(1), int(match.group(2))
-
-
-def BuildCommitPosition(branch, value):
- """Returns: A constructed commit position.
-
- An example commit position for branch 'refs/heads/master' value '12345' is:
- refs/heads/master@{#12345}
-
- This value can be parsed via 'ParseCommitPosition'.
-
- Args:
- branch: (str) The name of the commit position branch
- value: (int): The commit position number.
- """
- return '%s@{#%s}' % (branch, value)
-
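# Round-trip sketch (an editor's illustration, not part of the original
# module), using the commit position from this very change:
branch, number = ParseCommitPosition('refs/heads/master@{#342354}')
assert (branch, number) == ('refs/heads/master', 342354)
assert BuildCommitPosition(branch, number) == 'refs/heads/master@{#342354}'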
-
-def GetCommitPosition(options, project=None):
- """Returns: (branch, value) The parsed commit position from build options.
-
- Returns the parsed commit position from the build options. This is identified
- by examining the 'got_revision_cp' (or 'got_REPO_revision_cp', if 'project' is
- specified) keys.
-
- Args:
- options: Command-line options structure
- project: (str/None) If not None, the project to get the build sort key
- for. Otherwise, the build-wide sort key will be used.
- Returns: (branch, value) The qualified commit position value
- Raises:
- NoIdentifiedRevision: if no revision could be identified from the
- supplied options.
- ValueError: If the supplied commit position failed to parse successfully.
- """
- if project:
- key = 'got_%s_revision_cp' % (project,)
- else:
- key = 'got_revision_cp'
- cp = options.build_properties.get(key)
- if not cp:
- raise NoIdentifiedRevision("Unable to identify the commit position; the "
- "build property is missing: %s" % (key,))
- return ParseCommitPosition(cp)
-
-
def AddPropertiesOptions(option_parser):
"""Registers command line options for parsing build and factory properties.
@@ -1634,89 +642,6 @@ def AddPropertiesOptions(option_parser):
help='factory properties in JSON format')
-def AddThirdPartyLibToPath(lib, override=False):
- """Adds the specified dir in build/third_party to sys.path.
-
- Setting 'override' to true will place the directory in the beginning of
- sys.path, useful for overriding previously set packages.
-
- NOTE: We would like to deprecate this method, as it allows (encourages?)
- scripts to define their own one-off Python path sequences, creating a
- difficult-to-manage state where different scripts and libraries have
- different path expectations. Please don't use this method if possible;
- it is preferred to augment 'common.env' instead.
- """
- libpath = os.path.abspath(os.path.join(BUILD_DIR, 'third_party', lib))
- if override:
- sys.path.insert(0, libpath)
- else:
- sys.path.append(libpath)
-
-
-def GetLKGR():
- """Connect to chromium LKGR server and get LKGR revision.
-
- On success, returns the LKGR and 'ok'. On error, returns None and the text of
- the error message.
- """
-
- try:
- conn = urllib.urlopen('https://chromium-status.appspot.com/lkgr')
- except IOError:
- return (None, 'Error connecting to LKGR server! Is your internet '
- 'connection working properly?')
- try:
- rev = int('\n'.join(conn.readlines()))
- except IOError:
- return (None, 'Error connecting to LKGR server! Is your internet '
- 'connection working properly?')
- except ValueError:
- return None, 'LKGR server returned malformed data! Aborting...'
- finally:
- conn.close()
-
- return rev, 'ok'
-
-
-def AbsoluteCanonicalPath(*path):
- """Return the most canonical path Python can provide."""
-
- file_path = os.path.join(*path)
- return os.path.realpath(os.path.abspath(os.path.expanduser(file_path)))
-
-
-def IsolatedImportFromPath(path, extra_paths=None):
- dir_path, module_file = os.path.split(path)
- module_file = os.path.splitext(module_file)[0]
-
- saved = sys.path
- sys.path = [dir_path] + (extra_paths or [])
- try:
- return __import__(module_file)
- except ImportError:
- pass
- finally:
- sys.path = saved
-
-
-@contextmanager
-def MultiPool(processes):
- """Manages a multiprocessing.Pool making sure to close the pool when done.
-
- This will also call pool.terminate() when an exception is raised (and
- re-raise the exception so the calling procedure can handle it).
- """
- try:
- pool = multiprocessing.Pool(processes=processes)
- yield pool
- pool.close()
- except:
- pool.terminate()
- raise
- finally:
- pool.join()
-
-
def ReadJsonAsUtf8(filename=None, text=None):
"""Read a json file or string and output a dict.
@@ -1762,60 +687,6 @@ def ReadJsonAsUtf8(filename=None, text=None):
return json.loads(text, object_hook=_decode_dict)
-def GetMasterDevParameters(filename='master_cfg_params.json'):
- """Look for master development parameter files in the master directory.
-
- Return the parsed content if the file exists, as a dictionary.
- Every string value in the dictionary is utf8-encoded str.
-
- If the file is not found, returns an empty dict. This is on purpose, to
- make the file optional.
- """
- if os.path.isfile(filename):
- return ReadJsonAsUtf8(filename=filename)
- return {}
-
-
-def FileExclusions():
- all_platforms = ['.landmines', 'obj', 'gen', '.ninja_deps', '.ninja_log']
- # Skip files that the testers don't care about. Mostly directories.
- if IsWindows():
- # Remove obj or lib dir entries
- return all_platforms + ['cfinstaller_archive', 'lib', 'installer_archive']
- if IsMac():
- return all_platforms + [
- # We don't need the arm bits v8 builds.
- 'd8_arm', 'v8_shell_arm',
- # pdfsqueeze is a build helper, no need to copy it to testers.
- 'pdfsqueeze',
- # We copy the framework into the app bundle, we don't need the second
- # copy outside the app.
- # TODO(mark): Since r28431, the copy in the build directory is actually
- # used by tests. Putting two copies in the .zip isn't great, so maybe
- # we can find another workaround.
- # 'Chromium Framework.framework',
- # 'Google Chrome Framework.framework',
- # We copy the Helper into the app bundle, we don't need the second
- # copy outside the app.
- 'Chromium Helper.app',
- 'Google Chrome Helper.app',
- 'App Shim Socket',
- '.deps', 'obj.host', 'obj.target', 'lib'
- ]
- if IsLinux():
- return all_platforms + [
- # intermediate build directories (full of .o, .d, etc.).
- 'appcache', 'glue', 'lib.host', 'obj.host',
- 'obj.target', 'src', '.deps',
- # scons build cruft
- '.sconsign.dblite',
- # build helper, not needed on testers
- 'mksnapshot',
- ]
-
- return all_platforms
-
-
def DatabaseSetup(buildmaster_config, require_dbconfig=False):
"""Read database credentials in the master directory."""
if os.path.isfile('.dbconfig'):
@@ -1904,11 +775,3 @@ def GetSlavesFromBuilders(builders):
})
return slaves
-
-def GetSlaveNamesForBuilder(builders, builder_name):
- """Returns a list of slave hostnames for the given builder name."""
- slaves = []
- pool_names = builders['builders'][builder_name]['slave_pools']
- for pool_name in pool_names:
- slaves.extend(builders['slave_pools'][pool_name]['slaves'])
- return slaves
diff --git a/infra/scripts/legacy/scripts/slave/build_directory.py b/infra/scripts/legacy/scripts/slave/build_directory.py
index d7aa35b..d0686f5 100644
--- a/infra/scripts/legacy/scripts/slave/build_directory.py
+++ b/infra/scripts/legacy/scripts/slave/build_directory.py
@@ -33,20 +33,6 @@ def AreNinjaFilesNewerThanXcodeFiles(src_dir=None):
return IsFileNewerThanFile(ninja_path, xcode_path)
-def AreNinjaFilesNewerThanMSVSFiles(src_dir=None):
- """Returns True if the generated ninja files are newer than the generated
- msvs files.
-
- Parameters:
- src_dir: The path to the src directory. If None, it's assumed to be
- at src/ relative to the current working directory.
- """
- src_dir = src_dir or 'src'
- ninja_path = os.path.join(src_dir, 'out', 'Release', 'build.ninja')
- msvs_path = os.path.join(src_dir, 'build', 'all.sln')
- return IsFileNewerThanFile(ninja_path, msvs_path)
-
-
def GetBuildOutputDirectory(src_dir=None, cros_board=None):
"""Returns the path to the build directory, relative to the checkout root.
@@ -71,41 +57,6 @@ def GetBuildOutputDirectory(src_dir=None, cros_board=None):
return os.path.join(src_dir, 'xcodebuild')
if sys.platform == 'cygwin' or sys.platform.startswith('win'):
- if AreNinjaFilesNewerThanMSVSFiles(src_dir):
- return os.path.join(src_dir, 'out')
- return os.path.join(src_dir, 'build')
+ return os.path.join(src_dir, 'out')
raise NotImplementedError('Unexpected platform %s' % sys.platform)
-
-
-def RmtreeExceptNinjaOrGomaFiles(build_output_dir):
- """Recursively removes everything but ninja files from a build directory."""
- for root, _, files in os.walk(build_output_dir, topdown=False):
- for f in files:
- # For .manifest in particular, gyp windows ninja generates manifest
- # files at generation time but clobber nukes at the beginning of
- # compile, so make sure not to delete those generated files, otherwise
- # compile will fail.
- if (f.endswith('.ninja') or f.endswith('.manifest') or
- f == 'args.gn' or
- f.startswith('msvc') or # VS runtime DLLs.
- f.startswith('pgort') or # VS PGO runtime DLL.
- f in ('gyp-mac-tool', 'gyp-win-tool',
- 'environment.x86', 'environment.x64')):
- continue
- # Keep goma related files.
- if f == '.goma_deps':
- continue
- os.unlink(os.path.join(root, f))
- # Delete the directory if empty; this works because the walk is bottom-up.
- try:
- os.rmdir(root)
- except OSError, e:
- if e.errno in (39, 41, 66):
- # If the directory isn't empty, ignore it.
- # On Windows, os.rmdir will raise WindowsError with winerror 145,
- # for which e.errno is 41.
- # On Linux, e.errno is 39.
- pass
- else:
- raise
diff --git a/infra/scripts/legacy/scripts/slave/gtest_slave_utils.py b/infra/scripts/legacy/scripts/slave/gtest_slave_utils.py
index e35dd1d..2b4135c 100755
--- a/infra/scripts/legacy/scripts/slave/gtest_slave_utils.py
+++ b/infra/scripts/legacy/scripts/slave/gtest_slave_utils.py
@@ -4,13 +4,9 @@
# found in the LICENSE file.
import logging
-import optparse
import os
-import re
-import sys
from common import gtest_utils
-from xml.dom import minidom
from slave.gtest.json_results_generator import JSONResultsGenerator
from slave.gtest.test_result import canonical_name
from slave.gtest.test_result import TestResult
@@ -44,38 +40,6 @@ def GetResultsMap(observer):
return test_results_map
-def GetResultsMapFromXML(results_xml):
- """Parse the given results XML file and returns a map of TestResults."""
-
- results_xml_file = None
- try:
- results_xml_file = open(results_xml)
- except IOError:
- logging.error('Cannot open file %s', results_xml)
- return dict()
- node = minidom.parse(results_xml_file).documentElement
- results_xml_file.close()
-
- test_results_map = dict()
- testcases = node.getElementsByTagName('testcase')
-
- for testcase in testcases:
- name = testcase.getAttribute('name')
- classname = testcase.getAttribute('classname')
- test_name = '%s.%s' % (classname, name)
-
- failures = testcase.getElementsByTagName('failure')
- not_run = testcase.getAttribute('status') == 'notrun'
- elapsed = float(testcase.getAttribute('time'))
- result = TestResult(test_name,
- failed=bool(failures),
- not_run=not_run,
- elapsed_time=elapsed)
- test_results_map[canonical_name(test_name)] = [result]
-
- return test_results_map
-
-
def GenerateJSONResults(test_results_map, options):
"""Generates a JSON results file from the given test_results_map,
returning the associated generator for use with UploadJSONResults, below.
@@ -137,72 +101,3 @@ def UploadJSONResults(generator):
if generator:
generator.upload_json_files([FULL_RESULTS_FILENAME,
TIMES_MS_FILENAME])
-
-# For command-line testing.
-def main():
- # Builder base URL where we have the archived test results.
- # (Note: to be deprecated)
- BUILDER_BASE_URL = 'http://build.chromium.org/buildbot/gtest_results/'
-
- option_parser = optparse.OptionParser()
- option_parser.add_option('', '--test-type', default='',
- help='Test type that generated the results XML,'
- ' e.g. unit-tests.')
- option_parser.add_option('', '--results-directory', default='./',
- help='Output results directory source dir.')
- option_parser.add_option('', '--input-results-xml', default='',
- help='Test results xml file (input for us).'
- ' default is TEST_TYPE.xml')
- option_parser.add_option('', '--builder-base-url', default='',
- help=('A URL where we have the archived test '
- 'results. (default=%sTEST_TYPE_results/)'
- % BUILDER_BASE_URL))
- option_parser.add_option('', '--builder-name',
- default='DUMMY_BUILDER_NAME',
- help='The name of the builder shown on the '
- 'waterfall running this script e.g. WebKit.')
- option_parser.add_option('', '--build-name',
- default='DUMMY_BUILD_NAME',
- help='The name of the builder used in its path, '
- 'e.g. webkit-rel.')
- option_parser.add_option('', '--build-number', default='',
- help='The build number of the builder running'
- ' this script.')
- option_parser.add_option('', '--test-results-server',
- default='',
- help='The test results server to upload the '
- 'results.')
- option_parser.add_option('--master-name', default='',
- help='The name of the buildbot master. '
- 'Both test-results-server and master-name '
- 'need to be specified to upload the results '
- 'to the server.')
- option_parser.add_option('--webkit-revision', default='0',
- help='The WebKit revision being tested. If not '
- 'given, defaults to 0.')
- option_parser.add_option('--chrome-revision', default='0',
- help='The Chromium revision being tested. If not '
- 'given, defaults to 0.')
-
- options = option_parser.parse_args()[0]
-
- if not options.test_type:
- logging.error('--test-type needs to be specified.')
- sys.exit(1)
-
- if not options.input_results_xml:
- logging.error('--input-results-xml needs to be specified.')
- sys.exit(1)
-
- if options.test_results_server and not options.master_name:
- logging.warn('--test-results-server is given but '
- '--master-name is not specified; the results won\'t be '
- 'uploaded to the server.')
-
- results_map = GetResultsMapFromXML(options.input_results_xml)
- generator = GenerateJSONResults(results_map, options)
- UploadJSONResults(generator)
-
-
-if '__main__' == __name__:
- main()
diff --git a/infra/scripts/legacy/scripts/slave/runtest.py b/infra/scripts/legacy/scripts/slave/runtest.py
index fac25b9..42ee7db 100755
--- a/infra/scripts/legacy/scripts/slave/runtest.py
+++ b/infra/scripts/legacy/scripts/slave/runtest.py
@@ -11,22 +11,14 @@ build directory, e.g. chrome-release/build/.
For a list of command-line options, call this script with '--help'.
"""
-import ast
import copy
-import datetime
-import exceptions
-import gzip
-import hashlib
-import json
import logging
import optparse
import os
import platform
import re
-import stat
import subprocess
import sys
-import tempfile
from common import chromium_utils
from common import gtest_utils
diff --git a/infra/scripts/legacy/scripts/slave/slave_utils.py b/infra/scripts/legacy/scripts/slave/slave_utils.py
index 98435e1..76cf4e3 100644
--- a/infra/scripts/legacy/scripts/slave/slave_utils.py
+++ b/infra/scripts/legacy/scripts/slave/slave_utils.py
@@ -15,179 +15,12 @@ import tempfile
import time
from common import chromium_utils
-from slave.bootstrap import ImportMasterConfigs # pylint: disable=W0611
-from common.chromium_utils import GetActiveMaster # pylint: disable=W0611
# These codes used to distinguish true errors from script warnings.
ERROR_EXIT_CODE = 1
WARNING_EXIT_CODE = 88
-# Local errors.
-class PageHeapError(Exception):
- pass
-
-
-# Cache the path to gflags.exe.
-_gflags_exe = None
-
-
-def SubversionExe():
- # TODO(pamg): move this into platform_utils to support Mac and Linux.
- if chromium_utils.IsWindows():
- return 'svn.bat' # Find it in the user's path.
- elif chromium_utils.IsLinux() or chromium_utils.IsMac():
- return 'svn' # Find it in the user's path.
- else:
- raise NotImplementedError(
- 'Platform "%s" is not currently supported.' % sys.platform)
-
-
-def GitExe():
- return 'git.bat' if chromium_utils.IsWindows() else 'git'
-
-
-def SubversionCat(wc_dir):
- """Output the content of specified files or URLs in SVN.
- """
- try:
- return chromium_utils.GetCommandOutput([SubversionExe(), 'cat',
- wc_dir])
- except chromium_utils.ExternalError:
- return None
-
-
-class NotGitWorkingCopy(Exception): pass
-class NotSVNWorkingCopy(Exception): pass
-class NotAnyWorkingCopy(Exception): pass
-class InvalidSVNRevision(Exception): pass
-
-
-def ScrapeSVNInfoRevision(wc_dir, regexp):
- """Runs 'svn info' on a working copy and applies the supplied regex and
- returns the matched group as an int.
- regexp can be either a compiled regex or a string regex.
- throws NotSVNWorkingCopy if wc_dir is not in a working copy.
- throws InvalidSVNRevision if matched group is not numeric.
- """
- if isinstance(regexp, (str, unicode)):
- regexp = re.compile(regexp)
- retval, svn_info = chromium_utils.GetStatusOutput([SubversionExe(), 'info',
- wc_dir])
- if retval or 'is not a working copy' in svn_info:
- raise NotSVNWorkingCopy(wc_dir)
- match = regexp.search(svn_info)
- if not match or not match.groups():
- raise InvalidSVNRevision(
- '%s did not match in svn info %s.' % (regexp.pattern, svn_info))
- text = match.group(1)
- if text.isdigit():
- return int(text)
- else:
- raise InvalidSVNRevision(text)
-
-
-def SubversionRevision(wc_dir):
- """Finds the last svn revision of a working copy and returns it as an int."""
- return ScrapeSVNInfoRevision(wc_dir, r'(?s).*Revision: (\d+).*')
-
-
-def SubversionLastChangedRevision(wc_dir_or_file):
- """Finds the last changed svn revision of a fs path returns it as an int."""
- return ScrapeSVNInfoRevision(wc_dir_or_file,
- r'(?s).*Last Changed Rev: (\d+).*')
-
-
-def GitHash(wc_dir):
- """Finds the current commit hash of the wc_dir."""
- retval, text = chromium_utils.GetStatusOutput(
- [GitExe(), 'rev-parse', 'HEAD'], cwd=wc_dir)
- if retval or 'fatal: Not a git repository' in text:
- raise NotGitWorkingCopy(wc_dir)
- return text.strip()
-
-
-def GetHashOrRevision(wc_dir):
- """Gets the svn revision or git hash of wc_dir as a string. Throws
- NotAnyWorkingCopy if neither is appropriate."""
- try:
- return str(SubversionRevision(wc_dir))
- except NotSVNWorkingCopy:
- pass
- try:
- return GitHash(wc_dir)
- except NotGitWorkingCopy:
- pass
- raise NotAnyWorkingCopy(wc_dir)
-
-
-def GitOrSubversion(wc_dir):
- """Returns the VCS for the given directory.
-
- Returns:
- 'svn' if the directory is a valid svn repo
- 'git' if the directory is a valid git repo root
- None otherwise
- """
- ret, out = chromium_utils.GetStatusOutput([SubversionExe(), 'info', wc_dir])
- if not ret and 'is not a working copy' not in out:
- return 'svn'
-
- ret, out = chromium_utils.GetStatusOutput(
- [GitExe(), 'rev-parse', '--is-inside-work-tree'], cwd=wc_dir)
- if not ret and 'fatal: Not a git repository' not in out:
- return 'git'
-
- return None
-
-
-def GetBuildRevisions(src_dir, webkit_dir=None, revision_dir=None):
- """Parses build revisions out of the provided directories.
-
- Args:
- src_dir: The source directory to be used to check the revision in.
- webkit_dir: Optional WebKit directory, relative to src_dir.
- revision_dir: If provided, this dir will be used for the build revision
- instead of the mandatory src_dir.
-
- Returns a tuple of the build revision and (optional) WebKit revision.
- NOTICE: These revisions are strings, since they can be either Subversion
- numbers or Git hashes.
- """
- abs_src_dir = os.path.abspath(src_dir)
- webkit_revision = None
- if webkit_dir:
- webkit_dir = os.path.join(abs_src_dir, webkit_dir)
- webkit_revision = GetHashOrRevision(webkit_dir)
-
- if revision_dir:
- revision_dir = os.path.join(abs_src_dir, revision_dir)
- build_revision = GetHashOrRevision(revision_dir)
- else:
- build_revision = GetHashOrRevision(src_dir)
- return (build_revision, webkit_revision)
-
-
-def GetZipFileNames(build_properties, build_revision, webkit_revision=None,
- extract=False, use_try_buildnumber=True):
- base_name = 'full-build-%s' % chromium_utils.PlatformName()
-
- if 'try' in build_properties.get('mastername', '') and use_try_buildnumber:
- if extract:
- if not build_properties.get('parent_buildnumber'):
- raise Exception('build_props does not have parent data: %s' %
- build_properties)
- version_suffix = '_%(parent_buildnumber)s' % build_properties
- else:
- version_suffix = '_%(buildnumber)s' % build_properties
- elif webkit_revision:
- version_suffix = '_wk%s_%s' % (webkit_revision, build_revision)
- else:
- version_suffix = '_%s' % build_revision
-
- return base_name, version_suffix
-
-
def SlaveBuildName(chrome_dir):
"""Extracts the build name of this slave (e.g., 'chrome-release') from the
leaf subdir of its build directory.
@@ -221,96 +54,6 @@ def SlaveBaseDir(chrome_dir):
return result
-def GetStagingDir(start_dir):
- """Creates a chrome_staging dir in the starting directory. and returns its
- full path.
- """
- start_dir = os.path.abspath(start_dir)
- staging_dir = os.path.join(SlaveBaseDir(start_dir), 'chrome_staging')
- chromium_utils.MaybeMakeDirectory(staging_dir)
- return staging_dir
-
-
-def SetPageHeap(chrome_dir, exe, enable):
- """Enables or disables page-heap checking in the given executable, depending
- on the 'enable' parameter. The path to gflags.exe is found upward from chrome_dir and cached.
- """
- global _gflags_exe
- if _gflags_exe is None:
- _gflags_exe = chromium_utils.FindUpward(chrome_dir,
- 'tools', 'memory', 'gflags.exe')
- command = [_gflags_exe]
- if enable:
- command.extend(['/p', '/enable', exe, '/full'])
- else:
- command.extend(['/p', '/disable', exe])
- result = chromium_utils.RunCommand(command)
- if result:
- description = {True: 'enable', False: 'disable'}
- raise PageHeapError('Unable to %s page heap for %s.' %
- (description[enable], exe))
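The two gflags.exe command shapes this builds, sketched with a hypothetical
checkout path:

    # Enable: gflags.exe /p /enable chrome.exe /full
    SetPageHeap('C:\\b\\chrome', 'chrome.exe', True)
    # Disable: gflags.exe /p /disable chrome.exe
    SetPageHeap('C:\\b\\chrome', 'chrome.exe', False)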
-
-
-def LongSleep(secs):
- """A sleep utility for long durations that avoids appearing hung.
-
-  Sleeps for the specified duration. Prints output periodically so that the
-  process does not appear hung and get timed out. Since this function is meant
- for long durations, it assumes that the caller does not care about losing a
- small amount of precision.
-
- Args:
- secs: The time to sleep, in seconds.
- """
- secs_per_iteration = 60
- time_slept = 0
-
-  # Make sure we are dealing with an integral duration; since this function is
-  # meant for long-lived sleeps, we don't mind losing floating-point precision.
- secs = int(round(secs))
-
- remainder = secs % secs_per_iteration
- if remainder > 0:
- time.sleep(remainder)
- time_slept += remainder
- sys.stdout.write('.')
- sys.stdout.flush()
-
- while time_slept < secs:
- time.sleep(secs_per_iteration)
- time_slept += secs_per_iteration
- sys.stdout.write('.')
- sys.stdout.flush()
-
- sys.stdout.write('\n')
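A worked example of the remainder-first behavior:

    # LongSleep(125) sleeps the 5-second remainder first, then two full
    # 60-second iterations, printing a '.' after each chunk:
    #   sleep(5) '.' ; sleep(60) '.' ; sleep(60) '.' ; newline.
    LongSleep(125)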
-
-
-def RunPythonCommandInBuildDir(build_dir, target, command_line_args,
- server_dir=None, filter_obj=None):
- if sys.platform == 'win32':
- python_exe = 'python.exe'
- else:
-    os.environ['PYTHONPATH'] = (
-        chromium_utils.FindUpward(build_dir, 'tools', 'python') +
-        ':' + os.environ.get('PYTHONPATH', ''))
- python_exe = 'python'
-
- command = [python_exe] + command_line_args
- return chromium_utils.RunCommand(command, filter_obj=filter_obj)
-
-
-class RunCommandCaptureFilter(object):
-  def __init__(self):
-    # Use an instance attribute; a class-level 'lines = []' would be shared
-    # across all instances of the filter.
-    self.lines = []
-
-  def FilterLine(self, in_line):
-    self.lines.append(in_line)
-    return None
-
-  def FilterDone(self, last_bits):
-    self.lines.append(last_bits)
-    return None
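A sketch of pairing the capture filter with RunCommand's filter_obj parameter;
the command being run is illustrative:

    capture = RunCommandCaptureFilter()
    retcode = chromium_utils.RunCommand(['ls', '-l'], filter_obj=capture)
    print 'captured %d output chunks' % len(capture.lines)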
-
-
def GypFlagIsOn(options, flag):
value = GetGypFlag(options, flag, False)
# The values we understand as Off are False and a text zero.
@@ -329,220 +72,6 @@ def GetGypFlag(options, flag, default=None):
return gypflags[flag]
-def GSUtilSetup():
- # Get the path to the gsutil script.
- gsutil = os.path.join(os.path.dirname(__file__), 'gsutil')
- gsutil = os.path.normpath(gsutil)
- if chromium_utils.IsWindows():
- gsutil += '.bat'
-
- # Get the path to the boto file containing the password.
- boto_file = os.path.join(os.path.dirname(__file__), '..', '..', 'site_config',
- '.boto')
-
- # Make sure gsutil uses this boto file if it exists.
- if os.path.exists(boto_file):
- os.environ['AWS_CREDENTIAL_FILE'] = boto_file
- os.environ['BOTO_CONFIG'] = boto_file
- return gsutil
-
-
-def GSUtilGetMetadataField(name, provider_prefix=None):
- """Returns: (str) the metadata field to use with Google Storage
-
- The Google Storage specification for metadata can be found at:
- https://developers.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata
- """
- # Already contains custom provider prefix
- if name.lower().startswith('x-'):
- return name
-
- # See if it's innately supported by Google Storage
- if name in (
- 'Cache-Control',
- 'Content-Disposition',
- 'Content-Encoding',
- 'Content-Language',
- 'Content-MD5',
- 'Content-Type',
- ):
- return name
-
- # Add provider prefix
- if not provider_prefix:
- provider_prefix = 'x-goog-meta'
- return '%s-%s' % (provider_prefix, name)
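How the three rules above play out, using illustrative field names:

    GSUtilGetMetadataField('Content-Type')         # 'Content-Type' (built in)
    GSUtilGetMetadataField('x-goog-meta-owner')    # unchanged, already prefixed
    GSUtilGetMetadataField('owner')                # 'x-goog-meta-owner'
    GSUtilGetMetadataField('owner', 'x-amz-meta')  # 'x-amz-meta-owner'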
-
-
-def GSUtilCopy(source, dest, mimetype=None, gs_acl=None, cache_control=None,
- metadata=None):
- """Copy a file to Google Storage.
-
- Runs the following command:
- gsutil -h Content-Type:<mimetype> \
- -h Cache-Control:<cache_control> \
-        cp -a <gs_acl> <source> <dest>
-
- Args:
- source: the source URI
- dest: the destination URI
- mimetype: optional value to add as a Content-Type header
- gs_acl: optional value to add as a canned-acl
- cache_control: optional value to set Cache-Control header
-    metadata: (dict) A dictionary of string key/value metadata entries to set
-      (see the '-h' option of 'gsutil cp')
- Returns:
- The status code returned from running the generated gsutil command.
- """
-
- if not source.startswith('gs://') and not source.startswith('file://'):
- source = 'file://' + source
- if not dest.startswith('gs://') and not dest.startswith('file://'):
- dest = 'file://' + dest
- gsutil = GSUtilSetup()
- # Run the gsutil command. gsutil internally calls command_wrapper, which
- # will try to run the command 10 times if it fails.
- command = [gsutil]
-
- if not metadata:
- metadata = {}
- if mimetype:
- metadata['Content-Type'] = mimetype
- if cache_control:
- metadata['Cache-Control'] = cache_control
- for k, v in sorted(metadata.iteritems(), key=lambda (k, _): k):
- field = GSUtilGetMetadataField(k)
-    param = field if v is None else '%s:%s' % (field, v)
- command += ['-h', param]
- command.extend(['cp'])
- if gs_acl:
- command.extend(['-a', gs_acl])
- command.extend([source, dest])
- return chromium_utils.RunCommand(command)
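A sketch of the command this assembles for one illustrative call; the bucket
and file names are made up:

    GSUtilCopy('/tmp/build.zip', 'gs://my-bucket/build.zip',
               mimetype='application/zip', gs_acl='public-read')
    # Roughly: gsutil -h Content-Type:application/zip \
    #     cp -a public-read file:///tmp/build.zip gs://my-bucket/build.zip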
-
-
-def GSUtilCopyFile(filename, gs_base, subdir=None, mimetype=None, gs_acl=None,
- cache_control=None, metadata=None):
- """Copy a file to Google Storage.
-
- Runs the following command:
- gsutil -h Content-Type:<mimetype> \
- -h Cache-Control:<cache_control> \
- cp -a <gs_acl> file://<filename> <gs_base>/<subdir>/<filename w/o path>
-
- Args:
- filename: the file to upload
- gs_base: the bucket to upload the file to
-    subdir: optional subdirectory within the bucket
-    mimetype: optional value to add as a Content-Type header
-    gs_acl: optional value to add as a canned-acl
-    cache_control: optional value to set the Cache-Control header
-    metadata: (dict) optional metadata entries to set on the object
- Returns:
- The status code returned from running the generated gsutil command.
- """
-
- source = 'file://' + filename
- dest = gs_base
- if subdir:
- # HACK(nsylvain): We can't use normpath here because it will break the
- # slashes on Windows.
- if subdir == '..':
- dest = os.path.dirname(gs_base)
- else:
- dest = '/'.join([gs_base, subdir])
- dest = '/'.join([dest, os.path.basename(filename)])
- return GSUtilCopy(source, dest, mimetype, gs_acl, cache_control,
- metadata=metadata)
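A sketch of how the destination is assembled; the bucket and paths are
illustrative:

    GSUtilCopyFile('out/Release/chrome.zip', 'gs://my-bucket', subdir='win')
    # Copies file://out/Release/chrome.zip to gs://my-bucket/win/chrome.zip.
    # subdir='..' would instead target the parent of gs://my-bucket.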
-
-
-def GSUtilCopyDir(src_dir, gs_base, dest_dir=None, gs_acl=None,
- cache_control=None):
- """Upload the directory and its contents to Google Storage."""
-
-  # Guard against being handed a file; this must be a directory upload.
-  assert os.path.isdir(src_dir), '%s must be a directory' % src_dir
-
- gsutil = GSUtilSetup()
- command = [gsutil, '-m']
- if cache_control:
- command.extend(['-h', 'Cache-Control:%s' % cache_control])
- command.extend(['cp', '-R'])
- if gs_acl:
- command.extend(['-a', gs_acl])
- if dest_dir:
- command.extend([src_dir, gs_base + '/' + dest_dir])
- else:
- command.extend([src_dir, gs_base])
- return chromium_utils.RunCommand(command)
-
-
-def GSUtilDownloadFile(src, dst):
- """Copy a file from Google Storage."""
- gsutil = GSUtilSetup()
-
- # Run the gsutil command. gsutil internally calls command_wrapper, which
- # will try to run the command 10 times if it fails.
- command = [gsutil]
- command.extend(['cp', src, dst])
- return chromium_utils.RunCommand(command)
-
-
-def GSUtilMoveFile(source, dest, gs_acl=None):
- """Move a file on Google Storage."""
-
- gsutil = GSUtilSetup()
-
- # Run the gsutil command. gsutil internally calls command_wrapper, which
- # will try to run the command 10 times if it fails.
- command = [gsutil]
- command.extend(['mv', source, dest])
- status = chromium_utils.RunCommand(command)
-
- if status:
- return status
-
- if gs_acl:
- command = [gsutil]
- command.extend(['setacl', gs_acl, dest])
- status = chromium_utils.RunCommand(command)
-
- return status
-
-
-def GSUtilDeleteFile(filename):
- """Delete a file on Google Storage."""
-
- gsutil = GSUtilSetup()
-
- # Run the gsutil command. gsutil internally calls command_wrapper, which
- # will try to run the command 10 times if it fails.
- command = [gsutil]
- command.extend(['rm', filename])
- return chromium_utils.RunCommand(command)
-
-
-def GSUtilListBucket(gs_base, args):
-  """List the contents of a Google Storage bucket."""
-
-  gsutil = GSUtilSetup()
-
-  # Run the gsutil command. gsutil internally calls command_wrapper, which
-  # will try to run the command 10 times if it fails.
-  # Python 2 lacks 'nonlocal', so accumulate output in a mutable closure
-  # variable rather than a module-level global.
-  output_lines = []
-
-  def GatherOutput(line):
-    output_lines.append(line)
-
-  command = [gsutil, 'ls'] + args + [gs_base]
-  status = chromium_utils.RunCommand(command, parser_func=GatherOutput)
-  return (status, ''.join(l + '\n' for l in output_lines))
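A usage sketch; the bucket name is hypothetical:

    status, listing = GSUtilListBucket('gs://my-bucket', ['-R'])
    if not status:
      for line in listing.splitlines():
        print line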
-
-
def LogAndRemoveFiles(temp_dir, regex_pattern):
"""Removes files in |temp_dir| that match |regex_pattern|.
This function prints out the name of each directory or filename before
@@ -706,30 +235,3 @@ def WriteLogLines(logname, lines, perf=None):
print '@@@STEP_LOG_END_PERF@%s@%s@@@' % (logname, perf)
else:
print '@@@STEP_LOG_END@%s@@@' % logname
-
-
-def ZipAndUpload(bucket, archive, *targets):
- """Uploads a zipped archive to the specified Google Storage bucket.
-
- Args:
- bucket: Google Storage bucket to upload to.
- archive: Name of the .zip archive.
- *targets: List of targets that should be included in the archive.
-
- Returns:
- Path to the uploaded archive on Google Storage.
- """
-  local_archive = os.path.join(tempfile.mkdtemp(suffix=archive), archive)
- zip_cmd = [
- 'zip',
- '-9',
- '--filesync',
- '--recurse-paths',
- '--symlinks',
- local_archive,
- ]
- zip_cmd.extend(targets)
-
- chromium_utils.RunCommand(zip_cmd)
- GSUtilCopy(local_archive, 'gs://%s/%s' % (bucket, archive))
- return 'https://storage.cloud.google.com/%s/%s' % (bucket, archive)
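A closing usage sketch; the bucket, archive name, and targets are illustrative:

    url = ZipAndUpload('my-bucket', 'layout-test-results.zip',
                       'results', 'results.json')
    print 'archive uploaded to %s' % url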