summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authoraiolos <aiolos@chromium.org>2015-10-23 13:34:21 -0700
committerCommit bot <commit-bot@chromium.org>2015-10-23 20:35:08 +0000
commit36f0b9f32476db7a5aff5c2983fb34a94b3fcf97 (patch)
tree7edb25e25eff3ce387d973935cbd7c5c9065f8bc
parent2cbdbfe4cb34bdb68666ff687089977abc1afc16 (diff)
downloadchromium_src-36f0b9f32476db7a5aff5c2983fb34a94b3fcf97.zip
chromium_src-36f0b9f32476db7a5aff5c2983fb34a94b3fcf97.tar.gz
chromium_src-36f0b9f32476db7a5aff5c2983fb34a94b3fcf97.tar.bz2
Add Updating of cloud storage information in BaseConfigs.
This is the first step in having the reference build use the BinaryManager. BUG=544258 Review URL: https://codereview.chromium.org/1414453004 Cr-Commit-Position: refs/heads/master@{#355872}
-rw-r--r--tools/telemetry/catapult_base/dependency_manager/base_config.py220
-rw-r--r--tools/telemetry/catapult_base/dependency_manager/base_config_unittest.py1374
-rw-r--r--tools/telemetry/catapult_base/dependency_manager/dependency_manager.py8
-rw-r--r--tools/telemetry/catapult_base/dependency_manager/dependency_manager_unittest.py3
-rw-r--r--tools/telemetry/catapult_base/dependency_manager/exceptions.py12
-rw-r--r--tools/telemetry/catapult_base/dependency_manager/uploader.py106
-rw-r--r--tools/telemetry/catapult_base/dependency_manager/uploader_unittest.py91
7 files changed, 1732 insertions, 82 deletions
diff --git a/tools/telemetry/catapult_base/dependency_manager/base_config.py b/tools/telemetry/catapult_base/dependency_manager/base_config.py
index 9c21dea..8a02b5d 100644
--- a/tools/telemetry/catapult_base/dependency_manager/base_config.py
+++ b/tools/telemetry/catapult_base/dependency_manager/base_config.py
@@ -3,10 +3,13 @@
# found in the LICENSE file.
import json
+import logging
import os
+from catapult_base import cloud_storage
from catapult_base.dependency_manager import dependency_info
from catapult_base.dependency_manager import exceptions
+from catapult_base.dependency_manager import uploader
class BaseConfig(object):
@@ -87,13 +90,15 @@ class BaseConfig(object):
"""
self._config_path = file_path
self._writable = writable
- if not file_path:
+ self._is_dirty = False
+ self._pending_uploads = []
+ if not self._config_path:
raise ValueError('Must supply config file path.')
- if not os.path.exists(file_path):
+ if not os.path.exists(self._config_path):
if not writable:
raise exceptions.EmptyConfigError(file_path)
self._config_data = {}
- self.CreateEmptyConfig(file_path)
+ self._WriteConfigToFile(self._config_path, dependencies=self._config_data)
else:
with open(file_path, 'r') as f:
config_data = json.load(f)
@@ -118,13 +123,12 @@ class BaseConfig(object):
raise exceptions.ReadWriteError(
'Trying to read dependency info from a writable config. File for '
'config: %s' % self._config_path)
- for dep in self._config_data:
-
- base_path = os.path.dirname(self._config_path)
- dependency_dict = self._config_data.get(dep, {})
- platforms_dict = dependency_dict.get('file_info')
- cs_bucket = dependency_dict.get('cloud_storage_bucket', None)
+ base_path = os.path.dirname(self._config_path)
+ for dependency in self._config_data:
+ dependency_dict = self._config_data.get(dependency)
+ cs_bucket = dependency_dict.get('cloud_storage_bucket')
cs_base_folder = dependency_dict.get('cloud_storage_base_folder', '')
+ platforms_dict = dependency_dict.get('file_info', {})
for platform in platforms_dict:
platform_info = platforms_dict.get(platform)
local_paths = platform_info.get('local_paths', [])
@@ -144,7 +148,7 @@ class BaseConfig(object):
cs_remote_path = None
cs_hash = platform_info.get('cloud_storage_hash', None)
if cs_hash:
- cs_remote_file = '%s_%s' % (dep, cs_hash)
+ cs_remote_file = '%s_%s' % (dependency, cs_hash)
cs_remote_path = cs_remote_file if not cs_base_folder else (
'%s/%s' % (cs_base_folder, cs_remote_file))
@@ -152,29 +156,15 @@ class BaseConfig(object):
if download_path or cs_remote_path or cs_hash or version_in_cs:
dep_info = dependency_info.DependencyInfo(
- dep, platform, self._config_path, cs_bucket=cs_bucket,
+ dependency, platform, self._config_path, cs_bucket=cs_bucket,
cs_remote_path=cs_remote_path, download_path=download_path,
cs_hash=cs_hash, version_in_cs=version_in_cs,
local_paths=local_paths)
else:
dep_info = dependency_info.DependencyInfo(
- dep, platform, self._config_path, local_paths=local_paths)
- yield dep_info
-
- @classmethod
- def CreateEmptyConfig(cls, file_path):
- """Create an empty BaseConfig json dict and write it out to |file_path|.
+ dependency, platform, self._config_path, local_paths=local_paths)
- Raises:
- ValueError: If the path already exists.
- """
- if os.path.exists(file_path):
- raise ValueError('File already exists, and would be overwritten.')
- json_dict = {'config_type': cls.GetConfigType(),
- 'dependencies': {}}
- with open(file_path, 'w') as outfile:
- json.dump(json_dict, outfile, indent=2, sort_keys=True)
- return json_dict
+ yield dep_info
@classmethod
def GetConfigType(cls):
@@ -184,13 +174,105 @@ class BaseConfig(object):
def config_path(self):
return self._config_path
- def UpdateCloudStorageDependency(
- self, dependency, platform, dependency_path, version=None):
- """Update the cloud storage hash and the version for the given dependency.
+ def AddCloudStorageDependencyUpdateJob(
+ self, dependency, platform, dependency_path, version=None,
+ execute_job=True):
+ """Update the file downloaded from cloud storage for a dependency/platform.
+
+ Upload a new file to cloud storage for the given dependency and platform
+ pair and update the cloud storage hash and the version for the given pair.
+
+ Example usage:
+ The following should update the default platform for 'dep_name':
+ UpdateCloudStorageDependency('dep_name', 'default', 'path/to/file')
+
+ The following should update both the mac and win platforms for 'dep_name',
+ or neither if either update fails:
+ UpdateCloudStorageDependency(
+ 'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
+ UpdateCloudStorageDependency(
+ 'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
+ ExecuteUpdateJobs()
+
+ Args:
+ dependency: The dependency to update.
+ platform: The platform to update the dependency info for.
+ dependency_path: Path to the new dependency to be used.
+ version: Version of the updated dependency, for checking future updates
+ against.
+ execute_job: True if the config should be written to disk and the file
+ should be uploaded to cloud storage after the update. False if
+ multiple updates should be performed atomically. Must call
+ ExecuteUpdateJobs after all non-executed jobs are added to complete
+ the update.
+
+ Raises:
+ ReadWriteError: If the config was not initialized as writable, or if
+ |execute_job| is True but the config has update jobs still pending
+ execution.
+ ValueError: If no information exists in the config for |dependency| on
+ |platform|.
+ """
+ self._ValidateIsConfigUpdatable(
+ execute_job=execute_job, dependency=dependency, platform=platform)
+ self._is_dirty = True
+ cs_hash = cloud_storage.CalculateHash(dependency_path)
+ if version:
+ self._SetPlatformData(dependency, platform, 'version_in_cs', version)
+ self._SetPlatformData(dependency, platform, 'cloud_storage_hash', cs_hash)
+
+ cs_base_folder = self._GetPlatformData(
+ dependency, platform, 'cloud_storage_base_folder')
+ cs_bucket = self._GetPlatformData(
+ dependency, platform, 'cloud_storage_bucket')
+ cs_remote_path = self._CloudStorageRemotePath(
+ dependency, cs_hash, cs_base_folder)
+ self._pending_uploads.append(uploader.CloudStorageUploader(
+ cs_bucket, cs_remote_path, dependency_path))
+ if execute_job:
+ self.ExecuteUpdateJobs()
+
+ def ExecuteUpdateJobs(self, force=False):
+ """Write all config changes to the config_file specified in __init__.
+
+ Upload all files pending upload and then write the updated config to
+ file. Attempt to remove all uploaded files on failure.
+
+ Args:
+ force: True if files should be uploaded to cloud storage even if a
+ file already exists in the upload location.
+
+ Returns:
+ True: if the config was dirty and the upload succeeded.
+ False: if the config was not dirty.
+
+ Raises:
+ CloudStorageUploadConflictError: If |force| is False and the potential
+ upload location of a file already exists.
+ CloudStorageError: If copying an existing file to the backup location
+ or uploading a new file fails.
"""
- # TODO(aiolos): Only allow the config to be updated if writable is True to
- # avoid data changing underneath the dependency manager.
- raise NotImplementedError
+ self._ValidateIsConfigUpdatable()
+ if not self._is_dirty:
+ logging.info('ExecuteUpdateJobs called on clean config')
+ return False
+ if not self._pending_uploads:
+ logging.debug('No files needing upload.')
+ else:
+ try:
+ for item_pending_upload in self._pending_uploads:
+ item_pending_upload.Upload(force)
+ self._WriteConfigToFile(self._config_path, self._config_data)
+ self._pending_uploads = []
+ self._is_dirty = False
+ except:
+ # Attempt to rollback the update in any instance of failure, even user
+ # interrupt via Ctrl+C; but don't consume the exception.
+ logging.error('Update failed, attempting to roll it back.')
+ for upload_item in reversed(self._pending_uploads):
+ upload_item.Rollback()
+ raise
+ return True
def GetVersion(self, dependency, platform):
"""Return the Version information for the given dependency."""
@@ -201,9 +283,62 @@ class BaseConfig(object):
'this config.' % (dependency, platform))
return self._config_data[dependency][platform].get('version_in_cs')
+ def _SetPlatformData(self, dependency, platform, data_type, data):
+ self._ValidateIsConfigWritable()
+ dependency_dict = self._config_data.get(dependency, {})
+ platform_dict = dependency_dict.get('file_info', {}).get(platform)
+ if not platform_dict:
+ raise ValueError('No platform data for platform %s on dependency %s' %
+ (platform, dependency))
+ if (data_type == 'cloud_storage_bucket' or
+ data_type == 'cloud_storage_base_folder'):
+ self._config_data[dependency][data_type] = data
+ else:
+ self._config_data[dependency]['file_info'][platform][data_type] = data
+
+ def _GetPlatformData(self, dependency, platform, data_type=None):
+ dependency_dict = self._config_data.get(dependency, {})
+ platform_dict = dependency_dict.get('file_info', {}).get(platform)
+ if not platform_dict:
+ raise ValueError('No platform data for platform %s on dependency %s' %
+ (platform, dependency))
+ if data_type:
+ if (data_type == 'cloud_storage_bucket' or
+ data_type == 'cloud_storage_base_folder'):
+ return dependency_dict.get(data_type)
+ return platform_dict.get(data_type)
+ return platform_dict
+
+ def _ValidateIsConfigUpdatable(
+ self, execute_job=False, dependency=None, platform=None):
+ self._ValidateIsConfigWritable()
+ if self._is_dirty and execute_job:
+ raise exceptions.ReadWriteError(
+ 'A change has already been made to this config. Either call without'
+ 'using the execute_job option or first call ExecuteUpdateJobs().')
+ if dependency and not self._config_data.get(dependency):
+ raise ValueError('Cannot update information because dependency %s does '
+ 'not exist.' % dependency)
+ if platform and not self._GetPlatformData(dependency, platform):
+ raise ValueError('No dependency info is available for the given '
+ 'dependency: %s' % dependency)
+
+ def _ValidateIsConfigWritable(self):
+ if not self._writable:
+ raise exceptions.ReadWriteError(
+ 'Trying to update the information from a read-only config. '
+ 'File for config: %s' % self._config_path)
+
+ @staticmethod
+ def _CloudStorageRemotePath(dependency, cs_hash, cs_base_folder):
+ cs_remote_file = '%s_%s' % (dependency, cs_hash)
+ cs_remote_path = cs_remote_file if not cs_base_folder else (
+ '%s/%s' % (cs_base_folder, cs_remote_file))
+ return cs_remote_path
+
@classmethod
def _FormatPath(cls, file_path):
- """Format |file_path| for the current file system.
+ """ Format |file_path| for the current file system.
We may be downloading files for another platform, so paths must be
downloadable on the current system.
@@ -216,3 +351,20 @@ class BaseConfig(object):
return file_path.replace('/', os.path.sep)
return file_path
+ @classmethod
+ def _WriteConfigToFile(cls, file_path, dependencies=None):
+ json_dict = cls._GetJsonDict(dependencies)
+ file_dir = os.path.dirname(file_path)
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir)
+ with open(file_path, 'w') as outfile:
+ json.dump(json_dict, outfile, indent=2, sort_keys=True)
+ return json_dict
+
+ @classmethod
+ def _GetJsonDict(cls, dependencies=None):
+ dependencies = dependencies or {}
+ json_dict = {'config_type': cls.GetConfigType(),
+ 'dependencies': dependencies}
+ return json_dict
+
diff --git a/tools/telemetry/catapult_base/dependency_manager/base_config_unittest.py b/tools/telemetry/catapult_base/dependency_manager/base_config_unittest.py
index 5beee8c..e4fce4a 100644
--- a/tools/telemetry/catapult_base/dependency_manager/base_config_unittest.py
+++ b/tools/telemetry/catapult_base/dependency_manager/base_config_unittest.py
@@ -6,12 +6,1346 @@ import os
import unittest
import mock
+from pyfakefs import fake_filesystem_unittest
+from pyfakefs import fake_filesystem
+from catapult_base import cloud_storage
from catapult_base.dependency_manager import base_config
from catapult_base.dependency_manager import dependency_info
from catapult_base.dependency_manager import exceptions
+from catapult_base.dependency_manager import uploader
+class BaseConfigCreationAndUpdateUnittests(fake_filesystem_unittest.TestCase):
+ def setUp(self):
+ self.addTypeEqualityFunc(uploader.CloudStorageUploader,
+ uploader.CloudStorageUploader.__eq__)
+ self.setUpPyfakefs()
+ self.dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'bucket1',
+ 'cloud_storage_base_folder': 'dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1'},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash12',
+ 'download_path': '../../relative/dep1/path2'}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash21',
+ 'download_path': '../../relative/dep2/path1'},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+
+ self.expected_file_lines = [
+ '{', '"config_type": "BaseConfig",', '"dependencies": {',
+ '"dep1": {', '"cloud_storage_base_folder": "dependencies_folder",',
+ '"cloud_storage_bucket": "bucket1",', '"file_info": {',
+ '"plat1": {', '"cloud_storage_hash": "hash11",',
+ '"download_path": "../../relative/dep1/path1"', '},',
+ '"plat2": {', '"cloud_storage_hash": "hash12",',
+ '"download_path": "../../relative/dep1/path2"', '}', '}', '},',
+ '"dep2": {', '"cloud_storage_bucket": "bucket2",', '"file_info": {',
+ '"plat1": {', '"cloud_storage_hash": "hash21",',
+ '"download_path": "../../relative/dep2/path1"', '},',
+ '"plat2": {', '"cloud_storage_hash": "hash22",',
+ '"download_path": "../../relative/dep2/path2"', '}', '}', '}',
+ '}', '}']
+
+ self.file_path = os.path.abspath(os.path.join(
+ 'path', 'to', 'config', 'file'))
+
+ self.new_dep_path = 'path/to/new/dep'
+ self.fs.CreateFile(self.new_dep_path)
+ self.new_dep_hash = 'A23B56B7F23E798601F'
+ self.new_dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'bucket1',
+ 'cloud_storage_base_folder': 'dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1'},
+ 'plat2': {
+ 'cloud_storage_hash': self.new_dep_hash,
+ 'download_path': '../../relative/dep1/path2'}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash21',
+ 'download_path': '../../relative/dep2/path1'},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+ self.new_bucket = 'bucket1'
+ self.new_remote_path = 'dependencies_folder/dep1_%s' % self.new_dep_hash
+ self.new_pending_upload = uploader.CloudStorageUploader(
+ self.new_bucket, self.new_remote_path, self.new_dep_path)
+ self.expected_new_backup_path = '.'.join([self.new_remote_path, 'old'])
+ self.new_expected_file_lines = [
+ '{', '"config_type": "BaseConfig",', '"dependencies": {',
+ '"dep1": {', '"cloud_storage_base_folder": "dependencies_folder",',
+ '"cloud_storage_bucket": "bucket1",', '"file_info": {',
+ '"plat1": {', '"cloud_storage_hash": "hash11",',
+ '"download_path": "../../relative/dep1/path1"', '},',
+ '"plat2": {', '"cloud_storage_hash": "%s",' % self.new_dep_hash,
+ '"download_path": "../../relative/dep1/path2"', '}', '}', '},',
+ '"dep2": {', '"cloud_storage_bucket": "bucket2",', '"file_info": {',
+ '"plat1": {', '"cloud_storage_hash": "hash21",',
+ '"download_path": "../../relative/dep2/path1"', '},',
+ '"plat2": {', '"cloud_storage_hash": "hash22",',
+ '"download_path": "../../relative/dep2/path2"', '}', '}', '}',
+ '}', '}']
+
+ self.final_dep_path = 'path/to/final/dep'
+ self.fs.CreateFile(self.final_dep_path)
+ self.final_dep_hash = 'B34662F23B56B7F98601F'
+ self.final_bucket = 'bucket2'
+ self.final_remote_path = 'dep1_%s' % self.final_dep_hash
+ self.final_pending_upload = uploader.CloudStorageUploader(
+ self.final_bucket, self.final_remote_path, self.final_dep_path)
+ self.expected_final_backup_path = '.'.join([self.final_remote_path,
+ 'old'])
+ self.final_dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'bucket1',
+ 'cloud_storage_base_folder': 'dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1'},
+ 'plat2': {
+ 'cloud_storage_hash': self.new_dep_hash,
+ 'download_path': '../../relative/dep1/path2'}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': self.final_dep_hash,
+ 'download_path': '../../relative/dep2/path1'},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+ self.final_expected_file_lines = [
+ '{', '"config_type": "BaseConfig",', '"dependencies": {',
+ '"dep1": {', '"cloud_storage_base_folder": "dependencies_folder",',
+ '"cloud_storage_bucket": "bucket1",', '"file_info": {',
+ '"plat1": {', '"cloud_storage_hash": "hash11",',
+ '"download_path": "../../relative/dep1/path1"', '},',
+ '"plat2": {', '"cloud_storage_hash": "%s",' % self.new_dep_hash,
+ '"download_path": "../../relative/dep1/path2"', '}', '}', '},',
+ '"dep2": {', '"cloud_storage_bucket": "bucket2",', '"file_info": {',
+ '"plat1": {', '"cloud_storage_hash": "%s",' % self.final_dep_hash,
+ '"download_path": "../../relative/dep2/path1"', '},',
+ '"plat2": {', '"cloud_storage_hash": "hash22",',
+ '"download_path": "../../relative/dep2/path2"', '}', '}', '}',
+ '}', '}']
+
+
+ def tearDown(self):
+ self.tearDownPyfakefs()
+
+ # Init is not meant to be overridden, so we should be mocking the
+ # base_config's json module, even in subclasses.
+ def testCreateEmptyConfig(self):
+ expected_file_lines = ['{',
+ '"config_type": "BaseConfig",',
+ '"dependencies": {}',
+ '}']
+ config = base_config.BaseConfig(self.file_path, writable=True)
+
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertEqual({}, config._config_data)
+ self.assertEqual(self.file_path, config._config_path)
+
+ def testCreateEmptyConfigError(self):
+ self.assertRaises(exceptions.EmptyConfigError,
+ base_config.BaseConfig, self.file_path)
+
+ def testCloudStorageRemotePath(self):
+ dependency = 'dep_name'
+ cs_hash = self.new_dep_hash
+ cs_base_folder = 'dependency_remote_folder'
+ expected_remote_path = '%s/%s_%s' % (cs_base_folder, dependency, cs_hash)
+ remote_path = base_config.BaseConfig._CloudStorageRemotePath(
+ dependency, cs_hash, cs_base_folder)
+ self.assertEqual(expected_remote_path, remote_path)
+
+ cs_base_folder = 'dependency_remote_folder'
+ expected_remote_path = '%s_%s' % (dependency, cs_hash)
+ remote_path = base_config.BaseConfig._CloudStorageRemotePath(
+ dependency, cs_hash, cs_base_folder)
+
+ def testGetEmptyJsonDict(self):
+ expected_json_dict = {'config_type': 'BaseConfig',
+ 'dependencies': {}}
+ json_dict = base_config.BaseConfig._GetJsonDict()
+ self.assertEqual(expected_json_dict, json_dict)
+
+ def testGetNonEmptyJsonDict(self):
+ expected_json_dict = {"config_type": "BaseConfig",
+ "dependencies": self.dependencies}
+ json_dict = base_config.BaseConfig._GetJsonDict(self.dependencies)
+ self.assertEqual(expected_json_dict, json_dict)
+
+ def testWriteEmptyConfigToFile(self):
+ expected_file_lines = ['{', '"config_type": "BaseConfig",',
+ '"dependencies": {}', '}']
+ self.assertFalse(os.path.exists(self.file_path))
+ base_config.BaseConfig._WriteConfigToFile(self.file_path)
+ self.assertTrue(os.path.exists(self.file_path))
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+
+ def testWriteNonEmptyConfigToFile(self):
+ self.assertFalse(os.path.exists(self.file_path))
+ base_config.BaseConfig._WriteConfigToFile(self.file_path, self.dependencies)
+ self.assertTrue(os.path.exists(self.file_path))
+ expected_file_lines = list(self.expected_file_lines)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsNoOp(self, base_config_cs_mock, uploader_cs_mock):
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+
+ self.assertFalse(config.ExecuteUpdateJobs())
+ self.assertFalse(config._is_dirty)
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(self.dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsFailureOnInsertNoCSCollision(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.return_value = False
+ uploader_cs_mock.Insert.side_effect = cloud_storage.CloudStorageError
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.new_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload]
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
+ expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
+ self.new_dep_path)]
+ expected_copy_calls = []
+ expected_delete_calls = []
+
+ self.assertRaises(cloud_storage.CloudStorageError,
+ config.ExecuteUpdateJobs)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(self.new_dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+ self.assertEqual(expected_delete_calls,
+ uploader_cs_mock.Delete.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsFailureOnInsertCSCollisionForce(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.return_value = True
+ uploader_cs_mock.Insert.side_effect = cloud_storage.CloudStorageError
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.new_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload]
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
+ expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
+ self.new_dep_path)]
+ expected_copy_calls = [mock.call(self.new_bucket, self.new_bucket,
+ self.new_remote_path,
+ self.expected_new_backup_path),
+ mock.call(self.new_bucket, self.new_bucket,
+ self.expected_new_backup_path,
+ self.new_remote_path)]
+ expected_delete_calls = []
+
+ self.assertRaises(cloud_storage.CloudStorageError,
+ config.ExecuteUpdateJobs, force=True)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(self.new_dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+ self.assertEqual(expected_delete_calls,
+ uploader_cs_mock.Delete.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsFailureOnInsertCSCollisionNoForce(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.return_value = True
+ uploader_cs_mock.Insert.side_effect = cloud_storage.CloudStorageError
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.new_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload]
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
+ expected_insert_calls = []
+ expected_copy_calls = []
+ expected_delete_calls = []
+
+ self.assertRaises(cloud_storage.CloudStorageError,
+ config.ExecuteUpdateJobs)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(self.new_dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+ self.assertEqual(expected_delete_calls,
+ uploader_cs_mock.Delete.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsFailureOnCopy(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.return_value = True
+ uploader_cs_mock.Copy.side_effect = cloud_storage.CloudStorageError
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.new_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload]
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
+ expected_insert_calls = []
+ expected_copy_calls = [mock.call(self.new_bucket, self.new_bucket,
+ self.new_remote_path,
+ self.expected_new_backup_path)]
+ expected_delete_calls = []
+
+ self.assertRaises(cloud_storage.CloudStorageError,
+ config.ExecuteUpdateJobs, force=True)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(self.new_dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+ self.assertEqual(expected_delete_calls,
+ uploader_cs_mock.Delete.call_args_list)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testExecuteUpdateJobsFailureOnSecondInsertNoCSCollision(
      self, base_config_cs_mock, uploader_cs_mock):
    """Second Insert fails with no remote collisions: first upload rolled back.

    With two pending uploads and no pre-existing remote files, a
    CloudStorageError on the second Insert must propagate, the first
    (already inserted) file must be removed with a Delete, the config must
    stay dirty with both pending uploads intact, and the config file on
    disk must be unchanged.
    """
    uploader_cs_mock.Exists.return_value = False
    uploader_cs_mock.Insert.side_effect = [
        True, cloud_storage.CloudStorageError]
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    # Seed an unsaved update with two queued uploads.
    config._config_data = self.new_dependencies.copy()
    config._is_dirty = True
    config._pending_uploads = [self.new_pending_upload,
                               self.final_pending_upload]
    self.assertEqual(self.new_dependencies, config._config_data)
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path)]
    expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
                                       self.new_dep_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path,
                                       self.final_dep_path)]
    expected_copy_calls = []
    # The successful first upload must be deleted when the second one fails.
    expected_delete_calls = [mock.call(self.new_bucket, self.new_remote_path)]

    self.assertRaises(cloud_storage.CloudStorageError,
                      config.ExecuteUpdateJobs)
    # The failure must leave all in-memory state untouched.
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    self.assertEqual(self.new_dependencies, config._config_data)
    # The config file on disk must not have been rewritten.
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
    self.assertEqual(expected_insert_calls,
                     uploader_cs_mock.Insert.call_args_list)
    self.assertEqual(expected_exists_calls,
                     uploader_cs_mock.Exists.call_args_list)
    self.assertEqual(expected_copy_calls,
                     uploader_cs_mock.Copy.call_args_list)
    self.assertEqual(expected_delete_calls,
                     uploader_cs_mock.Delete.call_args_list)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testExecuteUpdateJobsFailureOnSecondInsertCSCollisionForce(
      self, base_config_cs_mock, uploader_cs_mock):
    """Both remote paths collide, force=True, and the second Insert fails.

    Each colliding file is backed up with a Copy before its Insert. When
    the second Insert raises, both originals must be restored from their
    backups (the last two Copy calls, in reverse of the backup order)
    rather than Deleted, and all in-memory and on-disk state must be left
    untouched.
    """
    uploader_cs_mock.Exists.return_value = True
    uploader_cs_mock.Insert.side_effect = [
        True, cloud_storage.CloudStorageError]
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    # Seed an unsaved update with two queued uploads.
    config._config_data = self.new_dependencies.copy()
    config._is_dirty = True
    config._pending_uploads = [self.new_pending_upload,
                               self.final_pending_upload]
    self.assertEqual(self.new_dependencies, config._config_data)
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path)]
    expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
                                       self.new_dep_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path,
                                       self.final_dep_path)]
    # Two backup copies, then two restore copies.
    expected_copy_calls = [mock.call(self.new_bucket, self.new_bucket,
                                     self.new_remote_path,
                                     self.expected_new_backup_path),
                           mock.call(self.final_bucket, self.final_bucket,
                                     self.final_remote_path,
                                     self.expected_final_backup_path),
                           mock.call(self.final_bucket, self.final_bucket,
                                     self.expected_final_backup_path,
                                     self.final_remote_path),
                           mock.call(self.new_bucket, self.new_bucket,
                                     self.expected_new_backup_path,
                                     self.new_remote_path)]
    expected_delete_calls = []

    self.assertRaises(cloud_storage.CloudStorageError,
                      config.ExecuteUpdateJobs, force=True)
    # The failure must leave all in-memory state untouched.
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    self.assertEqual(self.new_dependencies, config._config_data)
    # The config file on disk must not have been rewritten.
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
    self.assertEqual(expected_insert_calls,
                     uploader_cs_mock.Insert.call_args_list)
    self.assertEqual(expected_exists_calls,
                     uploader_cs_mock.Exists.call_args_list)
    self.assertEqual(expected_copy_calls,
                     uploader_cs_mock.Copy.call_args_list)
    self.assertEqual(expected_delete_calls,
                     uploader_cs_mock.Delete.call_args_list)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testExecuteUpdateJobsFailureOnSecondInsertFirstCSCollisionForce(
      self, base_config_cs_mock, uploader_cs_mock):
    """Only the first remote path collides; second Insert fails (force=True).

    The first dep is backed up with a Copy before its Insert; the second
    Insert then raises. Rollback must restore the first dep from its
    backup (second Copy call) and Delete nothing, since the failed second
    Insert never landed. All in-memory and on-disk state stays untouched.
    """
    # Exists answers: first dep collides, second does not (third value is
    # available for any extra existence check during rollback).
    uploader_cs_mock.Exists.side_effect = [True, False, True]
    uploader_cs_mock.Insert.side_effect = [
        True, cloud_storage.CloudStorageError]
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    # Seed an unsaved update with two queued uploads.
    config._config_data = self.new_dependencies.copy()
    config._is_dirty = True
    config._pending_uploads = [self.new_pending_upload,
                               self.final_pending_upload]
    self.assertEqual(self.new_dependencies, config._config_data)
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path)]
    expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
                                       self.new_dep_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path,
                                       self.final_dep_path)]
    # Backup of the first dep, then its restore after the failure.
    expected_copy_calls = [mock.call(self.new_bucket, self.new_bucket,
                                     self.new_remote_path,
                                     self.expected_new_backup_path),
                           mock.call(self.new_bucket, self.new_bucket,
                                     self.expected_new_backup_path,
                                     self.new_remote_path)]
    expected_delete_calls = []

    self.assertRaises(cloud_storage.CloudStorageError,
                      config.ExecuteUpdateJobs, force=True)
    # The failure must leave all in-memory state untouched.
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    self.assertEqual(self.new_dependencies, config._config_data)
    # The config file on disk must not have been rewritten.
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
    self.assertEqual(expected_insert_calls,
                     uploader_cs_mock.Insert.call_args_list)
    self.assertEqual(expected_exists_calls,
                     uploader_cs_mock.Exists.call_args_list)
    self.assertEqual(expected_copy_calls,
                     uploader_cs_mock.Copy.call_args_list)
    self.assertEqual(expected_delete_calls,
                     uploader_cs_mock.Delete.call_args_list)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testExecuteUpdateJobsFailureOnFirstCSCollisionNoForce(
      self, base_config_cs_mock, uploader_cs_mock):
    """First remote path collides and force is not set: fail immediately.

    Without force, the very first collision must abort the update before
    any Insert/Copy/Delete happens — only one Exists check is expected —
    and all in-memory and on-disk state stays untouched.
    """
    uploader_cs_mock.Exists.side_effect = [True, False, True]
    uploader_cs_mock.Insert.side_effect = [
        True, cloud_storage.CloudStorageError]
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    # Seed an unsaved update with two queued uploads.
    config._config_data = self.new_dependencies.copy()
    config._is_dirty = True
    config._pending_uploads = [self.new_pending_upload,
                               self.final_pending_upload]
    self.assertEqual(self.new_dependencies, config._config_data)
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    # Only the first existence check runs; no uploads are attempted.
    expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
    expected_insert_calls = []
    expected_copy_calls = []
    expected_delete_calls = []

    self.assertRaises(cloud_storage.CloudStorageError,
                      config.ExecuteUpdateJobs)
    # The failure must leave all in-memory state untouched.
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    self.assertEqual(self.new_dependencies, config._config_data)
    # The config file on disk must not have been rewritten.
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
    self.assertEqual(expected_insert_calls,
                     uploader_cs_mock.Insert.call_args_list)
    self.assertEqual(expected_exists_calls,
                     uploader_cs_mock.Exists.call_args_list)
    self.assertEqual(expected_copy_calls,
                     uploader_cs_mock.Copy.call_args_list)
    self.assertEqual(expected_delete_calls,
                     uploader_cs_mock.Delete.call_args_list)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testExecuteUpdateJobsFailureOnSecondCopyCSCollision(
      self, base_config_cs_mock, uploader_cs_mock):
    """Both paths collide (force=True); the second dep's backup Copy fails.

    The first dep is backed up and Inserted; the backup Copy for the
    second dep then raises. The first dep must be restored from its backup
    (third Copy call) and nothing Deleted, with all in-memory and on-disk
    state left untouched.
    """
    uploader_cs_mock.Exists.return_value = True
    uploader_cs_mock.Insert.return_value = True
    # Copy answers: first backup succeeds, second backup fails, restore
    # succeeds.
    uploader_cs_mock.Copy.side_effect = [
        True, cloud_storage.CloudStorageError, True]
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    # Seed an unsaved update with two queued uploads.
    config._config_data = self.new_dependencies.copy()
    config._is_dirty = True
    config._pending_uploads = [self.new_pending_upload,
                               self.final_pending_upload]
    self.assertEqual(self.new_dependencies, config._config_data)
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path)]
    # Only the first dep is ever inserted.
    expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
                                       self.new_dep_path)]
    expected_copy_calls = [mock.call(self.new_bucket, self.new_bucket,
                                     self.new_remote_path,
                                     self.expected_new_backup_path),
                           mock.call(self.final_bucket, self.final_bucket,
                                     self.final_remote_path,
                                     self.expected_final_backup_path),
                           mock.call(self.new_bucket, self.new_bucket,
                                     self.expected_new_backup_path,
                                     self.new_remote_path)]
    expected_delete_calls = []

    self.assertRaises(cloud_storage.CloudStorageError,
                      config.ExecuteUpdateJobs, force=True)
    # The failure must leave all in-memory state untouched.
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    self.assertEqual(self.new_dependencies, config._config_data)
    # The config file on disk must not have been rewritten.
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
    self.assertEqual(expected_insert_calls,
                     uploader_cs_mock.Insert.call_args_list)
    self.assertEqual(expected_exists_calls,
                     uploader_cs_mock.Exists.call_args_list)
    self.assertEqual(expected_copy_calls,
                     uploader_cs_mock.Copy.call_args_list)
    self.assertEqual(expected_delete_calls,
                     uploader_cs_mock.Delete.call_args_list)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testExecuteUpdateJobsFailureOnSecondCopyNoCSCollisionForce(
      self, base_config_cs_mock, uploader_cs_mock):
    """First dep uploads cleanly; the second dep's backup Copy fails.

    With force=True, the first dep (no collision) is Inserted, then the
    colliding second dep's backup Copy raises. The first upload had no
    backup, so rollback must remove it with a Delete. All in-memory and
    on-disk state stays untouched.
    """
    # Exists answers: first dep free, second dep collides.
    uploader_cs_mock.Exists.side_effect = [False, True, False]
    uploader_cs_mock.Copy.side_effect = cloud_storage.CloudStorageError
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    # Seed an unsaved update with two queued uploads.
    config._config_data = self.new_dependencies.copy()
    config._is_dirty = True
    config._pending_uploads = [self.new_pending_upload,
                               self.final_pending_upload]
    self.assertEqual(self.new_dependencies, config._config_data)
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path)]
    expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
                                       self.new_dep_path)]
    expected_copy_calls = [mock.call(self.final_bucket, self.final_bucket,
                                     self.final_remote_path,
                                     self.expected_final_backup_path)]
    # No backup existed for the first dep, so it is rolled back via Delete.
    expected_delete_calls = [mock.call(self.new_bucket, self.new_remote_path)]

    self.assertRaises(cloud_storage.CloudStorageError,
                      config.ExecuteUpdateJobs, force=True)
    # The failure must leave all in-memory state untouched.
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    self.assertEqual(self.new_dependencies, config._config_data)
    # The config file on disk must not have been rewritten.
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
    self.assertEqual(expected_insert_calls,
                     uploader_cs_mock.Insert.call_args_list)
    self.assertEqual(expected_exists_calls,
                     uploader_cs_mock.Exists.call_args_list)
    self.assertEqual(expected_copy_calls,
                     uploader_cs_mock.Copy.call_args_list)
    self.assertEqual(expected_delete_calls,
                     uploader_cs_mock.Delete.call_args_list)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testExecuteUpdateJobsFailureOnSecondCopyNoCSCollisionNoForce(
      self, base_config_cs_mock, uploader_cs_mock):
    """First dep uploads cleanly; second dep collides and force is not set.

    The first dep (no collision) is Inserted; the second dep's collision
    then aborts the update before any Copy is attempted. The first upload
    must be rolled back with a Delete, and all in-memory and on-disk state
    stays untouched.
    """
    uploader_cs_mock.Exists.side_effect = [False, True, False]
    uploader_cs_mock.Copy.side_effect = cloud_storage.CloudStorageError
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    # Seed an unsaved update with two queued uploads.
    config._config_data = self.new_dependencies.copy()
    config._is_dirty = True
    config._pending_uploads = [self.new_pending_upload,
                               self.final_pending_upload]
    self.assertEqual(self.new_dependencies, config._config_data)
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path),
                             mock.call(self.final_bucket,
                                       self.final_remote_path)]
    expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
                                       self.new_dep_path)]
    # Without force, no backup Copy is ever attempted.
    expected_copy_calls = []
    expected_delete_calls = [mock.call(self.new_bucket, self.new_remote_path)]

    self.assertRaises(cloud_storage.CloudStorageError,
                      config.ExecuteUpdateJobs)
    # The failure must leave all in-memory state untouched.
    self.assertTrue(config._is_dirty)
    self.assertEqual(2, len(config._pending_uploads))
    self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
    self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
    self.assertEqual(self.new_dependencies, config._config_data)
    # The config file on disk must not have been rewritten.
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
    self.assertEqual(expected_insert_calls,
                     uploader_cs_mock.Insert.call_args_list)
    self.assertEqual(expected_exists_calls,
                     uploader_cs_mock.Exists.call_args_list)
    self.assertEqual(expected_copy_calls,
                     uploader_cs_mock.Copy.call_args_list)
    self.assertEqual(expected_delete_calls,
                     uploader_cs_mock.Delete.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsSuccessOnePendingDepNoCloudStorageCollision(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.return_value = False
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.new_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload]
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
+ expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
+ self.new_dep_path)]
+ expected_copy_calls = []
+ expected_delete_calls = []
+
+ self.assertTrue(config.ExecuteUpdateJobs())
+ self.assertFalse(config._is_dirty)
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(self.new_dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.new_expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+ self.assertEqual(expected_delete_calls,
+ uploader_cs_mock.Delete.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsSuccessOnePendingDepCloudStorageCollision(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.return_value = True
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.new_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload]
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
+ expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
+ self.new_dep_path)]
+ expected_copy_calls = [mock.call(self.new_bucket, self.new_bucket,
+ self.new_remote_path,
+ self.expected_new_backup_path)]
+
+ self.assertTrue(config.ExecuteUpdateJobs(force=True))
+ self.assertFalse(config._is_dirty)
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(self.new_dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.new_expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsErrorOnePendingDepCloudStorageCollisionNoForce(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.return_value = True
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.new_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload]
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path)]
+ expected_insert_calls = []
+ expected_copy_calls = []
+
+ self.assertRaises(exceptions.CloudStorageUploadConflictError,
+ config.ExecuteUpdateJobs)
+ self.assertTrue(config._is_dirty)
+ self.assertTrue(config._pending_uploads)
+ self.assertEqual(self.new_dependencies, config._config_data)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testExecuteUpdateJobsSuccessMultiplePendingDepsOneCloudStorageCollision(
+ self, base_config_cs_mock, uploader_cs_mock):
+ uploader_cs_mock.Exists.side_effect = [False, True]
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ config._config_data = self.final_dependencies.copy()
+ config._is_dirty = True
+ config._pending_uploads = [self.new_pending_upload,
+ self.final_pending_upload]
+ self.assertEqual(self.final_dependencies, config._config_data)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(2, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(self.final_pending_upload, config._pending_uploads[1])
+
+ expected_exists_calls = [mock.call(self.new_bucket, self.new_remote_path),
+ mock.call(self.final_bucket,
+ self.final_remote_path)]
+ expected_insert_calls = [mock.call(self.new_bucket, self.new_remote_path,
+ self.new_dep_path),
+ mock.call(self.final_bucket,
+ self.final_remote_path,
+ self.final_dep_path)]
+ expected_copy_calls = [mock.call(self.final_bucket, self.final_bucket,
+ self.final_remote_path,
+ self.expected_final_backup_path)]
+
+ self.assertTrue(config.ExecuteUpdateJobs(force=True))
+ self.assertFalse(config._is_dirty)
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(self.final_dependencies, config._config_data)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ expected_file_lines = list(self.final_expected_file_lines)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(expected_insert_calls,
+ uploader_cs_mock.Insert.call_args_list)
+ self.assertEqual(expected_exists_calls,
+ uploader_cs_mock.Exists.call_args_list)
+ self.assertEqual(expected_copy_calls,
+ uploader_cs_mock.Copy.call_args_list)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testUpdateCloudStorageDependenciesReadOnlyConfig(
+ self, base_config_cs_mock, uploader_cs_mock):
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path)
+ self.assertRaises(
+ exceptions.ReadWriteError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path')
+ self.assertRaises(
+ exceptions.ReadWriteError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path', version='1.2.3')
+ self.assertRaises(
+ exceptions.ReadWriteError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path', execute_job=False)
+ self.assertRaises(
+ exceptions.ReadWriteError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path', version='1.2.3', execute_job=False)
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testUpdateCloudStorageDependenciesMissingDependency(
+ self, base_config_cs_mock, uploader_cs_mock):
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertRaises(ValueError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path')
+ self.assertRaises(ValueError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path', version='1.2.3')
+ self.assertRaises(ValueError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path', execute_job=False)
+ self.assertRaises(ValueError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path', version='1.2.3', execute_job=False)
+
  @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
  @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
  def testUpdateCloudStorageDependenciesWrite(
      self, base_config_cs_mock, uploader_cs_mock):
    """Adding update jobs with execute_job=True writes through immediately.

    Each AddCloudStorageDependencyUpdateJob call must leave the config
    clean with no pending uploads and rewrite the file on disk with the
    new dependency info.
    """
    expected_dependencies = self.dependencies
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
    config = base_config.BaseConfig(self.file_path, writable=True)
    self.assertFalse(config._is_dirty)
    self.assertEqual(expected_dependencies, config._config_data)

    # First job: update dep1/plat2 with the new file's hash.
    base_config_cs_mock.CalculateHash.return_value = self.new_dep_hash
    uploader_cs_mock.Exists.return_value = False
    expected_dependencies = self.new_dependencies
    config.AddCloudStorageDependencyUpdateJob(
        'dep1', 'plat2', self.new_dep_path, execute_job=True)
    self.assertFalse(config._is_dirty)
    self.assertFalse(config._pending_uploads)
    self.assertEqual(expected_dependencies, config._config_data)
    # check that file contents has been updated
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    expected_file_lines = list(self.new_expected_file_lines)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))

    # Second job: update dep2/plat1; the file is rewritten again.
    expected_dependencies = self.final_dependencies
    base_config_cs_mock.CalculateHash.return_value = self.final_dep_hash
    config.AddCloudStorageDependencyUpdateJob(
        'dep2', 'plat1', self.final_dep_path, execute_job=True)
    self.assertFalse(config._is_dirty)
    self.assertFalse(config._pending_uploads)
    self.assertEqual(expected_dependencies, config._config_data)
    # check that file contents has been updated
    expected_file_lines = list(self.final_expected_file_lines)
    file_module = fake_filesystem.FakeFileOpen(self.fs)
    for line in file_module(self.file_path):
      self.assertEqual(expected_file_lines.pop(0), line.strip())
    self.fs.CloseOpenFile(file_module(self.file_path))
+
+ @mock.patch('catapult_base.dependency_manager.uploader.cloud_storage')
+ @mock.patch('catapult_base.dependency_manager.base_config.cloud_storage')
+ def testUpdateCloudStorageDependenciesNoWrite(
+ self, base_config_cs_mock, uploader_cs_mock):
+ self.fs.CreateFile(self.file_path,
+ contents='\n'.join(self.expected_file_lines))
+ config = base_config.BaseConfig(self.file_path, writable=True)
+
+ self.assertRaises(ValueError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path')
+ self.assertRaises(ValueError, config.AddCloudStorageDependencyUpdateJob,
+ 'dep', 'plat', 'path', version='1.2.3')
+
+ expected_dependencies = self.dependencies
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertFalse(config._is_dirty)
+ self.assertFalse(config._pending_uploads)
+ self.assertEqual(expected_dependencies, config._config_data)
+
+ base_config_cs_mock.CalculateHash.return_value = self.new_dep_hash
+ uploader_cs_mock.Exists.return_value = False
+ expected_dependencies = self.new_dependencies
+ config.AddCloudStorageDependencyUpdateJob(
+ 'dep1', 'plat2', self.new_dep_path, execute_job=False)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(1, len(config._pending_uploads))
+ self.assertEqual(self.new_pending_upload, config._pending_uploads[0])
+ self.assertEqual(expected_dependencies, config._config_data)
+ # check that file contents have not been updated.
+ expected_file_lines = list(self.expected_file_lines)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+
+ expected_dependencies = self.final_dependencies
+ base_config_cs_mock.CalculateHash.return_value = self.final_dep_hash
+ config.AddCloudStorageDependencyUpdateJob(
+ 'dep2', 'plat1', self.final_dep_path, execute_job=False)
+ self.assertTrue(config._is_dirty)
+ self.assertEqual(expected_dependencies, config._config_data)
+ # check that file contents have not been updated.
+ expected_file_lines = list(self.expected_file_lines)
+ file_module = fake_filesystem.FakeFileOpen(self.fs)
+ for line in file_module(self.file_path):
+ self.assertEqual(expected_file_lines.pop(0), line.strip())
+ self.fs.CloseOpenFile(file_module(self.file_path))
+
+
+class BaseConfigDataManipulationUnittests(fake_filesystem_unittest.TestCase):
  def setUp(self):
    """Builds a two-dependency config file in a fake filesystem."""
    # Compare CloudStorageUploader instances by value in assertEqual.
    # NOTE(review): addTypeEqualityFunc calls its function as
    # func(first, second, msg=...); __eq__ does not take msg — confirm this
    # works when a comparison of two uploaders actually fails.
    self.addTypeEqualityFunc(uploader.CloudStorageUploader,
                             uploader.CloudStorageUploader.__eq__)
    self.setUpPyfakefs()

    # Cloud storage details for dep1/plat2, shared with the tests below.
    self.cs_bucket = 'bucket1'
    self.cs_base_folder = 'dependencies_folder'
    self.cs_hash = 'hash12'
    self.download_path = '../../relative/dep1/path2'
    self.local_paths = ['../../../relative/local/path21',
                        '../../../relative/local/path22']
    self.platform_dict = {'cloud_storage_hash': self.cs_hash,
                          'download_path': self.download_path,
                          'local_paths': self.local_paths}
    # Parsed form of the dependency data written to self.file_path.
    self.dependencies = {
        'dep1': {'cloud_storage_bucket': self.cs_bucket,
                 'cloud_storage_base_folder': self.cs_base_folder,
                 'file_info': {
                     'plat1': {
                         'cloud_storage_hash': 'hash11',
                         'download_path': '../../relative/dep1/path1',
                         'local_paths': ['../../../relative/local/path11',
                                         '../../../relative/local/path12']},
                     'plat2': self.platform_dict
                 }
                },
        'dep2': {'cloud_storage_bucket': 'bucket2',
                 'file_info': {
                     'plat1': {
                         'cloud_storage_hash': 'hash21',
                         'download_path': '../../relative/dep2/path1',
                         'local_paths': ['../../../relative/local/path31',
                                         '../../../relative/local/path32']},
                     'plat2': {
                         'cloud_storage_hash': 'hash22',
                         'download_path': '../../relative/dep2/path2'}}}}

    self.file_path = os.path.abspath(os.path.join(
        'path', 'to', 'config', 'file'))


    # The serialized config, one stripped line per entry, matching how
    # BaseConfig writes JSON to disk.
    self.expected_file_lines = [
        '{', '"config_type": "BaseConfig",', '"dependencies": {',
        '"dep1": {', '"cloud_storage_base_folder": "dependencies_folder",',
        '"cloud_storage_bucket": "bucket1",', '"file_info": {',
        '"plat1": {', '"cloud_storage_hash": "hash11",',
        '"download_path": "../../relative/dep1/path1",',
        '"local_paths": [', '"../../../relative/local/path11",',
        '"../../../relative/local/path12"', ']', '},',
        '"plat2": {', '"cloud_storage_hash": "hash12",',
        '"download_path": "../../relative/dep1/path2",',
        '"local_paths": [', '"../../../relative/local/path21",',
        '"../../../relative/local/path22"', ']',
        '}', '}', '},',
        '"dep2": {', '"cloud_storage_bucket": "bucket2",', '"file_info": {',
        '"plat1": {', '"cloud_storage_hash": "hash21",',
        '"download_path": "../../relative/dep2/path1",',
        '"local_paths": [', '"../../../relative/local/path31",',
        '"../../../relative/local/path32"', ']', '},',
        '"plat2": {', '"cloud_storage_hash": "hash22",',
        '"download_path": "../../relative/dep2/path2"', '}', '}', '}',
        '}', '}']
    self.fs.CreateFile(self.file_path,
                       contents='\n'.join(self.expected_file_lines))
+
+
+ def testSetPlatformDataFailureNotWritable(self):
+ config = base_config.BaseConfig(self.file_path)
+ self.assertRaises(exceptions.ReadWriteError, config._SetPlatformData,
+ 'dep1', 'plat1', 'cloud_storage_bucket', 'new_bucket')
+ self.assertEqual(self.dependencies, config._config_data)
+
+ def testSetPlatformDataFailure(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertRaises(ValueError, config._SetPlatformData, 'missing_dep',
+ 'plat2', 'cloud_storage_bucket', 'new_bucket')
+ self.assertEqual(self.dependencies, config._config_data)
+ self.assertRaises(ValueError, config._SetPlatformData, 'dep1',
+ 'missing_plat', 'cloud_storage_bucket', 'new_bucket')
+ self.assertEqual(self.dependencies, config._config_data)
+
+
+ def testSetPlatformDataCloudStorageBucketSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ updated_cs_dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'new_bucket',
+ 'cloud_storage_base_folder': 'dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1',
+ 'local_paths': ['../../../relative/local/path11',
+ '../../../relative/local/path12']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash12',
+ 'download_path': '../../relative/dep1/path2',
+ 'local_paths': ['../../../relative/local/path21',
+ '../../../relative/local/path22']}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash21',
+ 'download_path': '../../relative/dep2/path1',
+ 'local_paths': ['../../../relative/local/path31',
+ '../../../relative/local/path32']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+ config._SetPlatformData('dep1', 'plat2', 'cloud_storage_bucket',
+ 'new_bucket')
+ self.assertEqual(updated_cs_dependencies, config._config_data)
+
+ def testSetPlatformDataCloudStorageBaseFolderSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ updated_cs_dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'bucket1',
+ 'cloud_storage_base_folder': 'new_dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1',
+ 'local_paths': ['../../../relative/local/path11',
+ '../../../relative/local/path12']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash12',
+ 'download_path': '../../relative/dep1/path2',
+ 'local_paths': ['../../../relative/local/path21',
+ '../../../relative/local/path22']}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash21',
+ 'download_path': '../../relative/dep2/path1',
+ 'local_paths': ['../../../relative/local/path31',
+ '../../../relative/local/path32']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+ config._SetPlatformData('dep1', 'plat2', 'cloud_storage_base_folder',
+ 'new_dependencies_folder')
+ self.assertEqual(updated_cs_dependencies, config._config_data)
+
+ def testSetPlatformDataHashSuccess(self):
+ self.maxDiff = None
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ updated_cs_dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'bucket1',
+ 'cloud_storage_base_folder': 'dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1',
+ 'local_paths': ['../../../relative/local/path11',
+ '../../../relative/local/path12']},
+ 'plat2': {
+ 'cloud_storage_hash': 'new_hash',
+ 'download_path': '../../relative/dep1/path2',
+ 'local_paths': ['../../../relative/local/path21',
+ '../../../relative/local/path22']}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash21',
+ 'download_path': '../../relative/dep2/path1',
+ 'local_paths': ['../../../relative/local/path31',
+ '../../../relative/local/path32']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+ config._SetPlatformData('dep1', 'plat2', 'cloud_storage_hash',
+ 'new_hash')
+ self.assertEqual(updated_cs_dependencies, config._config_data)
+
+ def testSetPlatformDataDownloadPathSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ updated_cs_dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'bucket1',
+ 'cloud_storage_base_folder': 'dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1',
+ 'local_paths': ['../../../relative/local/path11',
+ '../../../relative/local/path12']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash12',
+ 'download_path': '../../new/dep1/path2',
+ 'local_paths': ['../../../relative/local/path21',
+ '../../../relative/local/path22']}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash21',
+ 'download_path': '../../relative/dep2/path1',
+ 'local_paths': ['../../../relative/local/path31',
+ '../../../relative/local/path32']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+ config._SetPlatformData('dep1', 'plat2', 'download_path',
+ '../../new/dep1/path2')
+ self.assertEqual(updated_cs_dependencies, config._config_data)
+
+ def testSetPlatformDataLocalPathSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ updated_cs_dependencies = {
+ 'dep1': {'cloud_storage_bucket': 'bucket1',
+ 'cloud_storage_base_folder': 'dependencies_folder',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash11',
+ 'download_path': '../../relative/dep1/path1',
+ 'local_paths': ['../../../relative/local/path11',
+ '../../../relative/local/path12']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash12',
+ 'download_path': '../../relative/dep1/path2',
+ 'local_paths': ['../../new/relative/local/path21',
+ '../../new/relative/local/path22']}}},
+ 'dep2': {'cloud_storage_bucket': 'bucket2',
+ 'file_info': {
+ 'plat1': {
+ 'cloud_storage_hash': 'hash21',
+ 'download_path': '../../relative/dep2/path1',
+ 'local_paths': ['../../../relative/local/path31',
+ '../../../relative/local/path32']},
+ 'plat2': {
+ 'cloud_storage_hash': 'hash22',
+ 'download_path': '../../relative/dep2/path2'}}}}
+ config._SetPlatformData('dep1', 'plat2', 'local_paths',
+ ['../../new/relative/local/path21',
+ '../../new/relative/local/path22'])
+ self.assertEqual(updated_cs_dependencies, config._config_data)
+
+ def testGetPlatformDataFailure(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertRaises(ValueError, config._GetPlatformData, 'missing_dep',
+ 'plat2', 'cloud_storage_bucket')
+ self.assertEqual(self.dependencies, config._config_data)
+ self.assertRaises(ValueError, config._GetPlatformData, 'dep1',
+ 'missing_plat', 'cloud_storage_bucket')
+ self.assertEqual(self.dependencies, config._config_data)
+
+ def testGetPlatformDataDictSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertEqual(self.platform_dict,
+ config._GetPlatformData('dep1', 'plat2'))
+ self.assertEqual(self.dependencies, config._config_data)
+
+ def testGetPlatformDataCloudStorageBucketSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertEqual(self.cs_bucket, config._GetPlatformData(
+ 'dep1', 'plat2', 'cloud_storage_bucket'))
+ self.assertEqual(self.dependencies, config._config_data)
+
+ def testGetPlatformDataCloudStorageBaseFolderSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertEqual(self.cs_base_folder, config._GetPlatformData(
+ 'dep1', 'plat2', 'cloud_storage_base_folder'))
+ self.assertEqual(self.dependencies, config._config_data)
+
+ def testGetPlatformDataHashSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertEqual(self.cs_hash, config._GetPlatformData(
+ 'dep1', 'plat2', 'cloud_storage_hash'))
+ self.assertEqual(self.dependencies, config._config_data)
+
+ def testGetPlatformDataDownloadPathSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertEqual(self.download_path, config._GetPlatformData(
+ 'dep1', 'plat2', 'download_path'))
+ self.assertEqual(self.dependencies, config._config_data)
+
+ def testGetPlatformDataLocalPathSuccess(self):
+ config = base_config.BaseConfig(self.file_path, writable=True)
+ self.assertEqual(self.local_paths, config._GetPlatformData(
+ 'dep1', 'plat2', 'local_paths'))
+ self.assertEqual(self.dependencies, config._config_data)
+
class BaseConfigTest(unittest.TestCase):
""" Subclassable unittests for BaseConfig.
For subclasses: override setUp, GetConfigDataFromDict,
@@ -67,46 +1401,6 @@ class BaseConfigTest(unittest.TestCase):
def GetConfigDataFromDict(self, config_dict):
return config_dict.get('dependencies', {})
-
- # Init is not meant to be overridden, so we should be mocking the
- # base_config's json module, even in subclasses.
- @mock.patch('catapult_base.dependency_manager.base_config.json.dump')
- @mock.patch('os.path.exists')
- @mock.patch('__builtin__.open')
- def testCreateEmptyConfig(self, open_mock, exists_mock, dump_mock):
- exists_mock.return_value = False
- expected_dump = mock.call(self.empty_dict, mock.ANY, sort_keys=True,
- indent=2)
- expected_open = mock.call('file_path', 'w')
- config_dict = self.config_class.CreateEmptyConfig('file_path')
- self.assertEqual(dump_mock.call_args, expected_dump)
- self.assertEqual(expected_open, open_mock.call_args)
- self.assertEqual(self.empty_dict, config_dict)
-
- exists_mock.return_value = True
- self.assertRaises(ValueError,
- self.config_class.CreateEmptyConfig, 'file_path')
-
-
- # Init is not meant to be overridden, so we should be mocking the
- # base_config's json module, even in subclasses.
- @mock.patch(
- 'catapult_base.dependency_manager.base_config.BaseConfig.CreateEmptyConfig') #pylint: disable=line-too-long
- @mock.patch('catapult_base.dependency_manager.base_config.json')
- @mock.patch('os.path')
- @mock.patch('__builtin__.open')
- def testInitNoFile(self, open_mock, path_mock, json_mock, create_config_mock):
- path_mock.exists.return_value = False
- # Writable config.
- config = self.config_class('file_path', writable=True)
- self.assertEqual(self.GetConfigDataFromDict(self.empty_dict),
- config._config_data)
- # Not writable config.
- self.assertRaises(exceptions.EmptyConfigError,
- self.config_class, 'file_path')
- create_config_mock.assert_called_once_with('file_path')
-
-
@mock.patch('os.path')
@mock.patch('__builtin__.open')
def testInitBaseProperties(self, open_mock, path_mock):
diff --git a/tools/telemetry/catapult_base/dependency_manager/dependency_manager.py b/tools/telemetry/catapult_base/dependency_manager/dependency_manager.py
index 9e1de84..daec6bd 100644
--- a/tools/telemetry/catapult_base/dependency_manager/dependency_manager.py
+++ b/tools/telemetry/catapult_base/dependency_manager/dependency_manager.py
@@ -89,8 +89,6 @@ class DependencyManager(object):
"""
dependency_info = self._GetDependencyInfo(dependency, platform)
if not dependency_info:
- logging.error(
- 'The dependency_manager was not initialized with the dependency.')
if not try_support_binaries:
raise exceptions.NoPathFoundError(dependency, platform)
# TODO(aiolos): Remove the support_binaries call and always raise
@@ -103,8 +101,8 @@ class DependencyManager(object):
assert len(platform_parts) == 2
platform_os, platform_arch = platform_parts
logging.info('Calling into support_binaries with dependency %s, platform '
- '%s and arch %s.' % (dependency, platform_os,
- platform_arch))
+ '%s and arch %s. support_binaries is deprecated.'
+ % (dependency, platform_os, platform_arch))
return support_binaries.FindPath(dependency, platform_arch,
platform_os)
path = self._LocalPath(dependency_info)
@@ -141,8 +139,6 @@ class DependencyManager(object):
# system.
dependency_info = self._GetDependencyInfo(dependency, platform)
if not dependency_info:
- logging.error(
- 'The dependency_manager was not initialized with the dependency.')
if not try_support_binaries:
raise exceptions.NoPathFoundError(dependency, platform)
return support_binaries.FindLocallyBuiltPath(dependency)
diff --git a/tools/telemetry/catapult_base/dependency_manager/dependency_manager_unittest.py b/tools/telemetry/catapult_base/dependency_manager/dependency_manager_unittest.py
index e0486b6..59bce63 100644
--- a/tools/telemetry/catapult_base/dependency_manager/dependency_manager_unittest.py
+++ b/tools/telemetry/catapult_base/dependency_manager/dependency_manager_unittest.py
@@ -7,13 +7,12 @@ import stat
import unittest
import mock
+from pyfakefs import fake_filesystem_unittest
from catapult_base import dependency_manager
from catapult_base import cloud_storage
from catapult_base.dependency_manager import exceptions
-from pyfakefs import fake_filesystem_unittest
-
class DependencyManagerTest(unittest.TestCase):
diff --git a/tools/telemetry/catapult_base/dependency_manager/exceptions.py b/tools/telemetry/catapult_base/dependency_manager/exceptions.py
index 4d303aa..ecd5934 100644
--- a/tools/telemetry/catapult_base/dependency_manager/exceptions.py
+++ b/tools/telemetry/catapult_base/dependency_manager/exceptions.py
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from catapult_base import cloud_storage
+
class UnsupportedConfigFormatError(ValueError):
def __init__(self, config_type, config_file):
if not config_type:
@@ -12,6 +14,7 @@ class UnsupportedConfigFormatError(ValueError):
'by the dependency manager.' % (config_file, config_type))
super(UnsupportedConfigFormatError, self).__init__(message)
+
class EmptyConfigError(ValueError):
def __init__(self, file_path):
super(EmptyConfigError, self).__init__('Empty config at %s.' % file_path)
@@ -28,5 +31,14 @@ class NoPathFoundError(Exception):
'No file could be found locally, and no file to download from cloud '
'storage for %s on platform %s' % (dependency, platform))
+
class ReadWriteError(Exception):
pass
+
+
+class CloudStorageUploadConflictError(cloud_storage.CloudStorageError):
+ def __init__(self, bucket, path):
+ super(CloudStorageUploadConflictError, self).__init__(
+ 'File location %s already exists in bucket %s' % (path, bucket))
+
+
diff --git a/tools/telemetry/catapult_base/dependency_manager/uploader.py b/tools/telemetry/catapult_base/dependency_manager/uploader.py
new file mode 100644
index 0000000..08e9c8f
--- /dev/null
+++ b/tools/telemetry/catapult_base/dependency_manager/uploader.py
@@ -0,0 +1,106 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+
+from catapult_base import cloud_storage
+
+from catapult_base.dependency_manager import exceptions
+
+
+BACKUP_PATH_EXTENSION = 'old'
+
+
+class CloudStorageUploader(object):
+ def __init__(self, bucket, remote_path, local_path, cs_backup_path=None):
+ if not bucket or not remote_path or not local_path:
+ raise ValueError(
+ 'Attempted to partially initialize upload data with bucket %s, '
+ 'remote_path %s, and local_path %s', bucket, remote_path, local_path)
+ if not os.path.exists(local_path):
+ raise ValueError('Attempting to initilize UploadInfo with missing '
+ 'local path %s', local_path)
+
+ self._cs_bucket = bucket
+ self._cs_remote_path = remote_path
+ self._local_path = local_path
+ self._cs_backup_path = (cs_backup_path or
+ '%s.%s' % (self._cs_remote_path,
+ BACKUP_PATH_EXTENSION))
+ self._updated = False
+ self._backed_up = False
+
+ def Upload(self, force=False):
+ """Upload all pending files and then write the updated config to disk.
+
+ Will attempt to copy files existing in the upload location to a backup
+ location in the same bucket in cloud storage if |force| is True.
+
+ Args:
+ force: True if files should be uploaded to cloud storage even if a
+ file already exists in the upload location.
+
+ Raises:
+ CloudStorageUploadConflictError: If |force| is False and the potential
+ upload location of a file already exists.
+ CloudStorageError: If copying an existing file to the backup location
+ or uploading the new file fails.
+ """
+ if cloud_storage.Exists(self._cs_bucket, self._cs_remote_path):
+ if not force:
+ raise exceptions.CloudStorageUploadConflictError(self._cs_bucket,
+ self._cs_remote_path)
+ logging.debug('A file already exists at upload path %s in self.cs_bucket'
+ ' %s', self._cs_remote_path, self._cs_bucket)
+ try:
+ cloud_storage.Copy(self._cs_bucket, self._cs_bucket,
+ self._cs_remote_path, self._cs_backup_path)
+ self._backed_up = True
+ except cloud_storage.CloudStorageError:
+ logging.error('Failed to copy existing file %s in cloud storage bucket '
+ '%s to backup location %s', self._cs_remote_path, self._cs_bucket,
+ self._cs_backup_path)
+ raise
+
+ try:
+ cloud_storage.Insert(
+ self._cs_bucket, self._cs_remote_path, self._local_path)
+ except cloud_storage.CloudStorageError:
+ logging.error('Failed to upload %s to %s in cloud_storage bucket %s',
+ self._local_path, self._cs_remote_path, self._cs_bucket)
+ raise
+ self._updated = True
+
+ def Rollback(self):
+ """Attempt to undo the previous call to Upload.
+
+ Does nothing if no previous call to Upload was made, or if nothing was
+ successfully changed.
+
+ Returns:
+ True iff changes were successfully rolled back.
+ Raises:
+ CloudStorageError: If copying the backed up file to its original
+ location or removing the uploaded file fails.
+ """
+ cloud_storage_changed = False
+ if self._backed_up:
+ cloud_storage.Copy(self._cs_bucket, self._cs_bucket, self._cs_backup_path,
+ self._cs_remote_path)
+ cloud_storage_changed = True
+ self._cs_backup_path = None
+ elif self._updated:
+ cloud_storage.Delete(self._cs_bucket, self._cs_remote_path)
+ cloud_storage_changed = True
+ self._updated = False
+ return cloud_storage_changed
+
+ def __eq__(self, other, msg=None):
+ if type(self) != type(other):
+ return False
+ return (self._local_path == other._local_path and
+ self._cs_remote_path == other._cs_remote_path and
+ self._cs_bucket == other._cs_bucket)
+
diff --git a/tools/telemetry/catapult_base/dependency_manager/uploader_unittest.py b/tools/telemetry/catapult_base/dependency_manager/uploader_unittest.py
new file mode 100644
index 0000000..63c4119
--- /dev/null
+++ b/tools/telemetry/catapult_base/dependency_manager/uploader_unittest.py
@@ -0,0 +1,91 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pyfakefs import fake_filesystem_unittest
+
+from catapult_base.dependency_manager import uploader
+
+
+class CloudStorageUploaderTest(fake_filesystem_unittest.TestCase):
+ def setUp(self):
+ self.setUpPyfakefs()
+ self.bucket = 'cloud_storage_bucket'
+ self.local_path = os.path.abspath(os.path.join('path', 'to', 'dependency'))
+ self.fs.CreateFile(self.local_path)
+ self.remote_path = 'config_folder/remote_path'
+
+ def testCloudStorageUploaderMissingData(self):
+ self.assertRaises(ValueError, uploader.CloudStorageUploader,
+ None, self.remote_path, self.local_path)
+ self.assertRaises(ValueError, uploader.CloudStorageUploader,
+ self.bucket, None, self.local_path)
+ self.assertRaises(ValueError, uploader.CloudStorageUploader,
+ self.bucket, self.remote_path, None)
+
+ def testCloudStorageUploaderLocalFileMissing(self):
+ self.fs.RemoveObject(self.local_path)
+ self.assertRaises(ValueError, uploader.CloudStorageUploader,
+ self.bucket, self.remote_path, self.local_path)
+
+ def testCloudStorageUploaderCreation(self):
+ upload_data = uploader.CloudStorageUploader(
+ self.bucket, self.remote_path, self.local_path)
+ expected_bucket = self.bucket
+ expected_remote_path = self.remote_path
+ expected_cs_backup_path = '%s.old' % expected_remote_path
+ expected_local_path = self.local_path
+ self.assertEqual(expected_bucket, upload_data._cs_bucket)
+ self.assertEqual(expected_remote_path, upload_data._cs_remote_path)
+ self.assertEqual(expected_local_path, upload_data._local_path)
+ self.assertEqual(expected_cs_backup_path, upload_data._cs_backup_path)
+
+ def testCloudStorageUploaderEquality(self):
+ upload_data = uploader.CloudStorageUploader(
+ self.bucket, self.remote_path, self.local_path)
+ upload_data_exact = uploader.CloudStorageUploader(
+ self.bucket, self.remote_path, self.local_path)
+ upload_data_equal = uploader.CloudStorageUploader(
+ 'cloud_storage_bucket',
+ 'config_folder/remote_path',
+ os.path.abspath(os.path.join('path', 'to', 'dependency')))
+ self.assertEqual(upload_data, upload_data)
+ self.assertEqual(upload_data, upload_data_exact)
+ self.assertEqual(upload_data_exact, upload_data)
+ self.assertEqual(upload_data, upload_data_equal)
+ self.assertEqual(upload_data_equal, upload_data)
+
+
+ def testCloudStorageUploaderInequality(self):
+ new_local_path = os.path.abspath(os.path.join('new', 'local', 'path'))
+ self.fs.CreateFile(new_local_path)
+ new_bucket = 'new_bucket'
+ new_remote_path = 'new_remote/path'
+
+ upload_data = uploader.CloudStorageUploader(
+ self.bucket, self.remote_path, self.local_path)
+ upload_data_all_different = uploader.CloudStorageUploader(
+ new_bucket, new_remote_path, new_local_path)
+ upload_data_different_bucket = uploader.CloudStorageUploader(
+ new_bucket, self.remote_path, self.local_path)
+ upload_data_different_remote_path = uploader.CloudStorageUploader(
+ self.bucket, new_remote_path, self.local_path)
+ upload_data_different_local_path = uploader.CloudStorageUploader(
+ self.bucket, self.remote_path, new_local_path)
+
+ self.assertNotEqual(upload_data, 'a string!')
+ self.assertNotEqual(upload_data, 0)
+ self.assertNotEqual(upload_data, 2354)
+ self.assertNotEqual(upload_data, None)
+ self.assertNotEqual(upload_data, upload_data_all_different)
+ self.assertNotEqual(upload_data_all_different, upload_data)
+ self.assertNotEqual(upload_data, upload_data_different_bucket)
+ self.assertNotEqual(upload_data_different_bucket, upload_data)
+ self.assertNotEqual(upload_data, upload_data_different_remote_path)
+ self.assertNotEqual(upload_data_different_remote_path, upload_data)
+ self.assertNotEqual(upload_data, upload_data_different_local_path)
+ self.assertNotEqual(upload_data_different_local_path, upload_data)
+
+  # TODO: Write unittests for Upload and Rollback.