summaryrefslogtreecommitdiffstats
path: root/native_client_sdk
diff options
context:
space:
mode:
authorsbc@chromium.org <sbc@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-04-24 00:01:11 +0000
committersbc@chromium.org <sbc@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-04-24 00:01:11 +0000
commitd6fcd492f5679134979e6039a20edcae8f4b8ddd (patch)
treefa4a804422bcb15496f7fc8875a0edc227c2e62a /native_client_sdk
parent0cc321538ec6f61bf7b1518dd6a42bf60a6a997c (diff)
downloadchromium_src-d6fcd492f5679134979e6039a20edcae8f4b8ddd.zip
chromium_src-d6fcd492f5679134979e6039a20edcae8f4b8ddd.tar.gz
chromium_src-d6fcd492f5679134979e6039a20edcae8f4b8ddd.tar.bz2
[NaCl SDK] Cache downloaded SDK archives
Add max cache size and do LRU eviction of old archives based on file mtime. Also, don't use cygtar except on win32. It slows down the untar on unix by a factor of 2 if we do. BUG= Review URL: https://chromiumcodereview.appspot.com/14093013 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@195946 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'native_client_sdk')
-rwxr-xr-xnative_client_sdk/src/build_tools/build_updater.py2
-rw-r--r--native_client_sdk/src/build_tools/sdk_tools/command/update.py154
-rw-r--r--native_client_sdk/src/build_tools/sdk_tools/config.py19
-rwxr-xr-xnative_client_sdk/src/build_tools/sdk_tools/sdk_update_main.py27
-rwxr-xr-xnative_client_sdk/src/build_tools/tests/sdktools_commands_test.py54
-rwxr-xr-xnative_client_sdk/src/build_tools/tests/sdktools_test.py12
6 files changed, 209 insertions, 59 deletions
diff --git a/native_client_sdk/src/build_tools/build_updater.py b/native_client_sdk/src/build_tools/build_updater.py
index e945359..050fe0a 100755
--- a/native_client_sdk/src/build_tools/build_updater.py
+++ b/native_client_sdk/src/build_tools/build_updater.py
@@ -178,7 +178,7 @@ def BuildUpdater(out_dir, revision_number=None):
def main(args):
parser = optparse.OptionParser()
parser.add_option('-o', '--out', help='output directory',
- dest='out_dir', default='out')
+ dest='out_dir', default=os.path.join(SRC_DIR, 'out'))
parser.add_option('-r', '--revision', help='revision number of this updater',
dest='revision', default=None)
options, args = parser.parse_args(args[1:])
diff --git a/native_client_sdk/src/build_tools/sdk_tools/command/update.py b/native_client_sdk/src/build_tools/sdk_tools/command/update.py
index 3ae4f0d..f485804 100644
--- a/native_client_sdk/src/build_tools/sdk_tools/command/update.py
+++ b/native_client_sdk/src/build_tools/sdk_tools/command/update.py
@@ -2,9 +2,11 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import hashlib
import copy
import logging
import os
+import subprocess
import sys
import urlparse
import urllib2
@@ -30,6 +32,7 @@ except ImportError:
RECOMMENDED = 'recommended'
SDK_TOOLS = 'sdk_tools'
HTTP_CONTENT_LENGTH = 'Content-Length' # HTTP Header field for content length
+DEFAULT_CACHE_SIZE = 512 * 1024 * 1024 # 1/2 Gb cache by default
class UpdateDelegate(object):
@@ -45,18 +48,88 @@ class UpdateDelegate(object):
class RealUpdateDelegate(UpdateDelegate):
- def __init__(self, user_data_dir, install_dir):
+ def __init__(self, user_data_dir, install_dir, cfg):
UpdateDelegate.__init__(self)
- self.user_data_dir = user_data_dir
+ self.archive_cache = os.path.join(user_data_dir, 'archives')
self.install_dir = install_dir
+ self.cache_max = getattr(cfg, 'cache_max', DEFAULT_CACHE_SIZE)
def BundleDirectoryExists(self, bundle_name):
bundle_path = os.path.join(self.install_dir, bundle_name)
return os.path.isdir(bundle_path)
+ def VerifyDownload(self, filename, archive):
+ """Verify that a local filename in the cache matches the given
+ online archive.
+
+ Returns True if both size and sha1 match, False otherwise.
+ """
+ filename = os.path.join(self.archive_cache, filename)
+ if not os.path.exists(filename):
+ logging.info('File does not exist: %s.' % filename)
+ return False
+ size = os.path.getsize(filename)
+ if size != archive.size:
+ logging.info('File size does not match (%d vs %d): %s.' % (size,
+ archive.size, filename))
+ return False
+ sha1_hash = hashlib.sha1()
+ with open(filename) as f:
+ sha1_hash.update(f.read())
+ if sha1_hash.hexdigest() != archive.GetChecksum():
+ logging.info('File hash does not match: %s.' % filename)
+ return False
+ return True
+
+ def BytesUsedInCache(self):
+    """Determine number of bytes currently in the local archive cache."""
+ total = 0
+ for root, _, files in os.walk(self.archive_cache):
+ for filename in files:
+ total += os.path.getsize(os.path.join(root, filename))
+ return total
+
+ def CleanupCache(self):
+ """Remove archives from the local filesystem cache until the
+ total size is below cache_max.
+
+    This is done by deleting the oldest archive files until the
+ condition is satisfied. If cache_max is zero then the entire
+ cache will be removed.
+ """
+ used = self.BytesUsedInCache()
+ logging.info('Cache usage: %d / %d' % (used, self.cache_max))
+ if used <= self.cache_max:
+ return
+ clean_bytes = used - self.cache_max
+
+ logging.info('Clearing %d bytes in archive cache' % clean_bytes)
+ file_timestamps = []
+ for root, _, files in os.walk(self.archive_cache):
+ for filename in files:
+ fullname = os.path.join(root, filename)
+ file_timestamps.append((os.path.getmtime(fullname), fullname))
+
+ file_timestamps.sort()
+ while clean_bytes > 0:
+ assert(file_timestamps)
+ filename_to_remove = file_timestamps[0][1]
+ clean_bytes -= os.path.getsize(filename_to_remove)
+ logging.info('Removing from cache: %s' % filename_to_remove)
+ os.remove(filename_to_remove)
+ # Also remove resulting empty parent directory structure
+ while True:
+ filename_to_remove = os.path.dirname(filename_to_remove)
+ if not os.listdir(filename_to_remove):
+ os.rmdir(filename_to_remove)
+ else:
+ break
+ file_timestamps = file_timestamps[1:]
+
def DownloadToFile(self, url, dest_filename):
- sdk_update_common.MakeDirs(self.user_data_dir)
- dest_path = os.path.join(self.user_data_dir, dest_filename)
+ dest_path = os.path.join(self.archive_cache, dest_filename)
+ sdk_update_common.MakeDirs(os.path.dirname(dest_path))
+
out_stream = None
url_stream = None
try:
@@ -101,28 +174,31 @@ class RealUpdateDelegate(UpdateDelegate):
raise Error('Unable to chdir into "%s".\n %s' % (extract_path, e))
for i, archive in enumerate(archives):
- archive_path = os.path.join(self.user_data_dir, archive)
+ archive_path = os.path.join(self.archive_cache, archive)
- try:
- logging.info('Opening file %s (%d/%d).' % (archive_path, i + 1,
- len(archives)))
+ if len(archives) > 1:
+ print '(file %d/%d - "%s")' % (
+ i + 1, len(archives), os.path.basename(archive_path))
+ logging.info('Extracting to %s' % (extract_path,))
+
+ if sys.platform == 'win32':
+ try:
+ logging.info('Opening file %s (%d/%d).' % (archive_path, i + 1,
+ len(archives)))
+ try:
+ tar_file = cygtar.CygTar(archive_path, 'r', verbose=True)
+ except Exception as e:
+ raise Error("Can't open archive '%s'.\n %s" % (archive_path, e))
+
+ tar_file.Extract()
+ finally:
+ if tar_file:
+ tar_file.Close()
+ else:
try:
- tar_file = cygtar.CygTar(archive_path, 'r', verbose=True)
- except Exception as e:
- raise Error('Can\'t open archive "%s".\n %s' % (archive_path, e))
-
- logging.info('Extracting to %s' % (extract_path,))
- if len(archives) > 1:
- print '(file %d/%d - "%s")' % (
- i + 1, len(archives), os.path.basename(archive_path))
- tar_file.Extract()
- finally:
- if tar_file:
- tar_file.Close()
-
- # Remove the archive.
- if os.path.exists(archive_path):
- os.remove(archive_path)
+ subprocess.check_call(['tar', 'xf', archive_path])
+ except subprocess.CalledProcessError:
+ raise Error('Error extracting archive: %s' % archive_path)
logging.info('Changing the directory to %s' % (curpath,))
os.chdir(curpath)
@@ -236,15 +312,20 @@ def _UpdateBundle(delegate, bundle, local_manifest):
archive_filenames = []
- print 'Downloading bundle %s' % (bundle.name,)
+ shown_banner = False
for i, archive in enumerate(archives):
- if len(archives) > 1:
- print '(file %d/%d - "%s")' % (
- i + 1, len(archives), os.path.basename(archive.url))
-
archive_filename = _GetFilenameFromURL(archive.url)
- sha1, size = delegate.DownloadToFile(archive.url, archive_filename)
- _ValidateArchive(archive, sha1, size)
+ archive_filename = os.path.join(bundle.name, archive_filename)
+
+ if not delegate.VerifyDownload(archive_filename, archive):
+ if not shown_banner:
+ shown_banner = True
+ print 'Downloading bundle %s' % (bundle.name,)
+ if len(archives) > 1:
+ print '(file %d/%d - "%s")' % (
+ i + 1, len(archives), os.path.basename(archive.url))
+ sha1, size = delegate.DownloadToFile(archive.url, archive_filename)
+ _ValidateArchive(archive, sha1, size)
archive_filenames.append(archive_filename)
@@ -271,17 +352,18 @@ def _UpdateBundle(delegate, bundle, local_manifest):
logging.info('Updating local manifest to include bundle %s' % (bundle.name))
local_manifest.MergeBundle(bundle)
+ delegate.CleanupCache()
def _GetFilenameFromURL(url):
- _, _, path, _, _, _ = urlparse.urlparse(url)
- return path.split('/')[-1]
+ path = urlparse.urlparse(url)[2]
+ return os.path.basename(path)
def _ValidateArchive(archive, actual_sha1, actual_size):
- if actual_sha1 != archive.GetChecksum():
- raise Error('SHA1 checksum mismatch on "%s". Expected %s but got %s' % (
- archive.name, archive.GetChecksum(), actual_sha1))
if actual_size != archive.size:
raise Error('Size mismatch on "%s". Expected %s but got %s bytes' % (
archive.name, archive.size, actual_size))
+ if actual_sha1 != archive.GetChecksum():
+ raise Error('SHA1 checksum mismatch on "%s". Expected %s but got %s' % (
+ archive.name, archive.GetChecksum(), actual_sha1))
diff --git a/native_client_sdk/src/build_tools/sdk_tools/config.py b/native_client_sdk/src/build_tools/sdk_tools/config.py
index e35e7d5..6fc9be07 100644
--- a/native_client_sdk/src/build_tools/sdk_tools/config.py
+++ b/native_client_sdk/src/build_tools/sdk_tools/config.py
@@ -5,6 +5,7 @@
import json
import logging
import urlparse
+from sdk_update_common import Error
SOURCE_WHITELIST = [
'http://localhost/', # For testing.
@@ -33,14 +34,26 @@ class Config(dict):
else:
self.sources = []
+ def LoadJson(self, json_data):
+ try:
+ self.update(json.loads(json_data))
+ except Exception as e:
+ raise Error('Error reading json config:\n%s' % str(e))
+
def ToJson(self):
- return json.dumps(self, sort_keys=False, indent=2)
+ try:
+ return json.dumps(self, sort_keys=False, indent=2)
+ except Exception as e:
+ raise Error('Json encoding error writing config:\n%s' % e)
def __getattr__(self, name):
- return self.__getitem__(name)
+ if name in self:
+ return self[name]
+ else:
+ raise AttributeError('Config does not contain: %s' % name)
def __setattr__(self, name, value):
- return self.__setitem__(name, value)
+ self[name] = value
def AddSource(self, source):
if not IsSourceValid(source):
diff --git a/native_client_sdk/src/build_tools/sdk_tools/sdk_update_main.py b/native_client_sdk/src/build_tools/sdk_tools/sdk_update_main.py
index b38933d..f763b26 100755
--- a/native_client_sdk/src/build_tools/sdk_tools/sdk_update_main.py
+++ b/native_client_sdk/src/build_tools/sdk_tools/sdk_update_main.py
@@ -8,7 +8,6 @@
import config
import cStringIO
import download
-import json
import logging
import optparse
import os
@@ -56,23 +55,29 @@ def hide(fn):
def LoadConfig(raise_on_error=False):
path = os.path.join(USER_DATA_DIR, CONFIG_FILENAME)
+ cfg = config.Config()
if not os.path.exists(path):
- return config.Config()
+ return cfg
try:
try:
with open(path) as f:
- return config.Config(json.loads(f.read()))
+ file_data = f.read()
except IOError as e:
raise Error('Unable to read config from "%s".\n %s' % (path, e))
- except Exception as e:
+
+ try:
+ cfg.LoadJson(file_data)
+ except Error as e:
raise Error('Parsing config file from "%s" failed.\n %s' % (path, e))
+ return cfg
except Error as e:
if raise_on_error:
raise
else:
logging.warn(str(e))
- return config.Config()
+
+ return cfg
def WriteConfig(cfg):
@@ -82,10 +87,7 @@ def WriteConfig(cfg):
except Exception as e:
raise Error('Unable to create directory "%s".\n %s' % (USER_DATA_DIR, e))
- try:
- cfg_json = cfg.ToJson()
- except Exception as e:
- raise Error('Json encoding error writing config "%s".\n %s' % (path, e))
+ cfg_json = cfg.ToJson()
try:
with open(path, 'w') as f:
@@ -210,7 +212,7 @@ def CMDupdate(parser, args):
try:
delegate = command.update.RealUpdateDelegate(USER_DATA_DIR,
- DEFAULT_SDK_ROOT)
+ DEFAULT_SDK_ROOT, cfg)
command.update.Update(delegate, remote_manifest, local_manifest, args,
options.force)
finally:
@@ -260,9 +262,10 @@ def CMDreinstall(parser, args):
parser.error('No bundles given')
return 0
+ cfg = LoadConfig()
try:
delegate = command.update.RealUpdateDelegate(USER_DATA_DIR,
- DEFAULT_SDK_ROOT)
+ DEFAULT_SDK_ROOT, cfg)
command.update.Reinstall(delegate, local_manifest, args)
finally:
# Always write out the local manifest, we may have successfully updated one
@@ -348,7 +351,7 @@ def UpdateSDKTools(options, args):
try:
delegate = command.update.RealUpdateDelegate(USER_DATA_DIR,
- DEFAULT_SDK_ROOT)
+ DEFAULT_SDK_ROOT, cfg)
command.update.UpdateBundleIfNeeded(
delegate,
remote_manifest,
diff --git a/native_client_sdk/src/build_tools/tests/sdktools_commands_test.py b/native_client_sdk/src/build_tools/tests/sdktools_commands_test.py
index 15933e1..d5e9841 100755
--- a/native_client_sdk/src/build_tools/tests/sdktools_commands_test.py
+++ b/native_client_sdk/src/build_tools/tests/sdktools_commands_test.py
@@ -46,7 +46,7 @@ class TestCommands(SdkToolsTestCase):
try:
dummy_path = os.path.join(temp_dir, filename)
with open(dummy_path, 'w') as stream:
- stream.write('Dummy stuff for %s' % (bundle_name,))
+ stream.write('Dummy stuff for %s' % bundle_name)
# Build the tarfile directly into the server's directory.
tar_path = os.path.join(self.basedir, tarname)
@@ -179,7 +179,7 @@ class TestCommands(SdkToolsTestCase):
"""The update command should install the contents of a bundle to the SDK."""
self._AddDummyBundle(self.manifest, 'pepper_23')
self._WriteManifest()
- output = self._Run(['update', 'pepper_23'])
+ self._Run(['update', 'pepper_23'])
self.assertTrue(os.path.exists(
os.path.join(self.basedir, 'nacl_sdk', 'pepper_23', 'dummy.txt')))
@@ -189,7 +189,7 @@ class TestCommands(SdkToolsTestCase):
self._AddDummyBundle(self.manifest, 'pepper_23')
self._WriteCacheManifest(self.manifest)
self._WriteManifest()
- output = self._Run(['update', 'pepper_23'])
+ self._Run(['update', 'pepper_23'])
self.assertTrue(os.path.exists(
os.path.join(self.basedir, 'nacl_sdk', 'pepper_23', 'dummy.txt')))
@@ -349,6 +349,54 @@ class TestCommands(SdkToolsTestCase):
output = self._Run(['list', '-r'])
self.assertTrue(re.search('I\*\s+pepper_23.*?r1337.*?r1338', output))
+ def testArchiveCacheBasic(self):
+ """Downloaded archives should be stored in the cache by default."""
+ self._AddDummyBundle(self.manifest, 'pepper_23')
+ self._WriteManifest()
+ self._Run(['update', 'pepper_23'])
+ archive_cache = os.path.join(self.cache_dir, 'archives')
+ cache_contents = os.listdir(archive_cache)
+ self.assertEqual(cache_contents, ['pepper_23'])
+ cache_contents = os.listdir(os.path.join(archive_cache, 'pepper_23'))
+ self.assertEqual(cache_contents, ['pepper_23.tar.bz2'])
+
+ def testArchiveCacheEviction(self):
+ archive_cache = os.path.join(self.cache_dir, 'archives')
+ self._AddDummyBundle(self.manifest, 'pepper_23')
+ self._AddDummyBundle(self.manifest, 'pepper_22')
+ self._WriteManifest()
+
+ # First install pepper_23
+ self._Run(['update', 'pepper_23'])
+ archive = os.path.join(archive_cache, 'pepper_23', 'pepper_23.tar.bz2')
+ archive_size = os.path.getsize(archive)
+
+ # Set the mtime on the pepper_23 bundle to be a few seconds in the past.
+ # This is needed so that the two bundles don't end up with the same
+ # timestamp which can happen on systems that don't report sub-second
+ # timestamps.
+ atime = os.path.getatime(archive)
+ mtime = os.path.getmtime(archive)
+ os.utime(archive, (atime, mtime-10))
+
+ # Set cache limit to size of pepper archive * 1.5
+ self._WriteConfig('{ "cache_max": %d }' % int(archive_size * 1.5))
+
+ # Now install pepper_22, which should cause pepper_23 to be evicted
+ self._Run(['update', 'pepper_22'])
+ cache_contents = os.listdir(archive_cache)
+ self.assertEqual(cache_contents, ['pepper_22'])
+
+ def testArchiveCacheZero(self):
+ """Archives should not be cached when cache_max is zero."""
+ self._AddDummyBundle(self.manifest, 'pepper_23')
+ self._WriteConfig('{ "cache_max": 0 }')
+ self._AddDummyBundle(self.manifest, 'pepper_23')
+ self._WriteManifest()
+ self._Run(['update', 'pepper_23'])
+ archive_cache = os.path.join(self.cache_dir, 'archives')
+    # Archive folder should be completely removed by cache cleanup
+ self.assertFalse(os.path.exists(archive_cache))
if __name__ == '__main__':
unittest.main()
diff --git a/native_client_sdk/src/build_tools/tests/sdktools_test.py b/native_client_sdk/src/build_tools/tests/sdktools_test.py
index 078f212..6f5a6ee 100755
--- a/native_client_sdk/src/build_tools/tests/sdktools_test.py
+++ b/native_client_sdk/src/build_tools/tests/sdktools_test.py
@@ -40,6 +40,7 @@ class SdkToolsTestCase(unittest.TestCase):
def SetupWithBaseDirPrefix(self, basedir_prefix, tmpdir=None):
self.basedir = tempfile.mkdtemp(prefix=basedir_prefix, dir=tmpdir)
+ self.cache_dir = os.path.join(self.basedir, 'nacl_sdk', 'sdk_cache')
# We have to make sure that we build our updaters with a version that is at
# least as large as the version in the sdk_tools bundle. If not, update
# tests may fail because the "current" version (according to the sdk_cache)
@@ -65,19 +66,22 @@ class SdkToolsTestCase(unittest.TestCase):
This manifest should only contain the sdk_tools bundle.
"""
- manifest_filename = os.path.join(self.basedir, 'nacl_sdk', 'sdk_cache',
- MANIFEST_BASENAME)
+ manifest_filename = os.path.join(self.cache_dir, MANIFEST_BASENAME)
self.manifest = manifest_util.SDKManifest()
self.manifest.LoadDataFromString(open(manifest_filename).read())
self.sdk_tools_bundle = self.manifest.GetBundle('sdk_tools')
+ def _WriteConfig(self, config_data):
+ config_filename = os.path.join(self.cache_dir, 'naclsdk_config.json')
+ with open(config_filename, 'w') as stream:
+ stream.write(config_data)
+
def _WriteCacheManifest(self, manifest):
"""Write the manifest at nacl_sdk/sdk_cache.
This is useful for faking having installed a bundle.
"""
- manifest_filename = os.path.join(self.basedir, 'nacl_sdk', 'sdk_cache',
- MANIFEST_BASENAME)
+ manifest_filename = os.path.join(self.cache_dir, MANIFEST_BASENAME)
with open(manifest_filename, 'w') as stream:
stream.write(manifest.GetDataAsString())