author    kalman@chromium.org <kalman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2012-09-17 22:21:41 +0000
committer kalman@chromium.org <kalman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2012-09-17 22:21:41 +0000
commit    c43d0a8ca8387b48d79cacda638c6705a726bf7d (patch)
tree      8bbdb0ff12d1554a453d07ebaa765dc2e2fcff86 /chrome/common/extensions/docs/build
parent    853c2ca962f5610ec3b084d2e712a21bd69887c5 (diff)
Delete old static extension docs and server2's converter.py.
BUG=147714
Review URL: https://codereview.chromium.org/10928228
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@157217 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'chrome/common/extensions/docs/build')
-rwxr-xr-x  chrome/common/extensions/docs/build/build.py        12
-rwxr-xr-x  chrome/common/extensions/docs/build/build.sh        36
-rw-r--r--  chrome/common/extensions/docs/build/directory.py    871
-rw-r--r--  chrome/common/extensions/docs/build/generator.html   58
4 files changed, 0 insertions, 977 deletions
diff --git a/chrome/common/extensions/docs/build/build.py b/chrome/common/extensions/docs/build/build.py
deleted file mode 100755
index 26baee8..0000000
--- a/chrome/common/extensions/docs/build/build.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/python
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-def main():
-  print("build.py is now DEAD. Don't torture yourself with it any more.")
-  print("")
-  print("Please see server2/README for the new way to update docs.")
-
-if __name__ == '__main__':
-  main()
diff --git a/chrome/common/extensions/docs/build/build.sh b/chrome/common/extensions/docs/build/build.sh
deleted file mode 100755
index 1624d1e..0000000
--- a/chrome/common/extensions/docs/build/build.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/sh
-
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-BUILD_DIR=$(dirname "$0")
-
-function depot_tools_error() {
-  echo "Cannot find depot_tools python - is it installed and in your path?" 1>&2
-  exit 1
-}
-
-if [ "$(uname | cut -b1-6)" == "CYGWIN" ] ; then
-  # On cygwin, we use the version of python from depot_tools.
-  echo "Detected cygwin - looking for python in depot_tools"
-  GCLIENT_PATH=$(which gclient)
-  if ! [ -f "$GCLIENT_PATH" ] ; then
-    depot_tools_error
-  fi
-  DEPOT_TOOLS=$(dirname "$GCLIENT_PATH")
-  PYTHON_PATH="$DEPOT_TOOLS/python.bat"
-  if ! [ -f "$PYTHON_PATH" ] ; then
-    depot_tools_error
-  fi
-
-  # The output from build.py doesn't seem to print to the console until
-  # it's finished, so print a message so people don't think it's hung.
-  echo "Running - this can take about a minute"
-  echo "(it goes faster if you have a Release build of DumpRenderTree)"
-
-  $PYTHON_PATH $BUILD_DIR/build.py $*
-else
-  # On all other platforms, we just run the script directly.
-  $BUILD_DIR/build.py $*
-fi
diff --git a/chrome/common/extensions/docs/build/directory.py b/chrome/common/extensions/docs/build/directory.py
deleted file mode 100644
index 41d3d9e..0000000
--- a/chrome/common/extensions/docs/build/directory.py
+++ /dev/null
@@ -1,871 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Class for parsing metadata about extension samples."""
-
-import locale
-import os
-import os.path
-import re
-import hashlib
-import zipfile
-import sys
-
-try:
-  import json
-except ImportError:
-  import simplejson as json
-
-_script_path = os.path.realpath(__file__)
-sys.path.insert(0, os.path.normpath(_script_path +
-                                     "/../../../../../../tools"))
-import json_comment_eater
-
-# Make sure we get consistent string sorting behavior by explicitly using the
-# default C locale.
-locale.setlocale(locale.LC_ALL, 'C')
-
-import sys
-_script_path = os.path.realpath(__file__)
-_build_dir = os.path.dirname(_script_path)
-_base_dir = os.path.normpath(_build_dir + "/..")
-sys.path.insert(0, os.path.normpath(_base_dir +
-                                    "/../../../../tools/json_schema_compiler"))
-import idl_schema
-
-def sorted_walk(path):
-  """ A version of os.walk that yields results in order sorted by name.
-
-  This is to prevent spurious docs changes due to os.walk returning items in a
-  filesystem dependent order (by inode creation time, etc).
- """ - for base, dirs, files in os.walk(path): - dirs.sort() - files.sort() - yield base, dirs, files - -def parse_json_file(path, encoding="utf-8"): - """ Load the specified file and parse it as JSON. - - Args: - path: Path to a file containing JSON-encoded data. - encoding: Encoding used in the file. Defaults to utf-8. - - Returns: - A Python object representing the data encoded in the file. - - Raises: - Exception: If the file could not be read or its contents could not be - parsed as JSON data. - """ - try: - json_file = open(path, 'r') - except IOError, msg: - raise Exception("Failed to read the file at %s: %s" % (path, msg)) - - try: - json_str = json_file.read() - json_obj = json.loads(json_comment_eater.Nom(json_str), encoding) - except ValueError, msg: - raise Exception("Failed to parse JSON out of file %s: %s" % (path, msg)) - finally: - json_file.close() - - return json_obj - -def parse_idl_file(path): - """ Load the specified file and parse it as IDL. - - Args: - path: Path to a file containing JSON-encoded data. - """ - api_def = idl_schema.Load(path) - for namespace_def in api_def: - namespace_dot = namespace_def['namespace'] + '.' - inline_types = dict((type_['id'], type_) - for type_ in namespace_def.get('types', []) - if type_ and type_.get('inline_doc', False)) - def SubstituteInlineDoc(prop): - prop_ref_type = prop.get('$ref', '') - type_obj = inline_types.get(namespace_dot + prop_ref_type, - inline_types.get(prop_ref_type, {})) - if not type_obj: - return - if 'properties' in type_obj: - del prop['$ref'] - prop['properties'] = dict(type_obj['properties']) - prop['type'] = 'object' - for sub_prop in prop['properties'].values(): - if isinstance(sub_prop, dict): - if 'nodoc' in sub_prop: - del sub_prop['nodoc'] - if 'name' in sub_prop: - del sub_prop['name'] - elif 'enum' in type_obj and 'type' in type_obj: - del prop['$ref'] - prop['type'] = type_obj['type'] - prop['enum'] = type_obj['enum'] - def FixReferences(prop): - # Strip namespace_dot from $ref names. - if prop.get('$ref', '').startswith(namespace_dot): - prop['$ref'] = prop['$ref'][len(namespace_dot):] - if (prop.get('type', '') == 'array' and - prop.get('items', {}).get('$ref', '').startswith(namespace_dot)): - prop['items']['$ref'] = prop['items']['$ref'][len(namespace_dot):] - SubstituteInlineDoc(prop) - if 'items' in prop: - SubstituteInlineDoc(prop['items']) - - for type_ in namespace_def.get('types', []): - if type_.get('id', '').startswith(namespace_dot): - type_['id'] = type_['id'][len(namespace_dot):] - for prop in type_.get('properties', {}).values(): - FixReferences(prop) - if type_.get('inline_doc', False): - del type_['inline_doc'] - type_['nodoc'] = True - for func in namespace_def.get('functions', []): - for param in func.get('parameters', []): - FixReferences(param) - for cb_param in param.get('parameters', []): - FixReferences(cb_param) - for event in namespace_def.get('events', []): - for param in event.get('parameters', []): - FixReferences(param) - return api_def - -def write_json_to_file(manifest, path): - """ Writes the contents of this manifest file as a JSON-encoded text file. - - Args: - manifest: The manifest structure to write. - path: The path to write the manifest file to. - - Raises: - Exception: If the file could not be written. 
- """ - manifest_text = json.dumps(manifest, indent=2, - sort_keys=True, separators=(',', ': ')) - output_path = os.path.realpath(path) - try: - output_file = open(output_path, 'w') - except IOError, msg: - raise Exception("Failed to write the samples manifest file." - "The specific error was: %s." % msg) - output_file.write(manifest_text) - output_file.close() - -class ApiManifest(object): - """ Represents the list of API methods contained in the extension API JSON """ - - def __init__(self, json_paths, idl_paths): - """ Read the supplied json files and idl files and parse their contents. - - Args: - json_paths: Array of paths to .json API schemas. - idl_paths: Array of paths to .idl API schemas. - """ - self._manifest = [] - self._temporary_json_files = [] - for path in json_paths: - self._manifest.extend(parse_json_file(path)) - for path in idl_paths: - module = parse_idl_file(path) - json_path = os.path.realpath(path.replace('.idl', '.json')) - self._temporary_json_files.append((module, json_path)) - self._manifest.extend(module) - - def _parseModuleDocLinksByKeyTypes(self, module, key): - """ - Given a specific API module, returns a dict of methods mapped to - documentation URLs. - - Args: - module: The data in the extension API JSON for a single module. - key: A key belonging to _MODULE_DOC_KEYS to determine which set of - methods to parse, and what kind of documentation URL to generate. - - Returns: - A dict of extension methods mapped to file and hash URL parts for the - corresponding documentation links, like: - { - "chrome.types.clear": "types.html#method-ChromeSetting-clear", - "chrome.types.get": "types.html#method-ChromeSetting-get" - } - - If the API namespace is defined "nodoc" then an empty dict is returned. - """ - api_dict = {} - namespace = module['namespace'] - if self._disableDocs(module): - return api_dict - if not module.has_key('types'): - return api_dict - module_types = module['types'] - for module_type in module_types: - if not module_type.has_key(key): - continue - for method in module_type[key]: - if self._disableDocs(method): - continue - method_name = 'chrome.%s.%s.%s' %\ - (namespace, module_type['id'], method['name']) - hashprefix = 'method' - if key == 'events': - hashprefix = 'event' - api_dict[method_name] = '%s.html#%s-%s-%s' %\ - (namespace, hashprefix, module_type['id'], method['name']) - return api_dict - - def _parseModuleDocLinksByKey(self, module, key): - """ - Given a specific API module, returns a dict of methods or events mapped to - documentation URLs. - - Args: - module: The data in the extension API JSON for a single module. - key: A key belonging to _MODULE_DOC_KEYS to determine which set of - methods to parse, and what kind of documentation URL to generate. - - Returns: - A dict of extension methods mapped to file and hash URL parts for the - corresponding documentation links, like: - { - "chrome.tabs.remove": "tabs.html#method-remove", - "chrome.tabs.onDetached" : "tabs.html#event-onDetatched" - } - - If the API namespace is defined "nodoc" then an empty dict is returned. - - Raises: - Exception: If the key supplied is not a member of _MODULE_DOC_KEYS. 
- """ - methods = [] - api_dict = {} - namespace = module['namespace'] - if self._disableDocs(module): - return api_dict - if module.has_key(key): - methods.extend(module[key]) - for method in methods: - if self._disableDocs(method): - continue - method_name = 'chrome.%s.%s' % (namespace, method['name']) - hashprefix = 'method' - if key == 'events': - hashprefix = 'event' - api_dict[method_name] = '%s.html#%s-%s' %\ - (namespace, hashprefix, method['name']) - return api_dict - - def getModuleNames(self): - """ Returns the names of individual modules in the API. - - Returns: - The namespace """ - # Exclude modules with documentation disabled. - return set(module['namespace'].encode() for module in self._manifest - if not self._disableDocs(module)) - - def _disableDocs(self, obj): - return 'nodoc' in obj and obj['nodoc'] - - def getDocumentationLinks(self): - """ Parses the extension API JSON manifest and returns a dict of all - events and methods for every module, mapped to relative documentation links. - - Returns: - A dict of methods/events => partial doc links for every module. - """ - api_dict = {} - for module in self._manifest: - api_dict.update(self._parseModuleDocLinksByKey(module, 'functions')) - api_dict.update(self._parseModuleDocLinksByKeyTypes(module, 'functions')) - api_dict.update(self._parseModuleDocLinksByKey(module, 'events')) - api_dict.update(self._parseModuleDocLinksByKeyTypes(module, 'events')) - return api_dict - - def generateJSONFromIDL(self): - """ Writes temporary .json files for every .idl file we have read, for - use by the documentation generator. - """ - for (module, json_path) in self._temporary_json_files: - if os.path.exists(json_path): - print ("WARNING: Overwriting existing file '%s'" - " with generated content." % (json_path)) - write_json_to_file(module, json_path) - - def cleanupGeneratedFiles(self): - """ Removes the temporary .json files we generated from .idl before. - """ - for (module, json_path) in self._temporary_json_files: - os.remove(json_path) - -class SamplesManifest(object): - """ Represents a manifest file containing information about the sample - extensions available in the codebase. """ - - def __init__(self, base_sample_path, base_dir, api_manifest): - """ Reads through the filesystem and obtains information about any Chrome - extensions which exist underneath the specified folder. - - Args: - base_sample_path: The directory under which to search for samples. - base_dir: The base directory samples will be referenced from. - api_manifest: An instance of the ApiManifest class, which will indicate - which API methods are available. - """ - self._base_dir = base_dir - manifest_paths = self._locateManifestsFromPath(base_sample_path) - self._manifest_data = self._parseManifestData(manifest_paths, api_manifest) - - def _locateManifestsFromPath(self, path): - """ - Returns a list of paths to sample extension manifest.json files. - - Args: - base_path: Base path in which to start the search. - Returns: - A list of paths below base_path pointing at manifest.json files. 
- """ - manifest_paths = [] - for root, directories, files in sorted_walk(path): - if 'manifest.json' in files: - directories = [] # Don't go any further down this tree - manifest_paths.append(os.path.join(root, 'manifest.json')) - if '.svn' in directories: - directories.remove('.svn') # Don't go into SVN metadata directories - return manifest_paths - - def _parseManifestData(self, manifest_paths, api_manifest): - """ Returns metadata about the sample extensions given their manifest - paths. - - Args: - manifest_paths: A list of paths to extension manifests - api_manifest: An instance of the ApiManifest class, which will indicate - which API methods are available. - - Returns: - Manifest data containing a list of samples and available API methods. - """ - api_method_dict = api_manifest.getDocumentationLinks() - api_methods = api_method_dict.keys() - - samples = [] - for path in manifest_paths: - sample = Sample(path, api_methods, self._base_dir) - # Don't render hosted apps - if sample.is_hosted_app() == False: - samples.append(sample) - - def compareSamples(sample1, sample2): - """ Compares two samples as a sort comparator, by name then path. """ - value = cmp(sample1['name'].upper(), sample2['name'].upper()) - if value == 0: - value = cmp(sample1['path'], sample2['path']) - return value - - samples.sort(compareSamples) - - manifest_data = {'samples': samples, 'api': api_method_dict} - return manifest_data - - def writeToFile(self, path): - """ Writes the contents of this manifest file as a JSON-encoded text file. - - Args: - path: The path to write the samples manifest file to. - """ - write_json_to_file(self._manifest_data, path) - - def writeZippedSamples(self): - """ For each sample in the current manifest, create a zip file with the - sample contents in the sample's parent directory if not zip exists, or - update the zip file if the sample has been updated. - - Returns: - A set of paths representing zip files which have been modified. - """ - modified_paths = [] - for sample in self._manifest_data['samples']: - path = sample.write_zip() - if path: - modified_paths.append(path) - return modified_paths - -class Sample(dict): - """ Represents metadata about a Chrome extension sample. - - Extends dict so that it can be easily JSON serialized. - """ - - def __init__(self, manifest_path, api_methods, base_dir): - """ Initializes a Sample instance given a path to a manifest. - - Args: - manifest_path: A filesystem path to a manifest file. - api_methods: A list of strings containing all possible Chrome extension - API calls. - base_dir: The base directory where this sample will be referenced from - - paths will be made relative to this directory. - """ - self._base_dir = base_dir - self._manifest_path = manifest_path - self._manifest = parse_json_file(self._manifest_path) - self._locale_data = self._parse_locale_data() - - # The following calls set data which will be serialized when converting - # this object to JSON. 
-    source_data = self._parse_source_data(api_methods)
-    self['api_calls'] = source_data['api_calls']
-    self['source_files'] = source_data['source_files']
-    self['source_hash'] = source_data['source_hash']
-
-    self['name'] = self._parse_name()
-    self['description'] = self._parse_description()
-    self['icon'] = self._parse_icon()
-    self['features'] = self._parse_features()
-    self['protocols'] = self._parse_protocols()
-    self['path'] = self._get_relative_path()
-    self['search_string'] = self._get_search_string()
-    self['id'] = hashlib.sha1(self['path']).hexdigest()
-    self['zip_path'] = self._get_relative_zip_path()
-    self['crx_path'] = self._get_relative_crx_path()
-    self['packaged_app'] = self.is_packaged_app()
-
-  _FEATURE_ATTRIBUTES = (
-    'browser_action',
-    'page_action',
-    'background_page',
-    'options_page',
-    'plugins',
-    'theme',
-    'chrome_url_overrides',
-    'devtools_page'
-  )
-  """ Attributes that will map to "features" if their corresponding key is
-  present in the extension manifest. """
-
-  _SOURCE_FILE_EXTENSIONS = ('.html', '.json', '.js', '.css', '.htm')
-  """ File extensions to files which may contain source code."""
-
-  _ENGLISH_LOCALES = ['en_US', 'en', 'en_GB']
-  """ Locales from which translations may be used in the sample gallery. """
-
-  def _get_localized_manifest_value(self, key):
-    """ Returns a localized version of the requested manifest value.
-
-    Args:
-      key: The manifest key whose value the caller wants translated.
-
-    Returns:
-      If the supplied value exists and contains a ___MSG_token___ value, this
-      method will resolve the appropriate translation and return the result.
-      If no token exists, the manifest value will be returned. If the key does
-      not exist, an empty string will be returned.
-
-    Raises:
-      Exception: If the localized value for the given token could not be found.
-    """
-    if self._manifest.has_key(key):
-      if self._manifest[key][:6] == '__MSG_':
-        try:
-          return self._get_localized_value(self._manifest[key])
-        except Exception, msg:
-          raise Exception("Could not translate manifest value for key %s: %s" %
-                          (key, msg))
-      else:
-        return self._manifest[key]
-    else:
-      return ''
-
-  def _get_localized_value(self, message_token):
-    """ Returns the localized version of the requested MSG bundle token.
-
-    Args:
-      message_token: A message bundle token like __MSG_extensionName__.
-
-    Returns:
-      The translated text corresponding to the token, with any placeholders
-      automatically resolved and substituted in.
-
-    Raises:
-      Exception: If a message bundle token is not found in the translations.
-    """
-    placeholder_pattern = re.compile('\$(\w*)\$')
-    token = message_token[6:-2]
-    if self._locale_data.has_key(token):
-      message = self._locale_data[token]['message']
-
-      placeholder_match = placeholder_pattern.search(message)
-      if placeholder_match:
-        # There are placeholders in the translation - substitute them.
-        placeholder_name = placeholder_match.group(1)
-        placeholders = self._locale_data[token]['placeholders']
-        if placeholders.has_key(placeholder_name.lower()):
-          placeholder_value = placeholders[placeholder_name.lower()]['content']
-          placeholder_token = '$%s$' % placeholder_name
-          message = message.replace(placeholder_token, placeholder_value)
-      return message
-    else:
-      raise Exception('Could not find localized string: %s' % message_token)
-
-  def _get_relative_path(self):
-    """ Returns a relative path from the supplied base dir to the manifest dir.
-
-    This method is used because we may not be able to rely on os.path.relpath
-    which was introduced in Python 2.6 and only works on Windows and Unix.
-
-    Since the example extensions should always be subdirectories of the
-    base sample manifest path, we can get a relative path through a simple
-    string substitution.
-
-    Returns:
-      A relative directory path from the sample manifest's directory to the
-      directory containing this sample's manifest.json.
-    """
-    real_manifest_path = os.path.realpath(self._manifest_path)
-    real_base_path = os.path.realpath(self._base_dir)
-    return real_manifest_path.replace(real_base_path, '')\
-                             .replace('manifest.json', '')[1:]
-
-  def _get_relative_zip_path(self):
-    """ Returns a relative path from the base dir to the sample's zip file.
-
-    Intended for locating the zip file for the sample in the samples manifest.
-
-    Returns:
-      A relative directory path from the sample manifest's directory to this
-      sample's zip file.
-    """
-    zip_filename = self._get_zip_filename()
-    zip_relpath = os.path.dirname(os.path.dirname(self._get_relative_path()))
-    return os.path.join(zip_relpath, zip_filename)
-
-  def _get_relative_crx_path(self):
-    """ Returns a relative path from the base dir to the sample's crx file.
-
-    Note: .crx files are provided manually and may or may not exist.
-
-    Returns:
-      If the .crx file exists, the relative directory path from the sample's
-      manifest directory to this sample's .crx file.
-
-      Otherwise, None.
-    """
-    crx_filename = self._get_crx_filename()
-    crx_relroot = os.path.dirname(os.path.dirname(self._get_relative_path()))
-    crx_relpath = os.path.join(crx_relroot, crx_filename)
-    crx_absroot = os.path.dirname(os.path.dirname(self._manifest_path))
-    crx_abspath = os.path.join(crx_absroot, crx_filename)
-    return os.path.isfile(crx_abspath) and crx_relpath or None
-
-
-  def _get_search_string(self):
-    """ Constructs a string to be used when searching the samples list.
-
-    To make the implementation of the JavaScript-based search very direct, a
-    string is constructed containing the title, description, API calls, and
-    features that this sample uses, and is converted to uppercase. This makes
-    JavaScript sample searching very fast and easy to implement.
-
-    Returns:
-      An uppercase string containing information to match on for searching
-      samples on the client.
-    """
-    search_terms = [
-      self['name'],
-      self['description'],
-    ]
-    search_terms.extend(self['features'])
-    search_terms.extend(self['api_calls'])
-    search_string = ' '.join(search_terms).replace('"', '')\
-                                          .replace('\'', '')\
-                                          .upper()
-    return search_string
-
-  def _get_zip_filename(self):
-    """ Returns the filename to be used for a generated zip of the sample.
-
-    Returns:
-      A string in the form of "<dirname>.zip" where <dirname> is the name
-      of the directory containing this sample's manifest.json.
-    """
-    sample_path = os.path.realpath(os.path.dirname(self._manifest_path))
-    sample_dirname = os.path.basename(sample_path)
-    return "%s.zip" % sample_dirname
-
-  def _get_crx_filename(self):
-    """ Returns the filename to be used for a .crx package of the sample.
-
-    Returns:
-      A string in the form of "<dirname>.crx" where <dirname> is the name
-      of the directory containing this sample's manifest.json.
-    """
-    sample_path = os.path.realpath(os.path.dirname(self._manifest_path))
-    sample_dirname = os.path.basename(sample_path)
-    return "%s.crx" % sample_dirname
-
-  def _parse_description(self):
-    """ Returns a localized description of the extension.
-
-    Returns:
-      A localized version of the sample's description.
-    """
-    return self._get_localized_manifest_value('description')
-
-  def _parse_features(self):
-    """ Returns a list of features the sample uses.
-
-    Returns:
-      A list of features the extension uses, as determined by
-      self._FEATURE_ATTRIBUTES.
-    """
-    features = set()
-    for feature_attr in self._FEATURE_ATTRIBUTES:
-      if self._manifest.has_key(feature_attr):
-        features.add(feature_attr)
-
-    if self._manifest.has_key('background'):
-      features.add('background_page')
-
-    if self._uses_popup():
-      features.add('popup')
-
-    if self._manifest.has_key('permissions'):
-      for permission in self._manifest['permissions']:
-        split = permission.split('://')
-        if (len(split) == 1):
-          features.add(split[0])
-    return sorted(features)
-
-  def _parse_icon(self):
-    """ Returns the path to the 128px icon for this sample.
-
-    Returns:
-      The path to the 128px icon if defined in the manifest, None otherwise.
-    """
-    if (self._manifest.has_key('icons') and
-        self._manifest['icons'].has_key('128')):
-      return self._manifest['icons']['128']
-    else:
-      return None
-
-  def _parse_locale_data(self):
-    """ Parses this sample's locale data into a dict.
-
-    Because the sample gallery is in English, this method only looks for
-    translations as defined by self._ENGLISH_LOCALES.
-
-    Returns:
-      A dict containing the translation keys and corresponding English text
-      for this extension.
-
-    Raises:
-      Exception: If the messages file cannot be read, or if it is improperly
-          formatted JSON.
-    """
-    en_messages = {}
-    extension_dir_path = os.path.dirname(self._manifest_path)
-    for locale in self._ENGLISH_LOCALES:
-      en_messages_path = os.path.join(extension_dir_path, '_locales', locale,
-                                      'messages.json')
-      if (os.path.isfile(en_messages_path)):
-        break
-
-    if (os.path.isfile(en_messages_path)):
-      try:
-        en_messages_file = open(en_messages_path, 'r')
-      except IOError, msg:
-        raise Exception("Failed to read %s: %s" % (en_messages_path, msg))
-      en_messages_contents = en_messages_file.read()
-      en_messages_file.close()
-      try:
-        en_messages = json.loads(en_messages_contents)
-      except ValueError, msg:
-        raise Exception("File %s has a syntax error: %s" %
-                        (en_messages_path, msg))
-    return en_messages
-
-  def _parse_name(self):
-    """ Returns a localized name for the extension.
-
-    Returns:
-      A localized version of the sample's name.
-    """
-    return self._get_localized_manifest_value('name')
-
-  def _parse_protocols(self):
-    """ Returns a list of protocols this extension requests permission for.
-
-    Returns:
-      A list of every unique protocol listed in the manifest's permissions.
-    """
-    protocols = []
-    if self._manifest.has_key('permissions'):
-      for permission in self._manifest['permissions']:
-        split = permission.split('://')
-        if (len(split) == 2) and (split[0] not in protocols):
-          protocols.append(split[0] + "://")
-    return protocols
-
-  def _parse_source_data(self, api_methods):
-    """ Iterates over the sample's source files and parses data from them.
-
-    Parses any files in the sample directory with known source extensions
-    (as defined in self._SOURCE_FILE_EXTENSIONS). For each file, this method:
-
-    1. Stores a relative path from the manifest.json directory to the file.
-    2. Searches through the contents of the file for chrome.* API calls.
-    3. Calculates a SHA1 digest for the contents of the file.
-
-    Args:
-      api_methods: A list of strings containing the potential
-          API calls the extension sample could be making.
-
-    Raises:
-      Exception: If any of the source files cannot be read.
-
-    Returns:
-      A dictionary containing the keys/values:
-        'api_calls'     A sorted list of API calls the sample makes.
-        'source_files'  A sorted list of paths to files the extension uses.
-        'source_hash'   A hash of the individual file hashes.
-    """
-    data = {}
-    source_paths = []
-    source_hashes = []
-    api_calls = set()
-    base_path = os.path.realpath(os.path.dirname(self._manifest_path))
-    for root, directories, files in sorted_walk(base_path):
-      if '.svn' in directories:
-        directories.remove('.svn')  # Don't go into SVN metadata directories
-
-      for file_name in files:
-        ext = os.path.splitext(file_name)[1]
-        if ext in self._SOURCE_FILE_EXTENSIONS:
-          # Add the file path to the list of source paths.
-          fullpath = os.path.realpath(os.path.join(root, file_name))
-          path = fullpath.replace(base_path, '')[1:]
-          source_paths.append(path)
-
-          # Read the contents and parse out API calls.
-          try:
-            code_file = open(fullpath, "r")
-          except IOError, msg:
-            raise Exception("Failed to read %s: %s" % (fullpath, msg))
-          code_contents = unicode(code_file.read(), errors="replace")
-          code_file.close()
-          for method in api_methods:
-            if (code_contents.find(method) > -1):
-              api_calls.add(method)
-
-          # Get a hash of the file contents for zip file generation.
-          hash = hashlib.sha1(code_contents.encode("ascii", "replace"))
-          source_hashes.append(hash.hexdigest())
-
-    data['api_calls'] = sorted(api_calls)
-    data['source_files'] = sorted(source_paths)
-    data['source_hash'] = hashlib.sha1(''.join(source_hashes)).hexdigest()
-    return data
-
-  def _uses_background(self):
-    """ Returns true if the extension defines a background page. """
-    return self._manifest.has_key('background_page')
-
-  def _uses_browser_action(self):
-    """ Returns true if the extension defines a browser action. """
-    return self._manifest.has_key('browser_action')
-
-  def _uses_content_scripts(self):
-    """ Returns true if the extension uses content scripts. """
-    return self._manifest.has_key('content_scripts')
-
-  def _uses_options(self):
-    """ Returns true if the extension defines an options page. """
-    return self._manifest.has_key('options_page')
-
-  def _uses_page_action(self):
-    """ Returns true if the extension uses a page action. """
-    return self._manifest.has_key('page_action')
-
-  def _uses_popup(self):
-    """ Returns true if the extension defines a popup on a page or browser
-    action. """
""" - has_b_popup = (self._uses_browser_action() and - self._manifest['browser_action'].has_key('popup')) - has_p_popup = (self._uses_page_action() and - self._manifest['page_action'].has_key('popup')) - return has_b_popup or has_p_popup - - def is_hosted_app(self): - """ Returns true if the manifest has an app but not a local_path (that's a - packaged app) nor a background (that's a platform app).""" - return (self._manifest.has_key('app') and - (not self._manifest['app'].has_key('launch') or - not self._manifest['app']['launch'].has_key('local_path')) and - not self._manifest['app'].has_key('background')) - - def is_packaged_app(self): - """ Returns true if the manifest has an app/launch/local_path section.""" - return (self._manifest.has_key('app') and - self._manifest['app'].has_key('launch') and - self._manifest['app']['launch'].has_key('local_path')) - - def write_zip(self): - """ Writes a zip file containing all of the files in this Sample's dir.""" - sample_path = os.path.realpath(os.path.dirname(self._manifest_path)) - sample_dirname = os.path.basename(sample_path) - sample_parentpath = os.path.dirname(sample_path) - - zip_filename = self._get_zip_filename() - zip_path = os.path.join(sample_parentpath, zip_filename) - # we pass zip_manifest_path to zipfile.getinfo(), which chokes on - # backslashes, so don't rely on os.path.join, use forward slash on - # all platforms. - zip_manifest_path = sample_dirname + '/manifest.json' - - zipfile.ZipFile.debug = 3 - - if os.path.isfile(zip_path): - try: - old_zip_file = zipfile.ZipFile(zip_path, 'r') - except IOError, msg: - raise Exception("Could not read zip at %s: %s" % (zip_path, msg)) - except zipfile.BadZipfile, msg: - raise Exception("File at %s is not a zip file: %s" % (zip_path, msg)) - - try: - info = old_zip_file.getinfo(zip_manifest_path) - hash = info.comment - if hash == self['source_hash']: - return None # Hashes match - no need to generate file - except KeyError, msg: - pass # The old zip file doesn't contain a hash - overwrite - finally: - old_zip_file.close() - - zip_file = zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) - - try: - for root, dirs, files in sorted_walk(sample_path): - if '.svn' in dirs: - dirs.remove('.svn') - for file in files: - # Absolute path to the file to be added. - abspath = os.path.realpath(os.path.join(root, file)) - # Relative path to store the file in under the zip. - relpath = sample_dirname + abspath.replace(sample_path, "") - - zip_file.write(abspath, relpath) - if file == 'manifest.json': - info = zip_file.getinfo(zip_manifest_path) - info.comment = self['source_hash'] - except RuntimeError, msg: - raise Exception("Could not write zip at %s: %s" % (zip_path, msg)) - finally: - zip_file.close() - - return self._get_relative_zip_path() diff --git a/chrome/common/extensions/docs/build/generator.html b/chrome/common/extensions/docs/build/generator.html deleted file mode 100644 index f7c4a8c..0000000 --- a/chrome/common/extensions/docs/build/generator.html +++ /dev/null @@ -1,58 +0,0 @@ -<html> - <head> - <script> -var childFrame; -var family; -var pages; -var currentPage; -var result = {}; - -function init() { - if (window.testRunner) { - testRunner.dumpAsText(); - testRunner.waitUntilDone(); - } else { - console.error("No layout test controller"); - } - - var queryString = window.location.search.substring(1).split("|"); - family = queryString[0]; - pages = queryString[1].split(","); - if (!pages.length) { - alert("please specify which pages to generate via " + - "?<page> ... ie. 
-          "?<page> ... ie. generator.html?tabs,bookmarks,cookies,...");
-  }
-
-  childFrame = document.createElement('iframe');
-  childFrame.style.display = "none";
-  document.body.appendChild(childFrame);
-
-  next();
-}
-
-function next() {
-  currentPage = pages.shift();
-  childFrame.src = "../" + family + "/" + currentPage + ".html?regenerate";
-}
-
-var count = 0;
-function done() {
-  result[currentPage] = childFrame.contentWindow.serializePage();
-
-  if (pages.length) {
-    next();
-  } else {
-    var preNode = document.createElement("pre");
-    var textNode = document.createTextNode(
-        "#BEGIN" + JSON.stringify(result) + "#END");
-    preNode.appendChild(textNode);
-    document.getElementsByTagName("body")[0].appendChild(preNode);
-    if (window.testRunner)
-      testRunner.notifyDone();
-  }
-}
-    </script>
-  </head>
-  <body onload="init();">
-  </body>
-</html>
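
Editor's note: the deleted Sample.write_zip() above avoids rewriting a sample's zip by stamping the sample's source hash into the ZipInfo comment of the archived manifest.json and comparing it on the next run. The following is a minimal standalone sketch of that pattern only, modernized to Python 3; the function names, arguments, and paths are illustrative and are not part of the Chromium tree.

# Sketch of the hash-in-zip-comment caching pattern, assuming the caller
# already computed source_hash (e.g. a SHA-1 over the sample's source files).
import os
import zipfile

def zip_is_current(zip_path, arc_manifest, source_hash):
    # True if the existing zip already carries source_hash in the comment of
    # its archived manifest.json, i.e. nothing changed since the last build.
    if not os.path.isfile(zip_path):
        return False
    try:
        with zipfile.ZipFile(zip_path, 'r') as old_zip:
            return old_zip.getinfo(arc_manifest).comment == source_hash.encode()
    except (KeyError, zipfile.BadZipfile):
        return False  # No stored hash, or not a valid zip: rebuild.

def write_sample_zip(sample_dir, zip_path, source_hash):
    # Zip sample_dir, stamping source_hash into manifest.json's ZipInfo
    # comment so the next run can skip an unchanged sample.
    sample_dir = os.path.realpath(sample_dir)
    dirname = os.path.basename(sample_dir)
    arc_manifest = dirname + '/manifest.json'  # getinfo() wants forward slashes
    if zip_is_current(zip_path, arc_manifest, source_hash):
        return None  # Hashes match - no need to regenerate the file.
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as new_zip:
        for root, dirs, files in os.walk(sample_dir):
            dirs.sort()  # deterministic order, like the deleted sorted_walk()
            for name in sorted(files):
                abspath = os.path.join(root, name)
                relpath = os.path.relpath(abspath, sample_dir).replace(os.sep, '/')
                arcname = dirname + '/' + relpath
                new_zip.write(abspath, arcname)
                if arcname == arc_manifest:
                    # The comment is flushed with the central directory on close().
                    new_zip.getinfo(arcname).comment = source_hash.encode()
    return zip_path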