summaryrefslogtreecommitdiffstats
path: root/extensions/browser/updater
diff options
context:
space:
mode:
authorrockot <rockot@chromium.org>2014-10-16 12:40:25 -0700
committerCommit bot <commit-bot@chromium.org>2014-10-16 19:41:01 +0000
commit05f40a92987303ac217e68cec183cf9d19ffa290 (patch)
tree641b0caa6da225a22aa2beaff32499b8652c7ae3 /extensions/browser/updater
parentc43d5a1de9f91fe232d3bffefcb02e0cc1e65ab9 (diff)
downloadchromium_src-05f40a92987303ac217e68cec183cf9d19ffa290.zip
chromium_src-05f40a92987303ac217e68cec183cf9d19ffa290.tar.gz
chromium_src-05f40a92987303ac217e68cec183cf9d19ffa290.tar.bz2
Move ExtensionDownloader to //extensions
This is a mechanical move with opportunistic clang_format applied for cleanup. This also introduces a dependency on //google_apis within //extensions/browser. BUG=398671 Review URL: https://codereview.chromium.org/654363002 Cr-Commit-Position: refs/heads/master@{#299957}
Diffstat (limited to 'extensions/browser/updater')
-rw-r--r--extensions/browser/updater/extension_downloader.cc972
-rw-r--r--extensions/browser/updater/extension_downloader.h334
-rw-r--r--extensions/browser/updater/extension_downloader_delegate.cc44
-rw-r--r--extensions/browser/updater/extension_downloader_delegate.h124
4 files changed, 1474 insertions, 0 deletions
diff --git a/extensions/browser/updater/extension_downloader.cc b/extensions/browser/updater/extension_downloader.cc
new file mode 100644
index 0000000..8021944
--- /dev/null
+++ b/extensions/browser/updater/extension_downloader.cc
@@ -0,0 +1,972 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "extensions/browser/updater/extension_downloader.h"
+
+#include <utility>
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/files/file_path.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/metrics/histogram.h"
+#include "base/metrics/sparse_histogram.h"
+#include "base/profiler/scoped_profile.h"
+#include "base/stl_util.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
+#include "base/time/time.h"
+#include "base/version.h"
+#include "content/public/browser/browser_thread.h"
+#include "content/public/browser/notification_details.h"
+#include "content/public/browser/notification_service.h"
+#include "extensions/browser/extensions_browser_client.h"
+#include "extensions/browser/notification_types.h"
+#include "extensions/browser/updater/extension_cache.h"
+#include "extensions/browser/updater/request_queue_impl.h"
+#include "extensions/browser/updater/safe_manifest_parser.h"
+#include "extensions/common/extension_urls.h"
+#include "extensions/common/manifest_url_handlers.h"
+#include "google_apis/gaia/identity_provider.h"
+#include "net/base/backoff_entry.h"
+#include "net/base/load_flags.h"
+#include "net/base/net_errors.h"
+#include "net/http/http_request_headers.h"
+#include "net/http/http_status_code.h"
+#include "net/url_request/url_fetcher.h"
+#include "net/url_request/url_request_context_getter.h"
+#include "net/url_request/url_request_status.h"
+
+using base::Time;
+using base::TimeDelta;
+using content::BrowserThread;
+
+namespace extensions {
+
+// Sentinel extension ID used to fetch the webstore blacklist through the
+// same update-manifest machinery as real extensions (see
+// StartBlacklistUpdate / CreateExtensionFetcher, which special-case it).
+const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";
+
+namespace {
+
+// Backoff policy shared by the manifest and extension request queues.
+const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
+  // Number of initial errors (in sequence) to ignore before applying
+  // exponential back-off rules.
+  0,
+
+  // Initial delay for exponential back-off in ms.
+  2000,
+
+  // Factor by which the waiting time will be multiplied.
+  2,
+
+  // Fuzzing percentage. ex: 10% will spread requests randomly
+  // between 90%-100% of the calculated time.
+  0.1,
+
+  // Maximum amount of time we are willing to delay our request in ms.
+  -1,
+
+  // Time to keep an entry from being discarded even when it
+  // has no significant state, -1 to never discard.
+  -1,
+
+  // Don't use initial delay unless the last request was an error.
+  false,
+};
+
+// Query parameter used to select among multiple signed-in Gaia sessions
+// when falling back on cookie credentials (see IncrementAuthUserIndex).
+const char kAuthUserQueryKey[] = "authuser";
+
+// Upper bound on the authuser index we will try.
+const int kMaxAuthUserValue = 10;
+// Upper bound on OAuth2-authenticated fetch attempts before falling back
+// on cookies (see IterateFetchCredentialsAfterFailure).
+const int kMaxOAuth2Attempts = 3;
+
+// install_source values reported to the update server.
+const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
+const char kDefaultInstallSource[] = "";
+
+const char kGoogleDotCom[] = "google.com";
+// OAuth2TokenService consumer ID for this class.
+const char kTokenServiceConsumerId[] = "extension_downloader";
+const char kWebstoreOAuth2Scope[] =
+    "https://www.googleapis.com/auth/chromewebstore.readonly";
+
+// Records |retry_count| into a Google-URL or other-URL histogram depending
+// on the domain of |url|.
+#define RETRY_HISTOGRAM(name, retry_count, url) \
+  if ((url).DomainIs(kGoogleDotCom)) { \
+    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
+                                retry_count, \
+                                1, \
+                                kMaxRetries, \
+                                kMaxRetries + 1); \
+  } else { \
+    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl", \
+                                retry_count, \
+                                1, \
+                                kMaxRetries, \
+                                kMaxRetries + 1); \
+  }
+
+bool ShouldRetryRequest(const net::URLRequestStatus& status,
+                        int response_code) {
+  // Retry if the response code is a server error, or the request failed because
+  // of network errors as opposed to file errors.
+  return ((response_code >= 500 && status.is_success()) ||
+          status.status() == net::URLRequestStatus::FAILED);
+}
+
+// This parses and updates a URL query such that the value of the |authuser|
+// query parameter is incremented by 1. If parameter was not present in the URL,
+// it will be added with a value of 1. All other query keys and values are
+// preserved as-is. Returns |false| if the user index exceeds a hard-coded
+// maximum.
+bool IncrementAuthUserIndex(GURL* url) {
+  int user_index = 0;
+  std::string old_query = url->query();
+  std::vector<std::string> new_query_parts;
+  url::Component query(0, old_query.length());
+  url::Component key, value;
+  // Walk every key=value pair; remember the current authuser value and drop
+  // the pair (it is re-appended below with the incremented value).
+  while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
+    std::string key_string = old_query.substr(key.begin, key.len);
+    std::string value_string = old_query.substr(value.begin, value.len);
+    if (key_string == kAuthUserQueryKey) {
+      base::StringToInt(value_string, &user_index);
+    } else {
+      new_query_parts.push_back(base::StringPrintf(
+          "%s=%s", key_string.c_str(), value_string.c_str()));
+    }
+  }
+  if (user_index >= kMaxAuthUserValue)
+    return false;
+  new_query_parts.push_back(
+      base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
+  std::string new_query_string = JoinString(new_query_parts, '&');
+  url::Component new_query(0, new_query_string.size());
+  url::Replacements<char> replacements;
+  replacements.SetQuery(new_query_string.c_str(), new_query);
+  *url = url->ReplaceComponents(replacements);
+  return true;
+}
+
+}  // namespace
+
+// Payload for the EXTENSION_UPDATE_FOUND notification: the extension ID and
+// the version advertised by the update manifest (see NotifyUpdateFound).
+UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
+    : id(id), version(version) {
+}
+
+UpdateDetails::~UpdateDetails() {
+}
+
+// Default-constructed fetch: empty URL, no credentials. Used only as a
+// placeholder; real fetches come from the parameterized constructor below.
+ExtensionDownloader::ExtensionFetch::ExtensionFetch()
+    : url(), credentials(CREDENTIALS_NONE) {
+}
+
+// A single .crx download request. Fetches start with no credentials;
+// IterateFetchCredentialsAfterFailure upgrades |credentials| and
+// |oauth2_attempt_count| on auth failures.
+ExtensionDownloader::ExtensionFetch::ExtensionFetch(
+    const std::string& id,
+    const GURL& url,
+    const std::string& package_hash,
+    const std::string& version,
+    const std::set<int>& request_ids)
+    : id(id),
+      url(url),
+      package_hash(package_hash),
+      version(version),
+      request_ids(request_ids),
+      credentials(CREDENTIALS_NONE),
+      oauth2_attempt_count(0) {
+}
+
+ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {
+}
+
+// |delegate| and |request_context| must be non-null; both request queues are
+// bound to this object with Unretained, which is safe because the queues are
+// members and cannot outlive |this|.
+ExtensionDownloader::ExtensionDownloader(
+    ExtensionDownloaderDelegate* delegate,
+    net::URLRequestContextGetter* request_context)
+    : OAuth2TokenService::Consumer(kTokenServiceConsumerId),
+      delegate_(delegate),
+      request_context_(request_context),
+      manifests_queue_(&kDefaultBackoffPolicy,
+                       base::Bind(&ExtensionDownloader::CreateManifestFetcher,
+                                  base::Unretained(this))),
+      extensions_queue_(&kDefaultBackoffPolicy,
+                        base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
+                                   base::Unretained(this))),
+      extension_cache_(NULL),
+      enable_extra_update_metrics_(false),
+      weak_ptr_factory_(this) {
+  DCHECK(delegate_);
+  DCHECK(request_context_.get());
+}
+
+ExtensionDownloader::~ExtensionDownloader() {
+}
+
+// Queues an installed extension for an update check. Returns false if the
+// extension cannot be update-checked (e.g. a user-script conversion with no
+// update URL, or the data fails validation in AddExtensionData).
+bool ExtensionDownloader::AddExtension(const Extension& extension,
+                                       int request_id) {
+  // Skip extensions with empty update URLs converted from user
+  // scripts.
+  if (extension.converted_from_user_script() &&
+      ManifestURL::GetUpdateURL(&extension).is_empty()) {
+    return false;
+  }
+
+  // If the extension updates itself from the gallery, ignore any update URL
+  // data. At the moment there is no extra data that an extension can
+  // communicate to the gallery update servers.
+  std::string update_url_data;
+  if (!ManifestURL::UpdatesFromGallery(&extension))
+    update_url_data = delegate_->GetUpdateUrlData(extension.id());
+
+  // The delegate may force this update and override the install source
+  // reported to the server.
+  std::string install_source;
+  bool force_update =
+      delegate_->ShouldForceUpdate(extension.id(), &install_source);
+  return AddExtensionData(extension.id(),
+                          *extension.version(),
+                          extension.GetType(),
+                          ManifestURL::GetUpdateURL(&extension),
+                          update_url_data,
+                          request_id,
+                          force_update,
+                          install_source);
+}
+
+// Queues a not-yet-installed extension for download. Since nothing is
+// installed, the type is unknown and no ping/update-url data is sent.
+bool ExtensionDownloader::AddPendingExtension(const std::string& id,
+                                              const GURL& update_url,
+                                              int request_id) {
+  // Use a zero version to ensure that a pending extension will always
+  // be updated, and thus installed (assuming all extensions have
+  // non-zero versions).
+  Version version("0.0.0.0");
+  DCHECK(version.IsValid());
+
+  return AddExtensionData(id,
+                          version,
+                          Manifest::TYPE_UNKNOWN,
+                          update_url,
+                          std::string(),
+                          request_id,
+                          false,
+                          std::string());
+}
+
+// Kicks off all update checks accumulated via AddExtension*/AddExtensionData.
+// If a |cache| is supplied, it is started first and DoStartAllPending runs
+// from its completion callback; otherwise the checks start immediately.
+void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
+  if (cache) {
+    extension_cache_ = cache;
+    extension_cache_->Start(base::Bind(&ExtensionDownloader::DoStartAllPending,
+                                       weak_ptr_factory_.GetWeakPtr()));
+  } else {
+    DoStartAllPending();
+  }
+}
+
+// Reports accumulated URL stats, then converts every prepared
+// ManifestFetchData into an actual update check and clears the staging map.
+void ExtensionDownloader::DoStartAllPending() {
+  ReportStats();
+  url_stats_ = URLStats();
+
+  for (FetchMap::iterator it = fetches_preparing_.begin();
+       it != fetches_preparing_.end();
+       ++it) {
+    std::vector<linked_ptr<ManifestFetchData>>& list = it->second;
+    for (size_t i = 0; i < list.size(); ++i) {
+      // Transfer ownership out of the linked_ptr into the queue.
+      StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
+    }
+  }
+  fetches_preparing_.clear();
+}
+
+// Schedules an update check for the webstore blacklist, addressed via the
+// kBlacklistAppID sentinel.
+void ExtensionDownloader::StartBlacklistUpdate(
+    const std::string& version,
+    const ManifestFetchData::PingData& ping_data,
+    int request_id) {
+  // Note: it is very important that we use the https version of the update
+  // url here to avoid DNS hijacking of the blacklist, which is not validated
+  // by a public key signature like .crx files are.
+  scoped_ptr<ManifestFetchData> blacklist_fetch(CreateManifestFetchData(
+      extension_urls::GetWebstoreUpdateUrl(), request_id));
+  DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
+  blacklist_fetch->AddExtension(kBlacklistAppID,
+                                version,
+                                &ping_data,
+                                std::string(),
+                                kDefaultInstallSource,
+                                false);
+  StartUpdateCheck(blacklist_fetch.Pass());
+}
+
+// Takes ownership of |identity_provider|, enabling OAuth2-authenticated
+// webstore downloads (see CreateExtensionFetcher).
+void ExtensionDownloader::SetWebstoreIdentityProvider(
+    scoped_ptr<IdentityProvider> identity_provider) {
+  identity_provider_.swap(identity_provider);
+}
+
+// Validates one extension's update data, updates url_stats_, and stages it
+// into fetches_preparing_ (batched per (request_id, update_url) into
+// ManifestFetchData objects). Returns false if the data is rejected
+// (invalid non-empty update URL, or empty ID).
+bool ExtensionDownloader::AddExtensionData(
+    const std::string& id,
+    const Version& version,
+    Manifest::Type extension_type,
+    const GURL& extension_update_url,
+    const std::string& update_url_data,
+    int request_id,
+    bool force_update,
+    const std::string& install_source_override) {
+  GURL update_url(extension_update_url);
+  // Skip extensions with non-empty invalid update URLs.
+  if (!update_url.is_empty() && !update_url.is_valid()) {
+    LOG(WARNING) << "Extension " << id << " has invalid update url "
+                 << update_url;
+    return false;
+  }
+
+  // Make sure we use SSL for store-hosted extensions.
+  if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
+      !update_url.SchemeIsSecure())
+    update_url = extension_urls::GetWebstoreUpdateUrl();
+
+  // Skip extensions with empty IDs.
+  if (id.empty()) {
+    LOG(WARNING) << "Found extension with empty ID";
+    return false;
+  }
+
+  // Tally the update-URL kind; an empty URL defaults to the webstore.
+  if (update_url.DomainIs(kGoogleDotCom)) {
+    url_stats_.google_url_count++;
+  } else if (update_url.is_empty()) {
+    url_stats_.no_url_count++;
+    // Fill in default update URL.
+    update_url = extension_urls::GetWebstoreUpdateUrl();
+  } else {
+    url_stats_.other_url_count++;
+  }
+
+  // Tally the extension type for the UMA stats reported in ReportStats().
+  switch (extension_type) {
+    case Manifest::TYPE_THEME:
+      ++url_stats_.theme_count;
+      break;
+    case Manifest::TYPE_EXTENSION:
+    case Manifest::TYPE_USER_SCRIPT:
+      ++url_stats_.extension_count;
+      break;
+    case Manifest::TYPE_HOSTED_APP:
+    case Manifest::TYPE_LEGACY_PACKAGED_APP:
+      ++url_stats_.app_count;
+      break;
+    case Manifest::TYPE_PLATFORM_APP:
+      ++url_stats_.platform_app_count;
+      break;
+    case Manifest::TYPE_UNKNOWN:
+    default:
+      ++url_stats_.pending_count;
+      break;
+  }
+
+  std::vector<GURL> update_urls;
+  update_urls.push_back(update_url);
+  // If metrics are enabled, also add to ManifestFetchData for the
+  // webstore update URL.
+  if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
+      enable_extra_update_metrics_) {
+    update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
+  }
+
+  for (size_t i = 0; i < update_urls.size(); ++i) {
+    DCHECK(!update_urls[i].is_empty());
+    DCHECK(update_urls[i].is_valid());
+
+    // Index 0 is the real update URL; index 1 (metrics-only) is tagged as
+    // not-from-webstore unless the delegate overrode the source.
+    std::string install_source =
+        i == 0 ? kDefaultInstallSource : kNotFromWebstoreInstallSource;
+    if (!install_source_override.empty()) {
+      install_source = install_source_override;
+    }
+
+    ManifestFetchData::PingData ping_data;
+    ManifestFetchData::PingData* optional_ping_data = NULL;
+    if (delegate_->GetPingDataForExtension(id, &ping_data))
+      optional_ping_data = &ping_data;
+
+    // Find or create a ManifestFetchData to add this extension to.
+    bool added = false;
+    FetchMap::iterator existing_iter =
+        fetches_preparing_.find(std::make_pair(request_id, update_urls[i]));
+    if (existing_iter != fetches_preparing_.end() &&
+        !existing_iter->second.empty()) {
+      // Try to add to the ManifestFetchData at the end of the list.
+      ManifestFetchData* existing_fetch = existing_iter->second.back().get();
+      if (existing_fetch->AddExtension(id,
+                                       version.GetString(),
+                                       optional_ping_data,
+                                       update_url_data,
+                                       install_source,
+                                       force_update)) {
+        added = true;
+      }
+    }
+    if (!added) {
+      // Otherwise add a new element to the list, if the list doesn't exist or
+      // if its last element is already full.
+      linked_ptr<ManifestFetchData> fetch(
+          CreateManifestFetchData(update_urls[i], request_id));
+      fetches_preparing_[std::make_pair(request_id, update_urls[i])].push_back(
+          fetch);
+      added = fetch->AddExtension(id,
+                                  version.GetString(),
+                                  optional_ping_data,
+                                  update_url_data,
+                                  install_source,
+                                  force_update);
+      DCHECK(added);
+    }
+  }
+
+  return true;
+}
+
+// Emits the per-cycle url_stats_ counters as UMA histograms. Called from
+// DoStartAllPending just before the stats are reset.
+void ExtensionDownloader::ReportStats() const {
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
+                           url_stats_.extension_count);
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
+                           url_stats_.theme_count);
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp", url_stats_.app_count);
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
+                           url_stats_.platform_app_count);
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
+                           url_stats_.pending_count);
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
+                           url_stats_.google_url_count);
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
+                           url_stats_.other_url_count);
+  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
+                           url_stats_.no_url_count);
+}
+
+// Schedules |fetch_data| on the manifest queue, merging it into an existing
+// queued (or active) request for the same full URL when possible. If
+// background updates are disabled by the embedder, the delegate is notified
+// of failure instead and nothing is scheduled.
+void ExtensionDownloader::StartUpdateCheck(
+    scoped_ptr<ManifestFetchData> fetch_data) {
+  const std::set<std::string>& id_set(fetch_data->extension_ids());
+
+  if (!ExtensionsBrowserClient::Get()->IsBackgroundUpdateAllowed()) {
+    NotifyExtensionsDownloadFailed(id_set,
+                                   fetch_data->request_ids(),
+                                   ExtensionDownloaderDelegate::DISABLED);
+    // Bail out now that the delegate has been told these IDs failed with
+    // DISABLED; without this return the fetch would still be scheduled
+    // below, contradicting the failure notification just sent.
+    return;
+  }
+
+  RequestQueue<ManifestFetchData>::iterator i;
+  for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
+    if (fetch_data->full_url() == i->full_url()) {
+      // This url is already scheduled to be fetched.
+      i->Merge(*fetch_data);
+      return;
+    }
+  }
+
+  if (manifests_queue_.active_request() &&
+      manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
+    manifests_queue_.active_request()->Merge(*fetch_data);
+  } else {
+    UMA_HISTOGRAM_COUNTS(
+        "Extensions.UpdateCheckUrlLength",
+        fetch_data->full_url().possibly_invalid_spec().length());
+
+    manifests_queue_.ScheduleRequest(fetch_data.Pass());
+  }
+}
+
+// Callback invoked by manifests_queue_ to create and start the URLFetcher
+// for its current active request. Results arrive in OnURLFetchComplete.
+void ExtensionDownloader::CreateManifestFetcher() {
+  if (VLOG_IS_ON(2)) {
+    std::vector<std::string> id_vector(
+        manifests_queue_.active_request()->extension_ids().begin(),
+        manifests_queue_.active_request()->extension_ids().end());
+    std::string id_list = JoinString(id_vector, ',');
+    VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
+            << " for " << id_list;
+  }
+
+  manifest_fetcher_.reset(
+      net::URLFetcher::Create(kManifestFetcherId,
+                              manifests_queue_.active_request()->full_url(),
+                              net::URLFetcher::GET,
+                              this));
+  manifest_fetcher_->SetRequestContext(request_context_.get());
+  // Manifest checks never need cookies or caching.
+  manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
+                                  net::LOAD_DO_NOT_SAVE_COOKIES |
+                                  net::LOAD_DISABLE_CACHE);
+  // Update checks can be interrupted if a network change is detected; this is
+  // common for the retail mode AppPack on ChromeOS. Retrying once should be
+  // enough to recover in those cases; let the fetcher retry up to 3 times
+  // just in case. http://crosbug.com/130602
+  manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
+  manifest_fetcher_->Start();
+}
+
+// net::URLFetcherDelegate entry point: routes a completed fetch to either
+// the manifest or the crx handler depending on which fetcher finished.
+void ExtensionDownloader::OnURLFetchComplete(const net::URLFetcher* source) {
+  // TODO(vadimt): Remove ScopedProfile below once crbug.com/422577 is fixed.
+  tracked_objects::ScopedProfile tracking_profile(
+      FROM_HERE_WITH_EXPLICIT_FUNCTION(
+          "422577 ExtensionDownloader::OnURLFetchComplete"));
+
+  VLOG(2) << source->GetResponseCode() << " " << source->GetURL();
+
+  if (source == manifest_fetcher_.get()) {
+    std::string data;
+    source->GetResponseAsString(&data);
+    OnManifestFetchComplete(source->GetURL(),
+                            source->GetStatus(),
+                            source->GetResponseCode(),
+                            source->GetBackoffDelay(),
+                            data);
+  } else if (source == extension_fetcher_.get()) {
+    OnCRXFetchComplete(source,
+                       source->GetURL(),
+                       source->GetStatus(),
+                       source->GetResponseCode(),
+                       source->GetBackoffDelay());
+  } else {
+    NOTREACHED();
+  }
+}
+
+// Handles a completed manifest fetch: on success, hands the body to a
+// SafeManifestParser (results return via HandleManifestResults); on failure,
+// retries per policy or reports MANIFEST_FETCH_FAILED. Always advances the
+// manifest queue afterwards.
+void ExtensionDownloader::OnManifestFetchComplete(
+    const GURL& url,
+    const net::URLRequestStatus& status,
+    int response_code,
+    const base::TimeDelta& backoff_delay,
+    const std::string& data) {
+  // We want to try parsing the manifest, and if it indicates updates are
+  // available, we want to fire off requests to fetch those updates.
+  if (status.status() == net::URLRequestStatus::SUCCESS &&
+      (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
+    RETRY_HISTOGRAM("ManifestFetchSuccess",
+                    manifests_queue_.active_request_failure_count(),
+                    url);
+    VLOG(2) << "beginning manifest parse for " << url;
+    // The parser takes ownership of the active fetch data; the queue's
+    // active slot is cleared here (the reset below is then a no-op).
+    scoped_refptr<SafeManifestParser> safe_parser(new SafeManifestParser(
+        data,
+        manifests_queue_.reset_active_request().release(),
+        base::Bind(&ExtensionDownloader::HandleManifestResults,
+                   weak_ptr_factory_.GetWeakPtr())));
+    safe_parser->Start();
+  } else {
+    VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
+            << "' response code:" << response_code;
+    if (ShouldRetryRequest(status, response_code) &&
+        manifests_queue_.active_request_failure_count() < kMaxRetries) {
+      manifests_queue_.RetryRequest(backoff_delay);
+    } else {
+      RETRY_HISTOGRAM("ManifestFetchFailure",
+                      manifests_queue_.active_request_failure_count(),
+                      url);
+      NotifyExtensionsDownloadFailed(
+          manifests_queue_.active_request()->extension_ids(),
+          manifests_queue_.active_request()->request_ids(),
+          ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
+    }
+  }
+  manifest_fetcher_.reset();
+  manifests_queue_.reset_active_request();
+
+  // If we have any pending manifest requests, fire off the next one.
+  manifests_queue_.StartNextRequest();
+}
+
+// Receives parsed manifest results from SafeManifestParser. Kicks off crx
+// fetches for every applicable update, records daystart ping results for
+// google.com manifests, and reports NO_UPDATE_AVAILABLE (or, when |results|
+// is null, MANIFEST_INVALID) for everything else.
+void ExtensionDownloader::HandleManifestResults(
+    const ManifestFetchData& fetch_data,
+    const UpdateManifest::Results* results) {
+  // Keep a list of extensions that will not be updated, so that the |delegate_|
+  // can be notified once we're done here.
+  std::set<std::string> not_updated(fetch_data.extension_ids());
+
+  if (!results) {
+    NotifyExtensionsDownloadFailed(
+        not_updated,
+        fetch_data.request_ids(),
+        ExtensionDownloaderDelegate::MANIFEST_INVALID);
+    return;
+  }
+
+  // Examine the parsed manifest and kick off fetches of any new crx files.
+  std::vector<int> updates;
+  DetermineUpdates(fetch_data, *results, &updates);
+  for (size_t i = 0; i < updates.size(); i++) {
+    const UpdateManifest::Result* update = &(results->list.at(updates[i]));
+    const std::string& id = update->extension_id;
+    not_updated.erase(id);
+
+    GURL crx_url = update->crx_url;
+    if (id != kBlacklistAppID) {
+      NotifyUpdateFound(update->extension_id, update->version);
+    } else {
+      // The URL of the blacklist file is returned by the server and we need to
+      // be sure that we continue to be able to reliably detect whether a URL
+      // references a blacklist file.
+      DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;
+
+      // Force https (crbug.com/129587).
+      if (!crx_url.SchemeIsSecure()) {
+        url::Replacements<char> replacements;
+        std::string scheme("https");
+        replacements.SetScheme(scheme.c_str(),
+                               url::Component(0, scheme.size()));
+        crx_url = crx_url.ReplaceComponents(replacements);
+      }
+    }
+    scoped_ptr<ExtensionFetch> fetch(
+        new ExtensionFetch(update->extension_id,
+                           crx_url,
+                           update->package_hash,
+                           update->version,
+                           fetch_data.request_ids()));
+    FetchUpdatedExtension(fetch.Pass());
+  }
+
+  // If the manifest response included a <daystart> element, we want to save
+  // that value for any extensions which had sent a ping in the request.
+  if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
+      results->daystart_elapsed_seconds >= 0) {
+    Time day_start =
+        Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
+
+    const std::set<std::string>& extension_ids = fetch_data.extension_ids();
+    std::set<std::string>::const_iterator i;
+    for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
+      const std::string& id = *i;
+      // Entries are consumed and erased when the download finishes or fails.
+      ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
+      result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
+      result.day_start = day_start;
+    }
+  }
+
+  NotifyExtensionsDownloadFailed(
+      not_updated,
+      fetch_data.request_ids(),
+      ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
+}
+
+// Filters |possible_updates| down to the indices (into possible_updates.list)
+// that should actually be downloaded: entries included in |fetch_data| whose
+// version is newer than the installed one (unless the extension is pending
+// or the update was forced), and whose browser_min_version we satisfy.
+void ExtensionDownloader::DetermineUpdates(
+    const ManifestFetchData& fetch_data,
+    const UpdateManifest::Results& possible_updates,
+    std::vector<int>* result) {
+  for (size_t i = 0; i < possible_updates.list.size(); i++) {
+    const UpdateManifest::Result* update = &possible_updates.list[i];
+    const std::string& id = update->extension_id;
+
+    if (!fetch_data.Includes(id)) {
+      VLOG(2) << "Ignoring " << id << " from this manifest";
+      continue;
+    }
+
+    if (VLOG_IS_ON(2)) {
+      if (update->version.empty())
+        VLOG(2) << "manifest indicates " << id << " has no update";
+      else
+        VLOG(2) << "manifest indicates " << id << " latest version is '"
+                << update->version << "'";
+    }
+
+    if (!delegate_->IsExtensionPending(id)) {
+      // If we're not installing pending extension, and the update
+      // version is the same or older than what's already installed,
+      // we don't want it.
+      std::string version;
+      if (!delegate_->GetExtensionExistingVersion(id, &version)) {
+        VLOG(2) << id << " is not installed";
+        continue;
+      }
+
+      VLOG(2) << id << " is at '" << version << "'";
+
+      // We should skip the version check if update was forced.
+      if (!fetch_data.DidForceUpdate(id)) {
+        Version existing_version(version);
+        Version update_version(update->version);
+        if (!update_version.IsValid() ||
+            update_version.CompareTo(existing_version) <= 0) {
+          continue;
+        }
+      }
+    }
+
+    // If the update specifies a browser minimum version, do we qualify?
+    if (update->browser_min_version.length() > 0 &&
+        !ExtensionsBrowserClient::Get()->IsMinBrowserVersionSupported(
+            update->browser_min_version)) {
+      // TODO(asargent) - We may want this to show up in the extensions UI
+      // eventually. (http://crbug.com/12547).
+      LOG(WARNING) << "Updated version of extension " << id
+                   << " available, but requires chrome version "
+                   << update->browser_min_version;
+      continue;
+    }
+    VLOG(2) << "will try to update " << id;
+    result->push_back(i);
+  }
+}
+
+// Begins (or queues up) download of an updated extension. Duplicate requests
+// (same id or URL already queued/active) are merged by unioning their
+// request_ids; a cache hit for the exact version short-circuits the download.
+void ExtensionDownloader::FetchUpdatedExtension(
+    scoped_ptr<ExtensionFetch> fetch_data) {
+  if (!fetch_data->url.is_valid()) {
+    // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
+    LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
+               << "' for extension " << fetch_data->id;
+    return;
+  }
+
+  for (RequestQueue<ExtensionFetch>::iterator iter = extensions_queue_.begin();
+       iter != extensions_queue_.end();
+       ++iter) {
+    if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
+      iter->request_ids.insert(fetch_data->request_ids.begin(),
+                               fetch_data->request_ids.end());
+      return;  // already scheduled
+    }
+  }
+
+  if (extensions_queue_.active_request() &&
+      extensions_queue_.active_request()->url == fetch_data->url) {
+    extensions_queue_.active_request()->request_ids.insert(
+        fetch_data->request_ids.begin(), fetch_data->request_ids.end());
+  } else {
+    std::string version;
+    // Serve from the cache when it already holds this exact version.
+    if (extension_cache_ &&
+        extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
+        version == fetch_data->version) {
+      base::FilePath crx_path;
+      // Now get .crx file path and mark extension as used.
+      extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
+      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
+    } else {
+      extensions_queue_.ScheduleRequest(fetch_data.Pass());
+    }
+  }
+}
+
+// Hands a finished .crx (downloaded or cache-served) to the delegate, which
+// takes ownership of the file iff |file_ownership_passed| is true. The ping
+// result for this id is consumed and removed.
+void ExtensionDownloader::NotifyDelegateDownloadFinished(
+    scoped_ptr<ExtensionFetch> fetch_data,
+    const base::FilePath& crx_path,
+    bool file_ownership_passed) {
+  delegate_->OnExtensionDownloadFinished(fetch_data->id,
+                                         crx_path,
+                                         file_ownership_passed,
+                                         fetch_data->url,
+                                         fetch_data->version,
+                                         ping_results_[fetch_data->id],
+                                         fetch_data->request_ids);
+  ping_results_.erase(fetch_data->id);
+}
+
+// Callback invoked by extensions_queue_ to create and start the URLFetcher
+// for its current .crx request, applying the credential mode recorded on the
+// ExtensionFetch (none / cookies / OAuth2 token).
+void ExtensionDownloader::CreateExtensionFetcher() {
+  const ExtensionFetch* fetch = extensions_queue_.active_request();
+  extension_fetcher_.reset(net::URLFetcher::Create(
+      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
+  extension_fetcher_->SetRequestContext(request_context_.get());
+  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
+
+  int load_flags = net::LOAD_DISABLE_CACHE;
+  bool is_secure = fetch->url.SchemeIsSecure();
+  // Cookies are only ever sent over a secure connection, and only when the
+  // fetch has explicitly fallen back on cookie credentials.
+  if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
+    load_flags |= net::LOAD_DO_NOT_SEND_COOKIES | net::LOAD_DO_NOT_SAVE_COOKIES;
+  }
+  extension_fetcher_->SetLoadFlags(load_flags);
+
+  // Download CRX files to a temp file. The blacklist is small and will be
+  // processed in memory, so it is fetched into a string.
+  if (fetch->id != kBlacklistAppID) {
+    extension_fetcher_->SaveResponseToTemporaryFile(
+        BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
+  }
+
+  if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
+      is_secure) {
+    if (access_token_.empty()) {
+      // We should try OAuth2, but we have no token cached. This
+      // ExtensionFetcher will be started once the token fetch is complete,
+      // in either OnTokenFetchSuccess or OnTokenFetchFailure.
+      DCHECK(identity_provider_.get());
+      OAuth2TokenService::ScopeSet webstore_scopes;
+      webstore_scopes.insert(kWebstoreOAuth2Scope);
+      access_token_request_ =
+          identity_provider_->GetTokenService()->StartRequest(
+              identity_provider_->GetActiveAccountId(), webstore_scopes, this);
+      return;
+    }
+    extension_fetcher_->AddExtraRequestHeader(
+        base::StringPrintf("%s: Bearer %s",
+                           net::HttpRequestHeaders::kAuthorization,
+                           access_token_.c_str()));
+  }
+
+  VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
+  extension_fetcher_->Start();
+}
+
+// Handles a completed .crx fetch: on success, stores the file in the cache
+// (if any) and notifies the delegate; on auth failure, iterates credentials
+// and retries; otherwise retries per policy or reports CRX_FETCH_FAILED.
+// Always advances the extensions queue afterwards.
+void ExtensionDownloader::OnCRXFetchComplete(
+    const net::URLFetcher* source,
+    const GURL& url,
+    const net::URLRequestStatus& status,
+    int response_code,
+    const base::TimeDelta& backoff_delay) {
+  ExtensionFetch& active_request = *extensions_queue_.active_request();
+  const std::string& id = active_request.id;
+  if (status.status() == net::URLRequestStatus::SUCCESS &&
+      (response_code == 200 || url.SchemeIsFile())) {
+    RETRY_HISTOGRAM("CrxFetchSuccess",
+                    extensions_queue_.active_request_failure_count(),
+                    url);
+    base::FilePath crx_path;
+    // Take ownership of the file at |crx_path|.
+    CHECK(source->GetResponseAsFilePath(true, &crx_path));
+    scoped_ptr<ExtensionFetch> fetch_data =
+        extensions_queue_.reset_active_request();
+    if (extension_cache_) {
+      const std::string& version = fetch_data->version;
+      // The cache takes the file; delegate notification happens in its
+      // completion callback with |file_ownership_passed| = false.
+      extension_cache_->PutExtension(
+          id,
+          crx_path,
+          version,
+          base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
+                     weak_ptr_factory_.GetWeakPtr(),
+                     base::Passed(&fetch_data)));
+    } else {
+      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
+    }
+  } else if (IterateFetchCredentialsAfterFailure(
+                 &active_request, status, response_code)) {
+    // An auth failure we can respond to: retry with upgraded credentials.
+    extensions_queue_.RetryRequest(backoff_delay);
+  } else {
+    const std::set<int>& request_ids = active_request.request_ids;
+    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
+    VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
+            << "' response code:" << response_code;
+    if (ShouldRetryRequest(status, response_code) &&
+        extensions_queue_.active_request_failure_count() < kMaxRetries) {
+      extensions_queue_.RetryRequest(backoff_delay);
+    } else {
+      RETRY_HISTOGRAM("CrxFetchFailure",
+                      extensions_queue_.active_request_failure_count(),
+                      url);
+      // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
+      UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
+      delegate_->OnExtensionDownloadFailed(
+          id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
+    }
+    ping_results_.erase(id);
+    extensions_queue_.reset_active_request();
+  }
+
+  extension_fetcher_.reset();
+
+  // If there are any pending downloads left, start the next one.
+  extensions_queue_.StartNextRequest();
+}
+
+// Reports |error| to the delegate for every id in |extension_ids|, consuming
+// (and erasing) each id's pending ping result.
+void ExtensionDownloader::NotifyExtensionsDownloadFailed(
+    const std::set<std::string>& extension_ids,
+    const std::set<int>& request_ids,
+    ExtensionDownloaderDelegate::Error error) {
+  for (std::set<std::string>::const_iterator it = extension_ids.begin();
+       it != extension_ids.end();
+       ++it) {
+    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
+    delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
+    ping_results_.erase(*it);
+  }
+}
+
+// Broadcasts NOTIFICATION_EXTENSION_UPDATE_FOUND with the id/version of an
+// update discovered in a manifest. |updateInfo| only needs to live for the
+// duration of the (synchronous) Notify call.
+void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
+                                            const std::string& version) {
+  UpdateDetails updateInfo(id, Version(version));
+  content::NotificationService::current()->Notify(
+      extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
+      content::NotificationService::AllBrowserContextsAndSources(),
+      content::Details<UpdateDetails>(&updateInfo));
+}
+
+// After an authorization-related fetch failure, advances |fetch| to the next
+// credential strategy (none -> OAuth2 token or cookies -> next authuser).
+// Returns true if the fetch should be retried with the updated credentials,
+// false when all options are exhausted.
+bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
+    ExtensionFetch* fetch,
+    const net::URLRequestStatus& status,
+    int response_code) {
+  // NOTE(review): CANCELED is treated here as an auth failure alongside
+  // 401/403 — presumably covering requests aborted by an auth layer; confirm
+  // against URLFetcher semantics before relying on it.
+  bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
+                      (status.status() == net::URLRequestStatus::SUCCESS &&
+                       (response_code == net::HTTP_UNAUTHORIZED ||
+                        response_code == net::HTTP_FORBIDDEN));
+  if (!auth_failure) {
+    return false;
+  }
+  // Here we decide what to do next if the server refused to authorize this
+  // fetch.
+  switch (fetch->credentials) {
+    case ExtensionFetch::CREDENTIALS_NONE:
+      if (fetch->url.DomainIs(kGoogleDotCom) && identity_provider_) {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
+      } else {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
+      }
+      return true;
+    case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
+      fetch->oauth2_attempt_count++;
+      // OAuth2 may fail due to an expired access token, in which case we
+      // should invalidate the token and try again.
+      if (response_code == net::HTTP_UNAUTHORIZED &&
+          fetch->oauth2_attempt_count <= kMaxOAuth2Attempts) {
+        DCHECK(identity_provider_.get());
+        OAuth2TokenService::ScopeSet webstore_scopes;
+        webstore_scopes.insert(kWebstoreOAuth2Scope);
+        identity_provider_->GetTokenService()->InvalidateToken(
+            identity_provider_->GetActiveAccountId(),
+            webstore_scopes,
+            access_token_);
+        access_token_.clear();
+        return true;
+      }
+      // Either there is no Gaia identity available, the active identity
+      // doesn't have access to this resource, or the server keeps returning
+      // 401s and we've retried too many times. Fall back on cookies.
+      if (access_token_.empty() || response_code == net::HTTP_FORBIDDEN ||
+          fetch->oauth2_attempt_count > kMaxOAuth2Attempts) {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
+        return true;
+      }
+      // Something else is wrong. Time to give up.
+      return false;
+    case ExtensionFetch::CREDENTIALS_COOKIES:
+      if (response_code == net::HTTP_FORBIDDEN) {
+        // Try the next session identity, up to some maximum.
+        return IncrementAuthUserIndex(&fetch->url);
+      }
+      return false;
+    default:
+      NOTREACHED();
+  }
+  NOTREACHED();
+  return false;
+}
+
+// OAuth2TokenService::Consumer: a token arrived for the fetch that was
+// parked in CreateExtensionFetcher; attach it and start the fetch.
+void ExtensionDownloader::OnGetTokenSuccess(
+    const OAuth2TokenService::Request* request,
+    const std::string& access_token,
+    const base::Time& expiration_time) {
+  access_token_ = access_token;
+  extension_fetcher_->AddExtraRequestHeader(
+      base::StringPrintf("%s: Bearer %s",
+                         net::HttpRequestHeaders::kAuthorization,
+                         access_token_.c_str()));
+  extension_fetcher_->Start();
+}
+
+// OAuth2TokenService::Consumer failure path.
+void ExtensionDownloader::OnGetTokenFailure(
+    const OAuth2TokenService::Request* request,
+    const GoogleServiceAuthError& error) {
+  // If we fail to get an access token, kick the pending fetch and let it fall
+  // back on cookies.
+  extension_fetcher_->Start();
+}
+
+// Builds a new ManifestFetchData (caller owns it). Pings are only included
+// for update URLs on the configured ping-enabled domain; extra metrics
+// upgrade the ping mode when enabled.
+ManifestFetchData* ExtensionDownloader::CreateManifestFetchData(
+    const GURL& update_url,
+    int request_id) {
+  ManifestFetchData::PingMode ping_mode = ManifestFetchData::NO_PING;
+  if (update_url.DomainIs(ping_enabled_domain_.c_str())) {
+    if (enable_extra_update_metrics_) {
+      ping_mode = ManifestFetchData::PING_WITH_METRICS;
+    } else {
+      ping_mode = ManifestFetchData::PING;
+    }
+  }
+  return new ManifestFetchData(
+      update_url, request_id, brand_code_, manifest_query_params_, ping_mode);
+}
+
+} // namespace extensions
diff --git a/extensions/browser/updater/extension_downloader.h b/extensions/browser/updater/extension_downloader.h
new file mode 100644
index 0000000..6c0462c
--- /dev/null
+++ b/extensions/browser/updater/extension_downloader.h
@@ -0,0 +1,334 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef EXTENSIONS_BROWSER_UPDATER_EXTENSION_DOWNLOADER_H_
+#define EXTENSIONS_BROWSER_UPDATER_EXTENSION_DOWNLOADER_H_
+
+#include <deque>
+#include <map>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/basictypes.h"
+#include "base/compiler_specific.h"
+#include "base/memory/linked_ptr.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/version.h"
+#include "extensions/browser/updater/extension_downloader_delegate.h"
+#include "extensions/browser/updater/manifest_fetch_data.h"
+#include "extensions/browser/updater/request_queue.h"
+#include "extensions/common/extension.h"
+#include "extensions/common/update_manifest.h"
+#include "google_apis/gaia/oauth2_token_service.h"
+#include "net/url_request/url_fetcher_delegate.h"
+#include "url/gurl.h"
+
+class IdentityProvider;
+
+namespace net {
+class URLFetcher;
+class URLRequestContextGetter;
+class URLRequestStatus;
+}
+
+namespace extensions {
+
+// Identifies a particular version of an extension; used to report details of
+// an available update (see ExtensionDownloader::NotifyUpdateFound()).
+struct UpdateDetails {
+  UpdateDetails(const std::string& id, const base::Version& version);
+  ~UpdateDetails();
+
+  // The id of the extension the update applies to.
+  std::string id;
+  // The version associated with the update.
+  base::Version version;
+};
+
+class ExtensionCache;
+class ExtensionUpdaterTest;
+
+// A class that checks for updates of a given list of extensions, and downloads
+// the crx file when updates are found. It uses a |ExtensionDownloaderDelegate|
+// that takes ownership of the downloaded crx files, and handles events during
+// the update check.
+class ExtensionDownloader : public net::URLFetcherDelegate,
+                            public OAuth2TokenService::Consumer {
+ public:
+  // A closure which constructs a new ExtensionDownloader to be owned by the
+  // caller.
+  typedef base::Callback<scoped_ptr<ExtensionDownloader>(
+      ExtensionDownloaderDelegate* delegate)> Factory;
+
+  // |delegate| is stored as a raw pointer and must outlive the
+  // ExtensionDownloader.
+  ExtensionDownloader(ExtensionDownloaderDelegate* delegate,
+                      net::URLRequestContextGetter* request_context);
+  virtual ~ExtensionDownloader();
+
+  // Adds |extension| to the list of extensions to check for updates.
+  // Returns false if the |extension| can't be updated due to invalid details.
+  // In that case, no callbacks will be performed on the |delegate_|.
+  // The |request_id| is passed on as is to the various |delegate_| callbacks.
+  // This is used for example by ExtensionUpdater to keep track of when
+  // potentially concurrent update checks complete.
+  bool AddExtension(const Extension& extension, int request_id);
+
+  // Adds extension |id| to the list of extensions to check for updates.
+  // Returns false if the |id| can't be updated due to invalid details.
+  // In that case, no callbacks will be performed on the |delegate_|.
+  // The |request_id| is passed on as is to the various |delegate_| callbacks.
+  // This is used for example by ExtensionUpdater to keep track of when
+  // potentially concurrent update checks complete.
+  bool AddPendingExtension(const std::string& id,
+                           const GURL& update_url,
+                           int request_id);
+
+  // Schedules a fetch of the manifest of all the extensions added with
+  // AddExtension() and AddPendingExtension().
+  void StartAllPending(ExtensionCache* cache);
+
+  // Schedules an update check of the blacklist.
+  void StartBlacklistUpdate(const std::string& version,
+                            const ManifestFetchData::PingData& ping_data,
+                            int request_id);
+
+  // Sets an IdentityProvider to be used for OAuth2 authentication on protected
+  // Webstore downloads.
+  void SetWebstoreIdentityProvider(
+      scoped_ptr<IdentityProvider> identity_provider);
+
+  // Brand code to include with manifest fetch pings; see |brand_code_|.
+  void set_brand_code(const std::string& brand_code) {
+    brand_code_ = brand_code;
+  }
+
+  // Baseline query parameters appended to manifest fetches; see
+  // |manifest_query_params_|.
+  void set_manifest_query_params(const std::string& params) {
+    manifest_query_params_ = params;
+  }
+
+  // Domain for which ping data is enabled; see |ping_enabled_domain_|.
+  void set_ping_enabled_domain(const std::string& domain) {
+    ping_enabled_domain_ = domain;
+  }
+
+  // Whether extra metrics accompany ping data; see
+  // |enable_extra_update_metrics_|.
+  void set_enable_extra_update_metrics(bool enable) {
+    enable_extra_update_metrics_ = enable;
+  }
+
+  // These are needed for unit testing, to help identify the correct mock
+  // URLFetcher objects.
+  static const int kManifestFetcherId = 1;
+  static const int kExtensionFetcherId = 2;
+
+  // Update AppID for extension blacklist.
+  static const char kBlacklistAppID[];
+
+  // NOTE(review): looks like the retry limit applied to the URL fetchers; the
+  // fetcher setup is in the .cc — confirm whether it bounds manifest fetches,
+  // crx fetches, or both.
+  static const int kMaxRetries = 10;
+
+ private:
+  friend class ExtensionUpdaterTest;
+
+  // These counters are bumped as extensions are added to be fetched. They
+  // are then recorded as UMA metrics when all the extensions have been added.
+  struct URLStats {
+    URLStats()
+        : no_url_count(0),
+          google_url_count(0),
+          other_url_count(0),
+          extension_count(0),
+          theme_count(0),
+          app_count(0),
+          platform_app_count(0),
+          pending_count(0) {}
+
+    int no_url_count, google_url_count, other_url_count;
+    int extension_count, theme_count, app_count, platform_app_count,
+        pending_count;
+  };
+
+  // We need to keep track of some information associated with a url
+  // when doing a fetch.
+  struct ExtensionFetch {
+    ExtensionFetch();
+    ExtensionFetch(const std::string& id,
+                   const GURL& url,
+                   const std::string& package_hash,
+                   const std::string& version,
+                   const std::set<int>& request_ids);
+    ~ExtensionFetch();
+
+    std::string id;
+    GURL url;
+    std::string package_hash;
+    std::string version;
+    std::set<int> request_ids;
+
+    // Credential escalation order on auth failures: none -> OAuth2 -> cookies
+    // (see IterateFetchCredentialsAfterFailure()).
+    enum CredentialsMode {
+      CREDENTIALS_NONE = 0,
+      CREDENTIALS_OAUTH2_TOKEN,
+      CREDENTIALS_COOKIES,
+    };
+
+    // Indicates the type of credentials to include with this fetch.
+    CredentialsMode credentials;
+
+    // Counts the number of times OAuth2 authentication has been attempted for
+    // this fetch.
+    int oauth2_attempt_count;
+  };
+
+  // Helper for AddExtension() and AddPendingExtension().
+  bool AddExtensionData(const std::string& id,
+                        const base::Version& version,
+                        Manifest::Type extension_type,
+                        const GURL& extension_update_url,
+                        const std::string& update_url_data,
+                        int request_id,
+                        bool force_update,
+                        const std::string& install_source_override);
+
+  // Adds all recorded stats taken so far to histogram counts.
+  void ReportStats() const;
+
+  // Begins an update check.
+  void StartUpdateCheck(scoped_ptr<ManifestFetchData> fetch_data);
+
+  // Called by RequestQueue when a new manifest fetch request is started.
+  void CreateManifestFetcher();
+
+  // net::URLFetcherDelegate implementation.
+  virtual void OnURLFetchComplete(const net::URLFetcher* source) override;
+
+  // Handles the result of a manifest fetch.
+  void OnManifestFetchComplete(const GURL& url,
+                               const net::URLRequestStatus& status,
+                               int response_code,
+                               const base::TimeDelta& backoff_delay,
+                               const std::string& data);
+
+  // Once a manifest is parsed, this starts fetches of any relevant crx files.
+  // If |results| is null, it means something went wrong when parsing it.
+  void HandleManifestResults(const ManifestFetchData& fetch_data,
+                             const UpdateManifest::Results* results);
+
+  // Given a list of potential updates, returns the indices of the ones that are
+  // applicable (are actually a new version, etc.) in |result|.
+  void DetermineUpdates(const ManifestFetchData& fetch_data,
+                        const UpdateManifest::Results& possible_updates,
+                        std::vector<int>* result);
+
+  // Begins (or queues up) download of an updated extension.
+  void FetchUpdatedExtension(scoped_ptr<ExtensionFetch> fetch_data);
+
+  // Called by RequestQueue when a new extension fetch request is started.
+  void CreateExtensionFetcher();
+
+  // Handles the result of a crx fetch.
+  void OnCRXFetchComplete(const net::URLFetcher* source,
+                          const GURL& url,
+                          const net::URLRequestStatus& status,
+                          int response_code,
+                          const base::TimeDelta& backoff_delay);
+
+  // Invokes OnExtensionDownloadFailed() on the |delegate_| for each extension
+  // in the set, with |error| as the reason for failure.
+  void NotifyExtensionsDownloadFailed(const std::set<std::string>& id_set,
+                                      const std::set<int>& request_ids,
+                                      ExtensionDownloaderDelegate::Error error);
+
+  // Send a notification that an update was found for |id| that we'll
+  // attempt to download.
+  void NotifyUpdateFound(const std::string& id, const std::string& version);
+
+  // Do real work of StartAllPending. If .crx cache is used, this function
+  // is called when cache is ready.
+  void DoStartAllPending();
+
+  // Notify delegate and remove ping results.
+  void NotifyDelegateDownloadFinished(scoped_ptr<ExtensionFetch> fetch_data,
+                                      const base::FilePath& crx_path,
+                                      bool file_ownership_passed);
+
+  // Potentially updates an ExtensionFetch's authentication state and returns
+  // |true| if the fetch should be retried. Returns |false| if the failure was
+  // not related to authentication, leaving the ExtensionFetch data unmodified.
+  bool IterateFetchCredentialsAfterFailure(ExtensionFetch* fetch,
+                                           const net::URLRequestStatus& status,
+                                           int response_code);
+
+  // OAuth2TokenService::Consumer implementation.
+  virtual void OnGetTokenSuccess(const OAuth2TokenService::Request* request,
+                                 const std::string& access_token,
+                                 const base::Time& expiration_time) override;
+  virtual void OnGetTokenFailure(const OAuth2TokenService::Request* request,
+                                 const GoogleServiceAuthError& error) override;
+
+  // Creates a ManifestFetchData for |update_url|, enabling ping data when the
+  // URL's domain matches |ping_enabled_domain_|.
+  ManifestFetchData* CreateManifestFetchData(const GURL& update_url,
+                                             int request_id);
+
+  // The delegate that receives the crx files downloaded by the
+  // ExtensionDownloader, and that fills in optional ping and update url data.
+  ExtensionDownloaderDelegate* delegate_;
+
+  // The request context to use for the URLFetchers.
+  scoped_refptr<net::URLRequestContextGetter> request_context_;
+
+  // Collects UMA samples that are reported when ReportStats() is called.
+  URLStats url_stats_;
+
+  // List of data on fetches we're going to do. We limit the number of
+  // extensions grouped together in one batch to avoid running into the limits
+  // on the length of http GET requests, so there might be multiple
+  // ManifestFetchData* objects with the same base_url.
+  typedef std::map<std::pair<int, GURL>,
+                   std::vector<linked_ptr<ManifestFetchData>>> FetchMap;
+  FetchMap fetches_preparing_;
+
+  // Outstanding url fetch requests for manifests and updates.
+  scoped_ptr<net::URLFetcher> manifest_fetcher_;
+  scoped_ptr<net::URLFetcher> extension_fetcher_;
+
+  // Pending manifests and extensions to be fetched when the appropriate fetcher
+  // is available.
+  RequestQueue<ManifestFetchData> manifests_queue_;
+  RequestQueue<ExtensionFetch> extensions_queue_;
+
+  // Maps an extension-id to its PingResult data.
+  std::map<std::string, ExtensionDownloaderDelegate::PingResult> ping_results_;
+
+  // Cache for .crx files.
+  ExtensionCache* extension_cache_;
+
+  // An IdentityProvider which may be used for authentication on protected
+  // download requests. May be NULL.
+  scoped_ptr<IdentityProvider> identity_provider_;
+
+  // A Webstore download-scoped access token for the |identity_provider_|'s
+  // active account, if any.
+  std::string access_token_;
+
+  // A pending token fetch request.
+  scoped_ptr<OAuth2TokenService::Request> access_token_request_;
+
+  // Brand code to include with manifest fetch queries if sending ping data.
+  std::string brand_code_;
+
+  // Baseline parameters to include with manifest fetch queries.
+  std::string manifest_query_params_;
+
+  // Domain to enable ping data. Ping data will be sent with manifest fetches
+  // to update URLs which match this domain. Defaults to empty (no domain).
+  std::string ping_enabled_domain_;
+
+  // Indicates whether or not extra metrics should be included with ping data.
+  // Defaults to |false|.
+  bool enable_extra_update_metrics_;
+
+  // Used to create WeakPtrs to |this|.
+  base::WeakPtrFactory<ExtensionDownloader> weak_ptr_factory_;
+
+  DISALLOW_COPY_AND_ASSIGN(ExtensionDownloader);
+};
+
+} // namespace extensions
+
+#endif // EXTENSIONS_BROWSER_UPDATER_EXTENSION_DOWNLOADER_H_
diff --git a/extensions/browser/updater/extension_downloader_delegate.cc b/extensions/browser/updater/extension_downloader_delegate.cc
new file mode 100644
index 0000000..a82f5ca
--- /dev/null
+++ b/extensions/browser/updater/extension_downloader_delegate.cc
@@ -0,0 +1,44 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "extensions/browser/updater/extension_downloader_delegate.h"
+
+#include "base/logging.h"
+#include "base/version.h"
+
+namespace extensions {
+
+// PingResult defaults to "no ping sent"; |day_start| is only meaningful when
+// |did_ping| is true (see the struct declaration in the header).
+ExtensionDownloaderDelegate::PingResult::PingResult() : did_ping(false) {
+}
+
+ExtensionDownloaderDelegate::PingResult::~PingResult() {
+}
+
+ExtensionDownloaderDelegate::~ExtensionDownloaderDelegate() {
+}
+
+// Default implementation: download failures are ignored. The parameter is
+// named |request_ids| to match the declaration in
+// extension_downloader_delegate.h (it carries the set of request ids, not a
+// single id).
+void ExtensionDownloaderDelegate::OnExtensionDownloadFailed(
+    const std::string& id,
+    ExtensionDownloaderDelegate::Error error,
+    const ExtensionDownloaderDelegate::PingResult& ping_result,
+    const std::set<int>& request_ids) {
+}
+
+bool ExtensionDownloaderDelegate::GetPingDataForExtension(
+    const std::string& id,
+    ManifestFetchData::PingData* ping) {
+  // Default: don't include ping data in this extension's update check.
+  return false;
+}
+
+std::string ExtensionDownloaderDelegate::GetUpdateUrlData(
+    const std::string& id) {
+  // Default: no extra data for the extension's update url.
+  return std::string();
+}
+
+bool ExtensionDownloaderDelegate::ShouldForceUpdate(const std::string& id,
+                                                    std::string* source) {
+  // Default: never force an update; |source| is left untouched.
+  return false;
+}
+
+} // namespace extensions
diff --git a/extensions/browser/updater/extension_downloader_delegate.h b/extensions/browser/updater/extension_downloader_delegate.h
new file mode 100644
index 0000000..aed6885
--- /dev/null
+++ b/extensions/browser/updater/extension_downloader_delegate.h
@@ -0,0 +1,124 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef EXTENSIONS_BROWSER_UPDATER_EXTENSION_DOWNLOADER_DELEGATE_H_
+#define EXTENSIONS_BROWSER_UPDATER_EXTENSION_DOWNLOADER_DELEGATE_H_
+
+#include <set>
+#include <string>
+
+#include "base/time/time.h"
+#include "extensions/browser/updater/manifest_fetch_data.h"
+
+class GURL;
+
+namespace base {
+class FilePath;
+}
+
+namespace extensions {
+
+// Delegate interface for ExtensionDownloader. Receives success/failure
+// callbacks for extension update checks and supplies per-extension data
+// (ping data, update-url data, forced-update state) to the downloader.
+class ExtensionDownloaderDelegate {
+ public:
+  virtual ~ExtensionDownloaderDelegate();
+
+  // Passed as an argument to
+  // ExtensionDownloaderDelegate::OnExtensionDownloadFailed() to detail the
+  // reason for the failure.
+  enum Error {
+    // Background networking is disabled.
+    DISABLED,
+
+    // Failed to fetch the manifest for this extension.
+    MANIFEST_FETCH_FAILED,
+
+    // The manifest couldn't be parsed.
+    MANIFEST_INVALID,
+
+    // The manifest was fetched and parsed, and there are no updates for
+    // this extension.
+    NO_UPDATE_AVAILABLE,
+
+    // There was an update for this extension but the download of the crx
+    // failed.
+    CRX_FETCH_FAILED,
+  };
+
+  // Passed as an argument to the completion callbacks to signal whether
+  // the extension update sent a ping.
+  struct PingResult {
+    PingResult();
+    ~PingResult();
+
+    // Whether a ping was sent.
+    bool did_ping;
+
+    // The start of day, from the server's perspective. This is only valid
+    // when |did_ping| is true.
+    base::Time day_start;
+  };
+
+  // One of the following 3 methods is always invoked for a given extension
+  // id, if AddExtension() or AddPendingExtension() returned true when that
+  // extension was added to the ExtensionDownloader.
+  // To avoid duplicate work, ExtensionDownloader might merge multiple identical
+  // requests, so there is not necessarily a separate invocation of one of these
+  // methods for each call to AddExtension/AddPendingExtension. If it is
+  // important to be able to match up AddExtension calls with
+  // OnExtensionDownload callbacks, you need to make sure that for every call to
+  // AddExtension/AddPendingExtension the combination of extension id and
+  // request id is unique. The OnExtensionDownload related callbacks will then
+  // be called with all request ids that resulted in that extension being
+  // checked.
+
+  // Invoked if the extension couldn't be downloaded. |error| contains the
+  // failure reason. The default implementation does nothing.
+  virtual void OnExtensionDownloadFailed(const std::string& id,
+                                         Error error,
+                                         const PingResult& ping_result,
+                                         const std::set<int>& request_ids);
+
+  // Invoked if the extension had an update available and its crx was
+  // successfully downloaded to |path|. |ownership_passed| is true if delegate
+  // should get ownership of the file.
+  virtual void OnExtensionDownloadFinished(
+      const std::string& id,
+      const base::FilePath& path,
+      bool file_ownership_passed,
+      const GURL& download_url,
+      const std::string& version,
+      const PingResult& ping_result,
+      const std::set<int>& request_ids) = 0;
+
+  // The remaining methods are used by the ExtensionDownloader to retrieve
+  // information about extensions from the delegate.
+
+  // Invoked to fill the PingData for the given extension id. Returns false
+  // if PingData should not be included for this extension's update check
+  // (this is the default).
+  virtual bool GetPingDataForExtension(const std::string& id,
+                                       ManifestFetchData::PingData* ping);
+
+  // Invoked to get the update url data for this extension's update url, if
+  // there is any. The default implementation returns an empty string.
+  virtual std::string GetUpdateUrlData(const std::string& id);
+
+  // Invoked to determine whether extension |id| is currently
+  // pending installation.
+  virtual bool IsExtensionPending(const std::string& id) = 0;
+
+  // Invoked to get the current version of extension |id|. Returns false if
+  // that extension is not installed.
+  virtual bool GetExtensionExistingVersion(const std::string& id,
+                                           std::string* version) = 0;
+
+  // Determines if a given extension should be forced to update and (if so)
+  // what the source of this forcing is (i.e. what string will be passed
+  // in |installsource| as part of the update query parameters). The default
+  // implementation always returns |false|.
+  virtual bool ShouldForceUpdate(const std::string& id, std::string* source);
+};
+
+} // namespace extensions
+
+#endif // EXTENSIONS_BROWSER_UPDATER_EXTENSION_DOWNLOADER_DELEGATE_H_