author     rdsmith@chromium.org <rdsmith@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-07-18 18:24:43 +0000
committer  rdsmith@chromium.org <rdsmith@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-07-18 18:24:43 +0000
commit     5461067e0985187cc8b01043c2b57ef242ff2f6c (patch)
tree       6d40a37bb0bacf7447d994654f183c8b504c61d8 /content/browser
parent     5133bc4309b20528491db2d507c5ffdd02df98ae (diff)
download   chromium_src-5461067e0985187cc8b01043c2b57ef242ff2f6c.zip
           chromium_src-5461067e0985187cc8b01043c2b57ef242ff2f6c.tar.gz
           chromium_src-5461067e0985187cc8b01043c2b57ef242ff2f6c.tar.bz2
Revert 92870 - Modified cancel and interrupt processing to avoid race with history.
Avoid racing with the history callback by unilaterally removing DownloadItem
from queues on cancel/interrupt. This keeps the state<->queue correspondence
cleaner, and avoids leaving things on queues during shutdown. It might also
fix 85408; we'll see :-}.

BUG=85408
TEST=
Review URL: http://codereview.chromium.org/7294013

TBR=rdsmith@chromium.org
Review URL: http://codereview.chromium.org/7401024

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@92872 0039d316-1c4b-4281-b951-d872f2087c98
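For context, the change being reverted (r92870) moved queue removal into the
cancel/interrupt path itself instead of deferring it to the history callback.
A minimal sketch of that pattern follows; the container and method names
(active_downloads_, in_progress_, RemoveFromQueues()) are illustrative
assumptions standing in for the real DownloadManager internals, not the
reverted Chromium code.

// Illustrative sketch only -- names are assumptions, not the reverted code.
#include <cstdint>
#include <map>

class DownloadItem;

class DownloadManagerSketch {
 public:
  // Invoked when the user cancels a download or the network layer interrupts.
  void CancelDownload(int32_t download_id) {
    std::map<int32_t, DownloadItem*>::iterator it =
        active_downloads_.find(download_id);
    if (it == active_downloads_.end())
      return;
    // Remove the item from every internal queue immediately, instead of
    // waiting for the history callback. A callback that fires later for this
    // id then finds nothing to act on, so it cannot race with shutdown.
    RemoveFromQueues(download_id);
    // ...update the item's state and notify observers here...
  }

 private:
  void RemoveFromQueues(int32_t download_id) {
    active_downloads_.erase(download_id);
    in_progress_.erase(download_id);  // items still waiting on the history DB
  }

  std::map<int32_t, DownloadItem*> active_downloads_;
  std::map<int32_t, DownloadItem*> in_progress_;
};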
Diffstat (limited to 'content/browser')
-rw-r--r--  content/browser/download/save_package.cc               2
-rw-r--r--  content/browser/net/url_request_slow_download_job.cc  51
-rw-r--r--  content/browser/net/url_request_slow_download_job.h   17
3 files changed, 21 insertions(+), 49 deletions(-)
diff --git a/content/browser/download/save_package.cc b/content/browser/download/save_package.cc
index 0f05d9d..6420268 100644
--- a/content/browser/download/save_package.cc
+++ b/content/browser/download/save_package.cc
@@ -643,7 +643,7 @@ void SavePackage::Stop() {
wait_state_ = FAILED;
// Inform the DownloadItem we have canceled whole save page job.
- download_->Cancel();
+ download_->Cancel(false);
}
void SavePackage::CheckFinish() {
diff --git a/content/browser/net/url_request_slow_download_job.cc b/content/browser/net/url_request_slow_download_job.cc
index 4cb152c..ef1370f 100644
--- a/content/browser/net/url_request_slow_download_job.cc
+++ b/content/browser/net/url_request_slow_download_job.cc
@@ -10,11 +10,9 @@
#include "base/string_util.h"
#include "googleurl/src/gurl.h"
#include "net/base/io_buffer.h"
-#include "net/base/net_errors.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_filter.h"
-#include "net/url_request/url_request_status.h"
const int kFirstDownloadSize = 1024 * 35;
const int kSecondDownloadSize = 1024 * 10;
@@ -25,18 +23,9 @@ const char URLRequestSlowDownloadJob::kKnownSizeUrl[] =
"http://url.handled.by.slow.download/download-known-size";
const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] =
"http://url.handled.by.slow.download/download-finish";
-const char URLRequestSlowDownloadJob::kErrorFinishDownloadUrl[] =
- "http://url.handled.by.slow.download/download-error";
std::vector<URLRequestSlowDownloadJob*>
- URLRequestSlowDownloadJob::pending_requests_;
-
-// Return whether this is the finish or error URL.
-static bool IsCompletionUrl(const GURL& url) {
- if (url.spec() == URLRequestSlowDownloadJob::kFinishDownloadUrl)
- return true;
- return (url.spec() == URLRequestSlowDownloadJob::kErrorFinishDownloadUrl);
-}
+ URLRequestSlowDownloadJob::kPendingRequests;
void URLRequestSlowDownloadJob::Start() {
MessageLoop::current()->PostTask(
@@ -54,8 +43,6 @@ void URLRequestSlowDownloadJob::AddUrlHandler() {
&URLRequestSlowDownloadJob::Factory);
filter->AddUrlHandler(GURL(kFinishDownloadUrl),
&URLRequestSlowDownloadJob::Factory);
- filter->AddUrlHandler(GURL(kErrorFinishDownloadUrl),
- &URLRequestSlowDownloadJob::Factory);
}
/*static */
@@ -63,23 +50,19 @@ net::URLRequestJob* URLRequestSlowDownloadJob::Factory(
net::URLRequest* request,
const std::string& scheme) {
URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(request);
- if (!IsCompletionUrl(request->url()))
- URLRequestSlowDownloadJob::pending_requests_.push_back(job);
+ if (request->url().spec() != kFinishDownloadUrl)
+ URLRequestSlowDownloadJob::kPendingRequests.push_back(job);
return job;
}
/* static */
-void URLRequestSlowDownloadJob::FinishPendingRequests(bool error) {
+void URLRequestSlowDownloadJob::FinishPendingRequests() {
typedef std::vector<URLRequestSlowDownloadJob*> JobList;
- for (JobList::iterator it = pending_requests_.begin(); it !=
- pending_requests_.end(); ++it) {
- if (error) {
- (*it)->set_should_error_download();
- } else {
- (*it)->set_should_finish_download();
- }
+ for (JobList::iterator it = kPendingRequests.begin(); it !=
+ kPendingRequests.end(); ++it) {
+ (*it)->set_should_finish_download();
}
- pending_requests_.clear();
+ kPendingRequests.clear();
}
URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request)
@@ -87,21 +70,19 @@ URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(net::URLRequest* request)
first_download_size_remaining_(kFirstDownloadSize),
should_finish_download_(false),
should_send_second_chunk_(false),
- should_error_download_(false),
ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {}
void URLRequestSlowDownloadJob::StartAsync() {
- if (IsCompletionUrl(request_->url())) {
- URLRequestSlowDownloadJob::FinishPendingRequests(
- request_->url().spec() == kErrorFinishDownloadUrl);
- }
+ if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
+ URLRequestSlowDownloadJob::FinishPendingRequests();
NotifyHeadersComplete();
}
bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size,
int *bytes_read) {
- if (IsCompletionUrl(request_->url())) {
+ if (LowerCaseEqualsASCII(kFinishDownloadUrl,
+ request_->url().spec().c_str())) {
*bytes_read = 0;
return true;
}
@@ -151,9 +132,6 @@ void URLRequestSlowDownloadJob::CheckDoneStatus() {
should_send_second_chunk_ = true;
SetStatus(net::URLRequestStatus());
NotifyReadComplete(kSecondDownloadSize);
- } else if (should_error_download_) {
- NotifyDone(
- net::URLRequestStatus(net::URLRequestStatus::FAILED, net::ERR_FAILED));
} else {
MessageLoop::current()->PostDelayedTask(
FROM_HERE,
@@ -176,7 +154,8 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
net::HttpResponseInfo* info) const {
// Send back mock headers.
std::string raw_headers;
- if (IsCompletionUrl(request_->url())) {
+ if (LowerCaseEqualsASCII(kFinishDownloadUrl,
+ request_->url().spec().c_str())) {
raw_headers.append(
"HTTP/1.1 200 OK\n"
"Content-type: text/plain\n");
@@ -186,7 +165,7 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
"Content-type: application/octet-stream\n"
"Cache-Control: max-age=0\n");
- if (request_->url().spec() == kKnownSizeUrl) {
+ if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) {
raw_headers.append(base::StringPrintf(
"Content-Length: %d\n",
kFirstDownloadSize + kSecondDownloadSize));
diff --git a/content/browser/net/url_request_slow_download_job.h b/content/browser/net/url_request_slow_download_job.h
index 0104213..cef2adc 100644
--- a/content/browser/net/url_request_slow_download_job.h
+++ b/content/browser/net/url_request_slow_download_job.h
@@ -1,13 +1,9 @@
-// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This class simulates a slow download. This used in a UI test to test the
// download manager. Requests to |kUnknownSizeUrl| and |kKnownSizeUrl| start
-// downloads that pause after the first chunk of data is delivered.
-// A later request to |kFinishDownloadUrl| will finish these downloads.
-// A later request to |kErrorFinishDownloadUrl| will cause these
-// downloads to error with |net::ERR_FAILED|.
-// TODO(rdsmith): Update to allow control of returned error.
+// downloads that pause after the first
#ifndef CONTENT_BROWSER_NET_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_
#define CONTENT_BROWSER_NET_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_
@@ -40,7 +36,6 @@ class URLRequestSlowDownloadJob : public net::URLRequestJob {
static const char kUnknownSizeUrl[];
static const char kKnownSizeUrl[];
static const char kFinishDownloadUrl[];
- static const char kErrorFinishDownloadUrl[];
// Adds the testing URLs to the net::URLRequestFilter.
static void AddUrlHandler();
@@ -51,19 +46,17 @@ class URLRequestSlowDownloadJob : public net::URLRequestJob {
void GetResponseInfoConst(net::HttpResponseInfo* info) const;
// Mark all pending requests to be finished. We keep track of pending
- // requests in |pending_requests_|.
- static void FinishPendingRequests(bool error);
- static std::vector<URLRequestSlowDownloadJob*> pending_requests_;
+ // requests in |kPendingRequests|.
+ static void FinishPendingRequests();
+ static std::vector<URLRequestSlowDownloadJob*> kPendingRequests;
void StartAsync();
void set_should_finish_download() { should_finish_download_ = true; }
- void set_should_error_download() { should_error_download_ = true; }
int first_download_size_remaining_;
bool should_finish_download_;
bool should_send_second_chunk_;
- bool should_error_download_;
ScopedRunnableMethodFactory<URLRequestSlowDownloadJob> method_factory_;
};
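For reference, the header above documents how tests drive this job: a request
to kUnknownSizeUrl or kKnownSizeUrl starts a download that stalls after the
first chunk, and a later request to kFinishDownloadUrl completes every pending
request. A hedged sketch of that flow in a browser test; the fixture name,
helpers, and include paths are assumptions based on typical Chromium tests of
this era, not taken from this change.

// Sketch only: DownloadTest, ui_test_utils::NavigateToURL and the include
// paths are assumptions; thread-affinity details (AddUrlHandler must run
// where the URLRequestFilter lives) are glossed over.
#include "chrome/test/in_process_browser_test.h"
#include "chrome/test/ui_test_utils.h"
#include "content/browser/net/url_request_slow_download_job.h"
#include "googleurl/src/gurl.h"

IN_PROC_BROWSER_TEST_F(DownloadTest, SlowDownloadCompletes) {
  // Start a download that pauses after the first kFirstDownloadSize bytes.
  ui_test_utils::NavigateToURL(
      browser(), GURL(URLRequestSlowDownloadJob::kKnownSizeUrl));

  // ...wait for the in-progress download to appear...

  // Navigating to the finish URL triggers FinishPendingRequests(), which
  // marks every queued job as done and lets the downloads complete.
  ui_test_utils::NavigateToURL(
      browser(), GURL(URLRequestSlowDownloadJob::kFinishDownloadUrl));
}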