author     xunjieli <xunjieli@chromium.org>        2015-11-06 07:38:56 -0800
committer  Commit bot <commit-bot@chromium.org>    2015-11-06 15:39:44 +0000
commit     a505cbb45ec92d2617fe80490acf7d01ef4d3225 (patch)
tree       daaf2ab8a9615dca757b70dc15a55be824f59cc4 /ios
parent     ebe5d99c6d713d6f1be12e9a0f4a678e31be9aa0 (diff)
download   chromium_src-a505cbb45ec92d2617fe80490acf7d01ef4d3225.zip
           chromium_src-a505cbb45ec92d2617fe80490acf7d01ef4d3225.tar.gz
           chromium_src-a505cbb45ec92d2617fe80490acf7d01ef4d3225.tar.bz2
URLRequestJob: change ReadRawData contract
This CL is patched from ellyjones@ CL at crrev.com/1227893004.

Previously, the interface for URLRequestJob::ReadRawData was as follows:

    bool ReadRawData(IOBuffer*, int, int*)

Subclasses were expected to signal completion of the ReadRawData call by
calling NotifyDone, SetStatus, or maybe one of the other Notify* functions
on URLRequestJob, most of which do internal housekeeping and also drive the
URLRequest's state machine. This made it difficult to reason about the
URLRequestJob's state machine and needlessly complicated most of
URLRequestJob.

The new interface is as follows:

    int ReadRawData(IOBuffer*, int)

Subclasses are required to either:
  a) Return ERR_IO_PENDING, and call ReadRawDataComplete when the read
     completes in any way, or
  b) Return a count of bytes read >= 0, indicating synchronous success, or
  c) Return another error code < 0, indicating synchronous failure.

This substantially narrows the interface between URLRequestJob and its
subclasses and moves the logic for the URLRequest state machine largely
into URLRequestJob.

Also, the signature of URLRequestJob::ReadFilteredData and some other
internal URLRequestJob helpers changes to propagate detailed error codes
instead of coercing all errors to FAILED.

BUG=474859
BUG=329902

Review URL: https://codereview.chromium.org/1410643007

Cr-Commit-Position: refs/heads/master@{#358327}
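For orientation, here is a minimal sketch of what a subclass looks like under the new contract. It is illustrative only, not code from this CL: DemoJob, OnDataReady(), CopyOut() and have_data_ are hypothetical names, while the overridden methods, ReadRawDataComplete() and the net error codes are the real post-CL API.

#include "base/memory/ref_counted.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/url_request/url_request_job.h"

// Hypothetical job illustrating the new ReadRawData contract.
class DemoJob : public net::URLRequestJob {
 public:
  DemoJob(net::URLRequest* request, net::NetworkDelegate* network_delegate)
      : net::URLRequestJob(request, network_delegate) {}

  int ReadRawData(net::IOBuffer* buf, int buf_size) override {
    if (!have_data_) {
      // Case a): data not ready yet; park the buffer and finish later.
      pending_buf_ = buf;
      pending_buf_size_ = buf_size;
      return net::ERR_IO_PENDING;
    }
    // Case b)/c): finish synchronously with bytes read (>= 0) or a net error.
    return CopyOut(buf, buf_size);
  }

 private:
  // Called when the job's data arrives asynchronously.
  void OnDataReady() {
    have_data_ = true;
    if (pending_buf_.get()) {
      int result = CopyOut(pending_buf_.get(), pending_buf_size_);
      pending_buf_ = nullptr;
      ReadRawDataComplete(result);  // Exactly one completion per ERR_IO_PENDING.
    }
  }

  // Copies data into |buf|; returns the byte count or a negative net error.
  int CopyOut(net::IOBuffer* buf, int buf_size);

  bool have_data_ = false;
  scoped_refptr<net::IOBuffer> pending_buf_;
  int pending_buf_size_ = 0;
};

The iOS change below follows this pattern: DataAvailable() plays the role of OnDataReady(), completing the parked read with ReadRawDataComplete() instead of the old SetStatus()/NotifyReadComplete()/NotifyDone() bookkeeping.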
Diffstat (limited to 'ios')
-rw-r--r--  ios/web/webui/url_data_manager_ios_backend.cc | 36
1 file changed, 12 insertions(+), 24 deletions(-)
diff --git a/ios/web/webui/url_data_manager_ios_backend.cc b/ios/web/webui/url_data_manager_ios_backend.cc
index cc3e3e9..d5117c2 100644
--- a/ios/web/webui/url_data_manager_ios_backend.cc
+++ b/ios/web/webui/url_data_manager_ios_backend.cc
@@ -96,7 +96,7 @@ class URLRequestChromeJob : public net::URLRequestJob {
// net::URLRequestJob implementation.
void Start() override;
void Kill() override;
- bool ReadRawData(net::IOBuffer* buf, int buf_size, int* bytes_read) override;
+ int ReadRawData(net::IOBuffer* buf, int buf_size) override;
bool GetMimeType(std::string* mime_type) const override;
int GetResponseCode() const override;
void GetResponseInfo(net::HttpResponseInfo* info) override;
@@ -142,7 +142,7 @@ class URLRequestChromeJob : public net::URLRequestJob {
// Do the actual copy from data_ (the data we're serving) into |buf|.
// Separate from ReadRawData so we can handle async I/O.
- void CompleteRead(net::IOBuffer* buf, int buf_size, int* bytes_read);
+ int CompleteRead(net::IOBuffer* buf, int buf_size);
// The actual data we're serving. NULL until it's been fetched.
scoped_refptr<base::RefCountedMemory> data_;
@@ -291,58 +291,46 @@ void URLRequestChromeJob::MimeTypeAvailable(const std::string& mime_type) {
void URLRequestChromeJob::DataAvailable(base::RefCountedMemory* bytes) {
TRACE_EVENT_ASYNC_END0("browser", "DataManager:Request", this);
if (bytes) {
- // The request completed, and we have all the data.
- // Clear any IO pending status.
- SetStatus(net::URLRequestStatus());
-
data_ = bytes;
- int bytes_read;
if (pending_buf_.get()) {
CHECK(pending_buf_->data());
- CompleteRead(pending_buf_.get(), pending_buf_size_, &bytes_read);
+ int rv = CompleteRead(pending_buf_.get(), pending_buf_size_);
pending_buf_ = NULL;
- NotifyReadComplete(bytes_read);
+ ReadRawDataComplete(rv);
}
} else {
- // The request failed.
- NotifyDone(
- net::URLRequestStatus(net::URLRequestStatus::FAILED, net::ERR_FAILED));
+ ReadRawDataComplete(net::ERR_FAILED);
}
}
-bool URLRequestChromeJob::ReadRawData(net::IOBuffer* buf,
- int buf_size,
- int* bytes_read) {
+int URLRequestChromeJob::ReadRawData(net::IOBuffer* buf, int buf_size) {
if (!data_.get()) {
- SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
DCHECK(!pending_buf_.get());
CHECK(buf->data());
pending_buf_ = buf;
pending_buf_size_ = buf_size;
- return false; // Tell the caller we're still waiting for data.
+ return net::ERR_IO_PENDING; // Tell the caller we're still waiting for
+ // data.
}
// Otherwise, the data is available.
- CompleteRead(buf, buf_size, bytes_read);
- return true;
+ return CompleteRead(buf, buf_size);
}
-void URLRequestChromeJob::CompleteRead(net::IOBuffer* buf,
- int buf_size,
- int* bytes_read) {
+int URLRequestChromeJob::CompleteRead(net::IOBuffer* buf, int buf_size) {
// http://crbug.com/373841
char url_buf[128];
base::strlcpy(url_buf, request_->url().spec().c_str(), arraysize(url_buf));
base::debug::Alias(url_buf);
- int remaining = static_cast<int>(data_->size()) - data_offset_;
+ int remaining = data_->size() - data_offset_;
if (buf_size > remaining)
buf_size = remaining;
if (buf_size > 0) {
memcpy(buf->data(), data_->front() + data_offset_, buf_size);
data_offset_ += buf_size;
}
- *bytes_read = buf_size;
+ return buf_size;
}
namespace {