author | manzagop <manzagop@chromium.org> | 2014-12-05 21:04:33 -0800
---|---|---
committer | Commit bot <commit-bot@chromium.org> | 2014-12-06 05:05:02 +0000
commit | 3f7d74078e457ddfaf2bf07d1a78131249febc13 (patch)
tree | e837525aa074a57b75810bb1fca160aea8db1975 /components/suggestions
parent | 9adc3670e368b00ddd900a79baff4f6bd02b6b1e (diff)
SuggestionsService undo blacklist functionality.
This CL introduces functionality to undo the blacklisting of a URL,
assuming a UI that allows undoing for a few seconds after
blacklisting. The design consists of deferring blacklisted URL uploads
until they can no longer be undone, which is the simplest approach.
Changes:
- BlacklistStore now has a concept of a delay before a blacklisted URL becomes a candidate for upload (a standalone sketch of this idea follows the list)
- SuggestionsService now schedules blacklist uploads factoring in that delay (see the second sketch, after the review links)
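To make the first bullet concrete, here is a minimal, self-contained sketch of the upload-delay idea. It is not the patch itself (that lives in blacklist_store.cc in the diff below): base::TimeTicks is swapped for std::chrono, the pref-backed store is replaced by an in-memory set, and the class name SketchBlacklistStore is made up for illustration.

```cpp
// Standalone sketch (not Chromium code) of BlacklistStore's upload delay:
// a URL only becomes a candidate for upload once it has been blacklisted
// for longer than |upload_delay_|, and undo is possible until then.
#include <chrono>
#include <map>
#include <set>
#include <string>

class SketchBlacklistStore {
 public:
  using Clock = std::chrono::steady_clock;

  explicit SketchBlacklistStore(Clock::duration upload_delay)
      : upload_delay_(upload_delay) {}

  // Adds |url| and records (or refreshes) its blacklisting time.
  void BlacklistUrl(const std::string& url) {
    blacklist_.insert(url);
    blacklist_times_[url] = Clock::now();
  }

  // Returns true and sets |*url| if some URL has been blacklisted for longer
  // than |upload_delay_| and is therefore ready to be uploaded.
  bool GetCandidateForUpload(std::string* url) const {
    const Clock::time_point now = Clock::now();
    for (const std::string& candidate : blacklist_) {
      auto it = blacklist_times_.find(candidate);
      // A URL with no recorded time (e.g. one loaded after a restart) is
      // treated as immediately uploadable, matching the patch's behavior.
      if (it == blacklist_times_.end() || now - it->second >= upload_delay_) {
        *url = candidate;
        return true;
      }
    }
    return false;
  }

  // Undo only succeeds while the URL is still within the delay window.
  bool UndoBlacklistUrl(const std::string& url) {
    auto it = blacklist_times_.find(url);
    if (it == blacklist_times_.end() ||
        Clock::now() - it->second >= upload_delay_) {
      return false;  // Already a candidate for upload; too late to undo.
    }
    blacklist_.erase(url);
    blacklist_times_.erase(it);
    return true;
  }

 private:
  Clock::duration upload_delay_;
  std::set<std::string> blacklist_;
  std::map<std::string, Clock::time_point> blacklist_times_;
};
```

The key invariant is the one the patch documents for blacklist_times_: it is guaranteed to contain every URL that is not yet ready for upload, but may miss URLs that are, so a URL absent from the map can safely be treated as uploadable.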
BUG=430949
Review URL: https://codereview.chromium.org/766053010
Cr-Commit-Position: refs/heads/master@{#307158}
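The second bullet concerns when the service wakes up to attempt an upload. Below is a hedged sketch of that scheduling policy under the same standalone assumptions (plain std::chrono, illustrative function names; the real logic is SuggestionsService::UpdateBlacklistDelay and ScheduleBlacklistUpload in suggestions_service.cc below): the delay starts at one second, doubles after each failed request while it stays under a five-minute cap, resets on success, and the next upload task is posted after the store's time-until-ready plus this scheduling delay.

```cpp
// Standalone sketch (not Chromium code) of the exponential-backoff scheduling
// delay used when posting blacklist-upload tasks. Constants mirror the patch;
// the free-function names are illustrative only.
#include <chrono>

using std::chrono::seconds;

constexpr seconds kDefaultSchedulingDelay{1};
constexpr int kSchedulingBackoffMultiplier = 2;
constexpr seconds kSchedulingMaxDelay{5 * 60};

// Mirrors UpdateBlacklistDelay(): reset on success, otherwise double the
// delay as long as the doubled value stays under the cap.
seconds UpdateSchedulingDelay(seconds current, bool last_request_successful) {
  if (last_request_successful)
    return kDefaultSchedulingDelay;
  const seconds candidate = current * kSchedulingBackoffMultiplier;
  return candidate < kSchedulingMaxDelay ? candidate : current;
}

// Mirrors ScheduleBlacklistUpload(): the upload task is posted after the
// store's own time-until-ready plus the current scheduling delay.
seconds NextUploadDelay(seconds time_until_ready, seconds scheduling_delay) {
  return time_until_ready + scheduling_delay;
}
```

Starting at 1 s and doubling gives 1, 2, 4, ..., 256 s as the largest value accepted below the 300 s cap, so the effective maximum backoff is about 4.3 minutes, consistent with the patch's comment that it is at least 5 / 2 minutes.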
Diffstat (limited to 'components/suggestions')
-rw-r--r-- | components/suggestions/blacklist_store.cc | 117
-rw-r--r-- | components/suggestions/blacklist_store.h | 49
-rw-r--r-- | components/suggestions/blacklist_store_unittest.cc | 64
-rw-r--r-- | components/suggestions/suggestions_service.cc | 112
-rw-r--r-- | components/suggestions/suggestions_service.h | 32
-rw-r--r-- | components/suggestions/suggestions_service_unittest.cc | 244
6 files changed, 478 insertions, 140 deletions
diff --git a/components/suggestions/blacklist_store.cc b/components/suggestions/blacklist_store.cc index 1e92869..304df2f 100644 --- a/components/suggestions/blacklist_store.cc +++ b/components/suggestions/blacklist_store.cc @@ -4,6 +4,7 @@ #include "components/suggestions/blacklist_store.h" +#include <algorithm> #include <set> #include <string> @@ -13,8 +14,10 @@ #include "components/pref_registry/pref_registry_syncable.h" #include "components/suggestions/suggestions_pref_names.h" -namespace suggestions { +using base::TimeDelta; +using base::TimeTicks; +namespace suggestions { namespace { void PopulateBlacklistSet(const SuggestionsBlacklist& blacklist_proto, @@ -36,8 +39,9 @@ void PopulateBlacklistProto(const std::set<std::string>& blacklist_set, } // namespace -BlacklistStore::BlacklistStore(PrefService* profile_prefs) - : pref_service_(profile_prefs) { +BlacklistStore::BlacklistStore(PrefService* profile_prefs, + const base::TimeDelta& upload_delay) + : pref_service_(profile_prefs), upload_delay_(upload_delay) { DCHECK(pref_service_); // Log the blacklist's size. A single BlacklistStore is created for the @@ -55,28 +59,99 @@ bool BlacklistStore::BlacklistUrl(const GURL& url) { SuggestionsBlacklist blacklist_proto; LoadBlacklist(&blacklist_proto); - std::set<std::string> blacklist_set; PopulateBlacklistSet(blacklist_proto, &blacklist_set); - if (!blacklist_set.insert(url.spec()).second) { + bool success = false; + if (blacklist_set.insert(url.spec()).second) { + PopulateBlacklistProto(blacklist_set, &blacklist_proto); + success = StoreBlacklist(blacklist_proto); + } else { // |url| was already in the blacklist. - return true; + success = true; + } + + if (success) { + // Update the blacklist time. + blacklist_times_[url.spec()] = TimeTicks::Now(); } - PopulateBlacklistProto(blacklist_set, &blacklist_proto); - return StoreBlacklist(blacklist_proto); + return success; } -bool BlacklistStore::GetFirstUrlFromBlacklist(GURL* url) { +bool BlacklistStore::GetTimeUntilReadyForUpload(TimeDelta* delta) { SuggestionsBlacklist blacklist; LoadBlacklist(&blacklist); - if (!blacklist.urls_size()) return false; - GURL blacklisted(blacklist.urls(0)); - url->Swap(&blacklisted); + if (!blacklist.urls_size()) + return false; + + // Note: the size is non-negative. + if (blacklist_times_.size() < static_cast<size_t>(blacklist.urls_size())) { + // A url is not in the timestamp map: it's candidate for upload. This can + // happen after a restart. Another (undesired) case when this could happen + // is if more than one instance were created. + *delta = TimeDelta::FromSeconds(0); + return true; + } + + // Find the minimum blacklist time. Note: blacklist_times_ is NOT empty since + // blacklist is non-empty and blacklist_times_ contains as many items. + TimeDelta min_delay = TimeDelta::Max(); + for (const auto& kv : blacklist_times_) { + min_delay = std::min(upload_delay_ - (TimeTicks::Now() - kv.second), + min_delay); + } + DCHECK(min_delay != TimeDelta::Max()); + *delta = std::max(min_delay, TimeDelta::FromSeconds(0)); + return true; } +bool BlacklistStore::GetTimeUntilURLReadyForUpload(const GURL& url, + TimeDelta* delta) { + auto it = blacklist_times_.find(url.spec()); + if (it != blacklist_times_.end()) { + // The url is in the timestamps map. + *delta = std::max(upload_delay_ - (TimeTicks::Now() - it->second), + TimeDelta::FromSeconds(0)); + return true; + } + + // The url still might be in the blacklist. 
+ SuggestionsBlacklist blacklist; + LoadBlacklist(&blacklist); + for (int i = 0; i < blacklist.urls_size(); ++i) { + if (blacklist.urls(i) == url.spec()) { + *delta = TimeDelta::FromSeconds(0); + return true; + } + } + + return false; +} + +bool BlacklistStore::GetCandidateForUpload(GURL* url) { + SuggestionsBlacklist blacklist; + LoadBlacklist(&blacklist); + + for (int i = 0; i < blacklist.urls_size(); ++i) { + bool is_candidate = true; + auto it = blacklist_times_.find(blacklist.urls(i)); + if (it != blacklist_times_.end() && + TimeTicks::Now() < it->second + upload_delay_) { + // URL was added too recently. + is_candidate = false; + } + if (is_candidate) { + GURL blacklisted(blacklist.urls(i)); + url->Swap(&blacklisted); + return true; + } + } + + return false; +} + bool BlacklistStore::RemoveUrl(const GURL& url) { if (!url.is_valid()) return false; const std::string removal_candidate = url.spec(); @@ -84,13 +159,22 @@ bool BlacklistStore::RemoveUrl(const GURL& url) { SuggestionsBlacklist blacklist; LoadBlacklist(&blacklist); + bool removed = false; SuggestionsBlacklist updated_blacklist; for (int i = 0; i < blacklist.urls_size(); ++i) { - if (blacklist.urls(i) != removal_candidate) + if (blacklist.urls(i) == removal_candidate) { + removed = true; + } else { updated_blacklist.add_urls(blacklist.urls(i)); + } + } + + if (removed && StoreBlacklist(updated_blacklist)) { + blacklist_times_.erase(url.spec()); + return true; } - return StoreBlacklist(updated_blacklist); + return false; } void BlacklistStore::FilterSuggestions(SuggestionsProfile* profile) { @@ -133,6 +217,11 @@ void BlacklistStore::RegisterProfilePrefs( user_prefs::PrefRegistrySyncable::UNSYNCABLE_PREF); } + +// Test seam. For simplicity of mock creation. +BlacklistStore::BlacklistStore() { +} + bool BlacklistStore::LoadBlacklist(SuggestionsBlacklist* blacklist) { DCHECK(blacklist); diff --git a/components/suggestions/blacklist_store.h b/components/suggestions/blacklist_store.h index 072e3f6..6d1bc73 100644 --- a/components/suggestions/blacklist_store.h +++ b/components/suggestions/blacklist_store.h @@ -5,7 +5,11 @@ #ifndef COMPONENTS_SUGGESTIONS_BLACKLIST_STORE_H_ #define COMPONENTS_SUGGESTIONS_BLACKLIST_STORE_H_ +#include <map> +#include <string> + #include "base/macros.h" +#include "base/time/time.h" #include "components/suggestions/proto/suggestions.pb.h" #include "url/gurl.h" @@ -17,23 +21,39 @@ class PrefRegistrySyncable; namespace suggestions { -// A helper class for reading, writing and modifying a small blacklist stored -// in the Profile preferences. It also handles SuggestionsProfile -// filtering based on the stored blacklist. +// A helper class for reading, writing, modifying and applying a small URL +// blacklist, pending upload to the server. The class has a concept of time +// duration before which a blacklisted URL becomes candidate for upload to the +// server. Keep in mind most of the operations involve interaction with the disk +// (the profile's preferences). Note that the class should be used as a +// singleton for the upload candidacy to work properly. class BlacklistStore { public: - explicit BlacklistStore(PrefService* profile_prefs); + BlacklistStore( + PrefService* profile_prefs, + const base::TimeDelta& upload_delay = base::TimeDelta::FromSeconds(15)); virtual ~BlacklistStore(); - // Returns true if successful or |url| was already in the blacklist. + // Returns true if successful or |url| was already in the blacklist. 
If |url| + // was already in the blacklist, its blacklisting timestamp gets updated. virtual bool BlacklistUrl(const GURL& url); - // Sets |url| to the first URL from the blacklist. Returns false if the + // Gets the time until any URL is ready for upload. Returns false if the // blacklist is empty. - virtual bool GetFirstUrlFromBlacklist(GURL* url); + virtual bool GetTimeUntilReadyForUpload(base::TimeDelta* delta); + + // Gets the time until |url| is ready for upload. Returns false if |url| is + // not part of the blacklist. + virtual bool GetTimeUntilURLReadyForUpload(const GURL& url, + base::TimeDelta* delta); + + // Sets |url| to a URL from the blacklist that is candidate for upload. + // Returns false if there is no candidate for upload. + virtual bool GetCandidateForUpload(GURL* url); - // Removes |url| from the stored blacklist. Returns true if successful or if - // |url| is not in the blacklist. + // Removes |url| from the stored blacklist. Returns true if successful, false + // on failure or if |url| was not in the blacklist. Note that this function + // does not enforce a minimum time since blacklist before removal. virtual bool RemoveUrl(const GURL& url); // Applies the blacklist to |suggestions|. @@ -44,7 +64,7 @@ class BlacklistStore { protected: // Test seam. For simplicity of mock creation. - BlacklistStore() {} + BlacklistStore(); // Loads the blacklist data from the Profile preferences into // |blacklist|. If there is a problem with loading, the pref value is @@ -63,6 +83,15 @@ class BlacklistStore { // The pref service used to persist the suggestions blacklist. PrefService* pref_service_; + // Delay after which a URL becomes candidate for upload, measured from the + // last time the URL was added. + base::TimeDelta upload_delay_; + + // The times at which URLs were blacklisted. Used to determine when a URL is + // valid for server upload. Guaranteed to contain URLs that are not ready for + // upload. Might not contain URLs that are ready for upload. + std::map<std::string, base::TimeTicks> blacklist_times_; + DISALLOW_COPY_AND_ASSIGN(BlacklistStore); }; diff --git a/components/suggestions/blacklist_store_unittest.cc b/components/suggestions/blacklist_store_unittest.cc index d7d1fa8..9b2f8be 100644 --- a/components/suggestions/blacklist_store_unittest.cc +++ b/components/suggestions/blacklist_store_unittest.cc @@ -69,6 +69,7 @@ class BlacklistStoreTest : public testing::Test { DISALLOW_COPY_AND_ASSIGN(BlacklistStoreTest); }; +// Tests adding, removing to the blacklist and filtering. TEST_F(BlacklistStoreTest, BasicInteractions) { BlacklistStore blacklist_store(pref_service()); @@ -108,24 +109,65 @@ TEST_F(BlacklistStoreTest, BlacklistTwiceSuceeds) { EXPECT_TRUE(blacklist_store.BlacklistUrl(GURL(kTestUrlA))); } -TEST_F(BlacklistStoreTest, RemoveUnknownUrlSucceeds) { +TEST_F(BlacklistStoreTest, RemoveUnknownUrlFails) { BlacklistStore blacklist_store(pref_service()); - EXPECT_TRUE(blacklist_store.RemoveUrl(GURL(kTestUrlA))); + EXPECT_FALSE(blacklist_store.RemoveUrl(GURL(kTestUrlA))); } -TEST_F(BlacklistStoreTest, GetFirstUrlFromBlacklist) { - BlacklistStore blacklist_store(pref_service()); +TEST_F(BlacklistStoreTest, TestGetTimeUntilReadyForUpload) { + // Tests assumes completion within 1 hour. + base::TimeDelta upload_delay = base::TimeDelta::FromHours(1); + base::TimeDelta no_delay = base::TimeDelta::FromHours(0); + scoped_ptr<BlacklistStore> blacklist_store( + new BlacklistStore(pref_service(), upload_delay)); + base::TimeDelta candidate_delta; + + // Blacklist is empty. 
+ EXPECT_FALSE(blacklist_store->GetTimeUntilReadyForUpload(&candidate_delta)); + EXPECT_FALSE(blacklist_store->GetTimeUntilURLReadyForUpload( + GURL(kTestUrlA), &candidate_delta)); + + // Blacklist contains kTestUrlA. + EXPECT_TRUE(blacklist_store->BlacklistUrl(GURL(kTestUrlA))); + candidate_delta = upload_delay + base::TimeDelta::FromDays(1); + EXPECT_TRUE(blacklist_store->GetTimeUntilReadyForUpload(&candidate_delta)); + EXPECT_LE(candidate_delta, upload_delay); + EXPECT_GE(candidate_delta, no_delay); + candidate_delta = upload_delay + base::TimeDelta::FromDays(1); + EXPECT_TRUE(blacklist_store->GetTimeUntilURLReadyForUpload( + GURL(kTestUrlA), &candidate_delta)); + EXPECT_LE(candidate_delta, upload_delay); + EXPECT_GE(candidate_delta, no_delay); + EXPECT_FALSE(blacklist_store->GetTimeUntilURLReadyForUpload( + GURL(kTestUrlB), &candidate_delta)); + + // There should be no candidate for upload since the upload delay is 1 day. + // Note: this is a test that relies on timing. + GURL retrieved; + EXPECT_FALSE(blacklist_store->GetCandidateForUpload(&retrieved)); - // Expect GetFirstUrlFromBlacklist fails when blacklist empty. + // Same, but with an upload delay of 0. + blacklist_store.reset(new BlacklistStore(pref_service(), no_delay)); + EXPECT_TRUE(blacklist_store->BlacklistUrl(GURL(kTestUrlA))); + candidate_delta = no_delay + base::TimeDelta::FromDays(1); + EXPECT_TRUE(blacklist_store->GetTimeUntilReadyForUpload(&candidate_delta)); + EXPECT_EQ(candidate_delta, no_delay); + candidate_delta = no_delay + base::TimeDelta::FromDays(1); + EXPECT_TRUE(blacklist_store->GetTimeUntilURLReadyForUpload( + GURL(kTestUrlA), &candidate_delta)); + EXPECT_EQ(candidate_delta, no_delay); +} + +TEST_F(BlacklistStoreTest, GetCandidateForUpload) { + BlacklistStore blacklist_store(pref_service(), base::TimeDelta::FromDays(0)); + // Empty blacklist. GURL retrieved; - EXPECT_FALSE(blacklist_store.GetFirstUrlFromBlacklist(&retrieved)); + EXPECT_FALSE(blacklist_store.GetCandidateForUpload(&retrieved)); - // Blacklist A and B. + // Blacklist A and B. Expect to retrieve A or B. EXPECT_TRUE(blacklist_store.BlacklistUrl(GURL(kTestUrlA))); EXPECT_TRUE(blacklist_store.BlacklistUrl(GURL(kTestUrlB))); - - // Expect to retrieve A or B. - EXPECT_TRUE(blacklist_store.GetFirstUrlFromBlacklist(&retrieved)); + EXPECT_TRUE(blacklist_store.GetCandidateForUpload(&retrieved)); std::string retrieved_string = retrieved.spec(); EXPECT_TRUE(retrieved_string == std::string(kTestUrlA) || retrieved_string == std::string(kTestUrlB)); @@ -137,7 +179,6 @@ TEST_F(BlacklistStoreTest, LogsBlacklistSize) { // Create a first store - blacklist is empty at this point. scoped_ptr<BlacklistStore> blacklist_store( new BlacklistStore(pref_service())); - histogram_tester.ExpectTotalCount("Suggestions.LocalBlacklistSize", 1); histogram_tester.ExpectUniqueSample("Suggestions.LocalBlacklistSize", 0, 1); @@ -147,7 +188,6 @@ TEST_F(BlacklistStoreTest, LogsBlacklistSize) { // Create a new BlacklistStore and verify the counts. 
blacklist_store.reset(new BlacklistStore(pref_service())); - histogram_tester.ExpectTotalCount("Suggestions.LocalBlacklistSize", 2); histogram_tester.ExpectBucketCount("Suggestions.LocalBlacklistSize", 0, 1); histogram_tester.ExpectBucketCount("Suggestions.LocalBlacklistSize", 2, 1); diff --git a/components/suggestions/suggestions_service.cc b/components/suggestions/suggestions_service.cc index 44e173b..bb47aa1 100644 --- a/components/suggestions/suggestions_service.cc +++ b/components/suggestions/suggestions_service.cc @@ -30,6 +30,8 @@ #include "url/gurl.h" using base::CancelableClosure; +using base::TimeDelta; +using base::TimeTicks; namespace suggestions { @@ -74,17 +76,15 @@ void DispatchRequestsAndClear( } } -// Default delay used when scheduling a blacklist request. -const int kBlacklistDefaultDelaySec = 1; +// Default delay used when scheduling a request. +const int kDefaultSchedulingDelaySec = 1; -// Multiplier on the delay used when scheduling a blacklist request, in case the -// last observed request was unsuccessful. -const int kBlacklistBackoffMultiplier = 2; +// Multiplier on the delay used when re-scheduling a failed request. +const int kSchedulingBackoffMultiplier = 2; // Maximum valid delay for scheduling a request. Candidate delays larger than -// this are rejected. This means the maximum backoff is at least 300 / 2, i.e. -// 2.5 minutes. -const int kBlacklistMaxDelaySec = 300; // 5 minutes +// this are rejected. This means the maximum backoff is at least 5 / 2 minutes. +const int kSchedulingMaxDelaySec = 5 * 60; } // namespace @@ -110,7 +110,7 @@ SuggestionsService::SuggestionsService( suggestions_store_(suggestions_store.Pass()), thumbnail_manager_(thumbnail_manager.Pass()), blacklist_store_(blacklist_store.Pass()), - blacklist_delay_sec_(kBlacklistDefaultDelaySec), + scheduling_delay_(TimeDelta::FromSeconds(kDefaultSchedulingDelaySec)), suggestions_url_(kSuggestionsURL), blacklist_url_prefix_(kSuggestionsBlacklistURLPrefix), weak_ptr_factory_(this) {} @@ -154,21 +154,40 @@ void SuggestionsService::GetPageThumbnail( void SuggestionsService::BlacklistURL( const GURL& candidate_url, - const SuggestionsService::ResponseCallback& callback) { + const SuggestionsService::ResponseCallback& callback, + const base::Closure& fail_callback) { DCHECK(thread_checker_.CalledOnValidThread()); - waiting_requestors_.push_back(callback); - // Blacklist locally for immediate effect and serve the requestors. - blacklist_store_->BlacklistUrl(candidate_url); + if (!blacklist_store_->BlacklistUrl(candidate_url)) { + fail_callback.Run(); + return; + } + + waiting_requestors_.push_back(callback); ServeFromCache(); + // Blacklist uploads are scheduled on any request completion, so only schedule + // an upload if there is no ongoing request. + if (!pending_request_.get()) { + ScheduleBlacklistUpload(); + } +} - // Send blacklisting request. Even if this request ends up not being sent - // because of an ongoing request, a blacklist request is later scheduled. - // TODO(mathp): Currently, this will not send a request if there is already - // a request in flight (for suggestions or blacklist). Should we prioritize - // blacklist requests since they actually carry a payload? 
- IssueRequestIfNoneOngoing( - BuildBlacklistRequestURL(blacklist_url_prefix_, candidate_url)); +void SuggestionsService::UndoBlacklistURL( + const GURL& url, + const SuggestionsService::ResponseCallback& callback, + const base::Closure& fail_callback) { + DCHECK(thread_checker_.CalledOnValidThread()); + TimeDelta time_delta; + if (blacklist_store_->GetTimeUntilURLReadyForUpload(url, &time_delta) && + time_delta > TimeDelta::FromSeconds(0) && + blacklist_store_->RemoveUrl(url)) { + // The URL was not yet candidate for upload to the server and could be + // removed from the blacklist. + waiting_requestors_.push_back(callback); + ServeFromCache(); + return; + } + fail_callback.Run(); } // static @@ -219,7 +238,7 @@ void SuggestionsService::IssueRequestIfNoneOngoing(const GURL& url) { } pending_request_.reset(CreateSuggestionsRequest(url)); pending_request_->Start(); - last_request_started_time_ = base::TimeTicks::Now(); + last_request_started_time_ = TimeTicks::Now(); } net::URLFetcher* SuggestionsService::CreateSuggestionsRequest(const GURL& url) { @@ -251,7 +270,8 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { DVLOG(1) << "Suggestions server request failed with error: " << request_status.error() << ": " << net::ErrorToString(request_status.error()); - ScheduleBlacklistUpload(false); + UpdateBlacklistDelay(false); + ScheduleBlacklistUpload(); return; } @@ -261,12 +281,12 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { // A non-200 response code means that server has no (longer) suggestions for // this user. Aggressively clear the cache. suggestions_store_->ClearSuggestions(); - ScheduleBlacklistUpload(false); + UpdateBlacklistDelay(false); + ScheduleBlacklistUpload(); return; } - const base::TimeDelta latency = - base::TimeTicks::Now() - last_request_started_time_; + const TimeDelta latency = TimeTicks::Now() - last_request_started_time_; UMA_HISTOGRAM_MEDIUM_TIMES("Suggestions.FetchSuccessLatency", latency); // Handle a successful blacklisting. @@ -295,7 +315,8 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { LogResponseState(RESPONSE_INVALID); } - ScheduleBlacklistUpload(true); + UpdateBlacklistDelay(true); + ScheduleBlacklistUpload(); } void SuggestionsService::Shutdown() { @@ -317,20 +338,16 @@ void SuggestionsService::FilterAndServe(SuggestionsProfile* suggestions) { DispatchRequestsAndClear(*suggestions, &waiting_requestors_); } -void SuggestionsService::ScheduleBlacklistUpload(bool last_request_successful) { +void SuggestionsService::ScheduleBlacklistUpload() { DCHECK(thread_checker_.CalledOnValidThread()); - - UpdateBlacklistDelay(last_request_successful); - - // Schedule a blacklist upload task. - GURL blacklist_url; - if (blacklist_store_->GetFirstUrlFromBlacklist(&blacklist_url)) { + TimeDelta time_delta; + if (blacklist_store_->GetTimeUntilReadyForUpload(&time_delta)) { + // Blacklist cache is not empty: schedule. base::Closure blacklist_cb = base::Bind(&SuggestionsService::UploadOneFromBlacklist, weak_ptr_factory_.GetWeakPtr()); base::MessageLoopProxy::current()->PostDelayedTask( - FROM_HERE, blacklist_cb, - base::TimeDelta::FromSeconds(blacklist_delay_sec_)); + FROM_HERE, blacklist_cb, time_delta + scheduling_delay_); } } @@ -338,24 +355,29 @@ void SuggestionsService::UploadOneFromBlacklist() { DCHECK(thread_checker_.CalledOnValidThread()); GURL blacklist_url; - if (!blacklist_store_->GetFirstUrlFromBlacklist(&blacklist_url)) - return; // Local blacklist is empty. 
+ if (blacklist_store_->GetCandidateForUpload(&blacklist_url)) { + // Issue a blacklisting request. Even if this request ends up not being sent + // because of an ongoing request, a blacklist request is later scheduled. + IssueRequestIfNoneOngoing( + BuildBlacklistRequestURL(blacklist_url_prefix_, blacklist_url)); + return; + } - // Send blacklisting request. Even if this request ends up not being sent - // because of an ongoing request, a blacklist request is later scheduled. - IssueRequestIfNoneOngoing( - BuildBlacklistRequestURL(blacklist_url_prefix_, blacklist_url)); + // Even though there's no candidate for upload, the blacklist might not be + // empty. + ScheduleBlacklistUpload(); } void SuggestionsService::UpdateBlacklistDelay(bool last_request_successful) { DCHECK(thread_checker_.CalledOnValidThread()); if (last_request_successful) { - blacklist_delay_sec_ = kBlacklistDefaultDelaySec; + scheduling_delay_ = TimeDelta::FromSeconds(kDefaultSchedulingDelaySec); } else { - int candidate_delay = blacklist_delay_sec_ * kBlacklistBackoffMultiplier; - if (candidate_delay < kBlacklistMaxDelaySec) - blacklist_delay_sec_ = candidate_delay; + TimeDelta candidate_delay = + scheduling_delay_ * kSchedulingBackoffMultiplier; + if (candidate_delay < TimeDelta::FromSeconds(kSchedulingMaxDelaySec)) + scheduling_delay_ = candidate_delay; } } diff --git a/components/suggestions/suggestions_service.h b/components/suggestions/suggestions_service.h index 75d3275..16c1d60 100644 --- a/components/suggestions/suggestions_service.h +++ b/components/suggestions/suggestions_service.h @@ -81,10 +81,18 @@ class SuggestionsService : public KeyedService, public net::URLFetcherDelegate { const GURL& url, base::Callback<void(const GURL&, const SkBitmap*)> callback); - // Issue a blacklist request. If there is already a blacklist or suggestions - // request in flight, the new blacklist request is ignored. + // Adds a URL to the blacklist cache, invoking |callback| on success or + // |fail_callback| otherwise. The URL will eventually be uploaded to the + // server. void BlacklistURL(const GURL& candidate_url, - const ResponseCallback& callback); + const ResponseCallback& callback, + const base::Closure& fail_callback); + + // Removes a URL from the local blacklist, then invokes |callback|. If the URL + // cannot be removed, the |fail_callback| is called. + void UndoBlacklistURL(const GURL& url, + const ResponseCallback& callback, + const base::Closure& fail_callback); // Determines which URL a blacklist request was for, irrespective of the // request's status. Returns false if |request| is not a blacklist request. @@ -103,7 +111,10 @@ class SuggestionsService : public KeyedService, public net::URLFetcherDelegate { private: friend class SuggestionsServiceTest; - FRIEND_TEST_ALL_PREFIXES(SuggestionsServiceTest, BlacklistURLFails); + FRIEND_TEST_ALL_PREFIXES(SuggestionsServiceTest, BlacklistURL); + FRIEND_TEST_ALL_PREFIXES(SuggestionsServiceTest, BlacklistURLRequestFails); + FRIEND_TEST_ALL_PREFIXES(SuggestionsServiceTest, UndoBlacklistURL); + FRIEND_TEST_ALL_PREFIXES(SuggestionsServiceTest, UndoBlacklistURLFailsHelper); FRIEND_TEST_ALL_PREFIXES(SuggestionsServiceTest, UpdateBlacklistDelay); // Creates a request to the suggestions service, properly setting headers. @@ -125,20 +136,19 @@ class SuggestionsService : public KeyedService, public net::URLFetcherDelegate { void FilterAndServe(SuggestionsProfile* suggestions); // Schedules a blacklisting request if the local blacklist isn't empty. 
- // |last_request_successful| is used for exponentially backing off when - // requests fail. - void ScheduleBlacklistUpload(bool last_request_successful); + void ScheduleBlacklistUpload(); // If the local blacklist isn't empty, picks a URL from it and issues a // blacklist request for it. void UploadOneFromBlacklist(); - // Updates |blacklist_delay_sec_| based on the success of the last request. + // Updates |scheduling_delay_| based on the success of the last request. void UpdateBlacklistDelay(bool last_request_successful); // Test seams. - int blacklist_delay() const { return blacklist_delay_sec_; } - void set_blacklist_delay(int delay) { blacklist_delay_sec_ = delay; } + base::TimeDelta blacklist_delay() const { return scheduling_delay_; } + void set_blacklist_delay(base::TimeDelta delay) { + scheduling_delay_ = delay; } base::ThreadChecker thread_checker_; @@ -154,7 +164,7 @@ class SuggestionsService : public KeyedService, public net::URLFetcherDelegate { scoped_ptr<BlacklistStore> blacklist_store_; // Delay used when scheduling a blacklisting task. - int blacklist_delay_sec_; + base::TimeDelta scheduling_delay_; // Contains the current suggestions fetch request. Will only have a value // while a request is pending, and will be reset by |OnURLFetchComplete| or diff --git a/components/suggestions/suggestions_service_unittest.cc b/components/suggestions/suggestions_service_unittest.cc index 0f79b9c..40d2282 100644 --- a/components/suggestions/suggestions_service_unittest.cc +++ b/components/suggestions/suggestions_service_unittest.cc @@ -29,6 +29,7 @@ #include "testing/gtest/include/gtest/gtest.h" using testing::DoAll; +using ::testing::AnyNumber; using ::testing::Eq; using ::testing::Return; using testing::SetArgPointee; @@ -41,6 +42,7 @@ namespace { const char kTestTitle[] = "a title"; const char kTestUrl[] = "http://go.com"; const char kBlacklistUrl[] = "http://blacklist.com"; +const char kBlacklistUrlAlt[] = "http://blacklist-atl.com"; const int64 kTestDefaultExpiry = 1402200000000000; const int64 kTestSetExpiry = 1404792000000000; @@ -144,7 +146,11 @@ class MockImageManager : public suggestions::ImageManager { class MockBlacklistStore : public suggestions::BlacklistStore { public: MOCK_METHOD1(BlacklistUrl, bool(const GURL&)); - MOCK_METHOD1(GetFirstUrlFromBlacklist, bool(GURL*)); + MOCK_METHOD0(IsEmpty, bool()); + MOCK_METHOD1(GetTimeUntilReadyForUpload, bool(base::TimeDelta*)); + MOCK_METHOD2(GetTimeUntilURLReadyForUpload, + bool(const GURL&, base::TimeDelta*)); + MOCK_METHOD1(GetCandidateForUpload, bool(GURL*)); MOCK_METHOD1(RemoveUrl, bool(const GURL&)); MOCK_METHOD1(FilterSuggestions, void(SuggestionsProfile*)); }; @@ -158,6 +164,14 @@ class SuggestionsServiceTest : public testing::Test { ++suggestions_data_check_count_; } + void SetBlacklistFailure() { + blacklisting_failed_ = true; + } + + void SetUndoBlacklistFailure() { + undo_blacklisting_failed_ = true; + } + void ExpectEmptySuggestionsProfile(const SuggestionsProfile& profile) { EXPECT_EQ(0, profile.suggestions_size()); ++suggestions_empty_data_count_; @@ -165,11 +179,15 @@ class SuggestionsServiceTest : public testing::Test { int suggestions_data_check_count_; int suggestions_empty_data_count_; + bool blacklisting_failed_; + bool undo_blacklisting_failed_; protected: SuggestionsServiceTest() : suggestions_data_check_count_(0), suggestions_empty_data_count_(0), + blacklisting_failed_(false), + undo_blacklisting_failed_(false), factory_(NULL, base::Bind(&CreateURLFetcher)), mock_thumbnail_manager_(NULL), 
mock_blacklist_store_(NULL), @@ -219,7 +237,7 @@ class SuggestionsServiceTest : public testing::Test { EXPECT_CALL(*mock_thumbnail_manager_, Initialize(EqualsProto(suggestions_profile))); EXPECT_CALL(*mock_blacklist_store_, FilterSuggestions(_)); - EXPECT_CALL(*mock_blacklist_store_, GetFirstUrlFromBlacklist(_)) + EXPECT_CALL(*mock_blacklist_store_, GetTimeUntilReadyForUpload(_)) .WillOnce(Return(false)); // Send the request. The data will be returned to the callback. @@ -252,6 +270,67 @@ class SuggestionsServiceTest : public testing::Test { test_suggestions_store_->StoreSuggestions(CreateSuggestionsProfile()); } + void Blacklist(SuggestionsService* suggestions_service, GURL url) { + suggestions_service->BlacklistURL( + url, + base::Bind(&SuggestionsServiceTest::CheckSuggestionsData, + base::Unretained(this)), + base::Bind(&SuggestionsServiceTest::SetBlacklistFailure, + base::Unretained(this))); + } + + void UndoBlacklist(SuggestionsService* suggestions_service, GURL url) { + suggestions_service->UndoBlacklistURL( + url, + base::Bind(&SuggestionsServiceTest::CheckSuggestionsData, + base::Unretained(this)), + base::Bind(&SuggestionsServiceTest::SetUndoBlacklistFailure, + base::Unretained(this))); + } + + // Helper for Undo failure tests. Depending on |is_uploaded|, tests either + // the case where the URL is no longer in the local blacklist or the case + // in which it's not yet candidate for upload. + void UndoBlacklistURLFailsHelper(bool is_uploaded) { + scoped_ptr<SuggestionsService> suggestions_service( + CreateSuggestionsServiceWithMocks()); + EXPECT_TRUE(suggestions_service != NULL); + // Ensure scheduling the request doesn't happen before undo. + base::TimeDelta delay = base::TimeDelta::FromHours(1); + suggestions_service->set_blacklist_delay(delay); + SuggestionsProfile suggestions_profile = CreateSuggestionsProfile(); + GURL blacklist_url(kBlacklistUrl); + + // Blacklist expectations. + EXPECT_CALL(*mock_blacklist_store_, BlacklistUrl(Eq(blacklist_url))) + .WillOnce(Return(true)); + EXPECT_CALL(*mock_thumbnail_manager_, + Initialize(EqualsProto(suggestions_profile))); + EXPECT_CALL(*mock_blacklist_store_, FilterSuggestions(_)); + EXPECT_CALL(*mock_blacklist_store_, GetTimeUntilReadyForUpload(_)) + .WillOnce(DoAll(SetArgPointee<0>(delay), Return(true))); + // Undo expectations. + if (is_uploaded) { + // URL is not in local blacklist. + EXPECT_CALL(*mock_blacklist_store_, + GetTimeUntilURLReadyForUpload(Eq(blacklist_url), _)) + .WillOnce(Return(false)); + } else { + // URL is not yet candidate for upload. + base::TimeDelta negative_delay = base::TimeDelta::FromHours(-1); + EXPECT_CALL(*mock_blacklist_store_, + GetTimeUntilURLReadyForUpload(Eq(blacklist_url), _)) + .WillOnce(DoAll(SetArgPointee<1>(negative_delay), Return(true))); + } + + Blacklist(suggestions_service.get(), blacklist_url); + UndoBlacklist(suggestions_service.get(), blacklist_url); + + EXPECT_EQ(1, suggestions_data_check_count_); + EXPECT_FALSE(blacklisting_failed_); + EXPECT_TRUE(undo_blacklisting_failed_); + } + protected: base::MessageLoopForIO io_message_loop_; net::FakeURLFetcherFactory factory_; @@ -310,7 +389,7 @@ TEST_F(SuggestionsServiceTest, IssueRequestIfNoneOngoingError) { factory_.SetFakeResponse(GURL(kSuggestionsURL), "irrelevant", net::HTTP_OK, net::URLRequestStatus::FAILED); - EXPECT_CALL(*mock_blacklist_store_, GetFirstUrlFromBlacklist(_)) + EXPECT_CALL(*mock_blacklist_store_, GetTimeUntilReadyForUpload(_)) .WillOnce(Return(false)); // Send the request. Empty data will be returned to the callback. 
@@ -334,7 +413,7 @@ TEST_F(SuggestionsServiceTest, IssueRequestIfNoneOngoingResponseNotOK) { net::URLRequestStatus::SUCCESS); // Expect that an upload to the blacklist is scheduled. - EXPECT_CALL(*mock_blacklist_store_, GetFirstUrlFromBlacklist(_)) + EXPECT_CALL(*mock_blacklist_store_, GetTimeUntilReadyForUpload(_)) .WillOnce(Return(false)); // Send the request. Empty data will be returned to the callback. @@ -352,6 +431,8 @@ TEST_F(SuggestionsServiceTest, BlacklistURL) { scoped_ptr<SuggestionsService> suggestions_service( CreateSuggestionsServiceWithMocks()); EXPECT_TRUE(suggestions_service != NULL); + base::TimeDelta no_delay = base::TimeDelta::FromSeconds(0); + suggestions_service->set_blacklist_delay(no_delay); GURL blacklist_url(kBlacklistUrl); std::string request_url = GetExpectedBlacklistRequestUrl(blacklist_url); @@ -359,82 +440,149 @@ TEST_F(SuggestionsServiceTest, BlacklistURL) { factory_.SetFakeResponse(GURL(request_url), suggestions_profile.SerializeAsString(), net::HTTP_OK, net::URLRequestStatus::SUCCESS); - EXPECT_CALL(*mock_thumbnail_manager_, Initialize(EqualsProto(suggestions_profile))); // Expected calls to the blacklist store. EXPECT_CALL(*mock_blacklist_store_, BlacklistUrl(Eq(blacklist_url))) .WillOnce(Return(true)); - EXPECT_CALL(*mock_blacklist_store_, RemoveUrl(Eq(blacklist_url))) - .WillOnce(Return(true)); EXPECT_CALL(*mock_blacklist_store_, FilterSuggestions(_)); - EXPECT_CALL(*mock_blacklist_store_, GetFirstUrlFromBlacklist(_)) + EXPECT_CALL(*mock_blacklist_store_, GetTimeUntilReadyForUpload(_)) + .WillOnce(DoAll(SetArgPointee<0>(no_delay), Return(true))) .WillOnce(Return(false)); + EXPECT_CALL(*mock_blacklist_store_, GetCandidateForUpload(_)) + .WillOnce(DoAll(SetArgPointee<0>(blacklist_url), Return(true))); + EXPECT_CALL(*mock_blacklist_store_, RemoveUrl(Eq(blacklist_url))) + .WillOnce(Return(true)); - // Send the request. The data will be returned to the callback. - suggestions_service->BlacklistURL( - blacklist_url, base::Bind(&SuggestionsServiceTest::CheckSuggestionsData, - base::Unretained(this))); + Blacklist(suggestions_service.get(), blacklist_url); + + // Wait on the upload task. This only works when the scheduling task is not + // for future execution (note how both the SuggestionsService's scheduling + // delay and the BlacklistStore's candidacy delay are zero). Then wait on + // the blacklist request, then again on the next blacklist scheduling task. + base::MessageLoop::current()->RunUntilIdle(); + io_message_loop_.RunUntilIdle(); + base::MessageLoop::current()->RunUntilIdle(); // Ensure that CheckSuggestionsData() ran once. EXPECT_EQ(1, suggestions_data_check_count_); - - // (Testing only) wait until blacklist request is complete. - io_message_loop_.RunUntilIdle(); + EXPECT_FALSE(blacklisting_failed_); } -// Initial blacklist request fails, triggering a scheduled upload which -// succeeds. TEST_F(SuggestionsServiceTest, BlacklistURLFails) { scoped_ptr<SuggestionsService> suggestions_service( CreateSuggestionsServiceWithMocks()); EXPECT_TRUE(suggestions_service != NULL); - suggestions_service->set_blacklist_delay(0); // Don't wait during a test! - SuggestionsProfile suggestions_profile = CreateSuggestionsProfile(); GURL blacklist_url(kBlacklistUrl); - - // Expectations specific to the synchronous pass. 
EXPECT_CALL(*mock_blacklist_store_, BlacklistUrl(Eq(blacklist_url))) - .WillOnce(Return(true)); - EXPECT_CALL(*mock_thumbnail_manager_, - Initialize(EqualsProto(suggestions_profile))); - EXPECT_CALL(*mock_blacklist_store_, FilterSuggestions(_)); + .WillOnce(Return(false)); - // Expectations specific to the second request. - EXPECT_CALL(*mock_blacklist_store_, RemoveUrl(Eq(blacklist_url))) - .WillOnce(Return(true)); + Blacklist(suggestions_service.get(), blacklist_url); - // Expectations pertaining to both requests. - EXPECT_CALL(*mock_blacklist_store_, GetFirstUrlFromBlacklist(_)) - .WillOnce(Return(true)) - .WillOnce(DoAll(SetArgPointee<0>(blacklist_url), Return(true))) - .WillOnce(Return(false)); + EXPECT_TRUE(blacklisting_failed_); + EXPECT_EQ(0, suggestions_data_check_count_); +} + +// Initial blacklist request fails, triggering a second which succeeds. +TEST_F(SuggestionsServiceTest, BlacklistURLRequestFails) { + scoped_ptr<SuggestionsService> suggestions_service( + CreateSuggestionsServiceWithMocks()); + EXPECT_TRUE(suggestions_service != NULL); + base::TimeDelta no_delay = base::TimeDelta::FromSeconds(0); + suggestions_service->set_blacklist_delay(no_delay); - // Set up behavior for the first call to blacklist. + GURL blacklist_url(kBlacklistUrl); std::string request_url = GetExpectedBlacklistRequestUrl(blacklist_url); + GURL blacklist_url_alt(kBlacklistUrlAlt); + std::string request_url_alt = GetExpectedBlacklistRequestUrl( + blacklist_url_alt); + SuggestionsProfile suggestions_profile = CreateSuggestionsProfile(); + + // Note: we want to set the response for the blacklist URL to first + // succeed, then fail. This doesn't seem possible. For simplicity of testing, + // we'll pretend the URL changed in the BlacklistStore between the first and + // the second request, and adjust expectations accordingly. factory_.SetFakeResponse(GURL(request_url), "irrelevant", net::HTTP_OK, net::URLRequestStatus::FAILED); + factory_.SetFakeResponse(GURL(request_url_alt), + suggestions_profile.SerializeAsString(), + net::HTTP_OK, net::URLRequestStatus::SUCCESS); - // Send the request. The data will be returned to the callback immediately. - suggestions_service->BlacklistURL( - blacklist_url, base::Bind(&SuggestionsServiceTest::CheckSuggestionsData, - base::Unretained(this))); + // Expectations. + EXPECT_CALL(*mock_thumbnail_manager_, + Initialize(EqualsProto(suggestions_profile))); + EXPECT_CALL(*mock_blacklist_store_, BlacklistUrl(Eq(blacklist_url))) + .WillOnce(Return(true)); + EXPECT_CALL(*mock_blacklist_store_, FilterSuggestions(_)); + EXPECT_CALL(*mock_blacklist_store_, GetTimeUntilReadyForUpload(_)) + .WillOnce(DoAll(SetArgPointee<0>(no_delay), Return(true))) + .WillOnce(DoAll(SetArgPointee<0>(no_delay), Return(true))) + .WillOnce(Return(false)); + EXPECT_CALL(*mock_blacklist_store_, GetCandidateForUpload(_)) + .WillOnce(DoAll(SetArgPointee<0>(blacklist_url), Return(true))) + .WillOnce(DoAll(SetArgPointee<0>(blacklist_url_alt), Return(true))); + EXPECT_CALL(*mock_blacklist_store_, RemoveUrl(Eq(blacklist_url_alt))) + .WillOnce(Return(true)); - // Ensure that CheckSuggestionsData() ran once. + // Blacklist call, first request attempt. + Blacklist(suggestions_service.get(), blacklist_url); EXPECT_EQ(1, suggestions_data_check_count_); + EXPECT_FALSE(blacklisting_failed_); - // We can now set up behavior for the second call to blacklist. 
- factory_.SetFakeResponse(GURL(request_url), - suggestions_profile.SerializeAsString(), - net::HTTP_OK, net::URLRequestStatus::SUCCESS); - - // Wait until first request is complete. + // Wait for the first scheduling, the first request, the second scheduling, + // second request and the third scheduling. Again, note that calling + // RunUntilIdle on the MessageLoop only works when the task is not posted for + // the future. + base::MessageLoop::current()->RunUntilIdle(); io_message_loop_.RunUntilIdle(); - // ... Other task gets posted to the message loop. base::MessageLoop::current()->RunUntilIdle(); - // ... And completes. io_message_loop_.RunUntilIdle(); + base::MessageLoop::current()->RunUntilIdle(); +} + +TEST_F(SuggestionsServiceTest, UndoBlacklistURL) { + scoped_ptr<SuggestionsService> suggestions_service( + CreateSuggestionsServiceWithMocks()); + EXPECT_TRUE(suggestions_service != NULL); + // Ensure scheduling the request doesn't happen before undo. + base::TimeDelta delay = base::TimeDelta::FromHours(1); + suggestions_service->set_blacklist_delay(delay); + SuggestionsProfile suggestions_profile = CreateSuggestionsProfile(); + GURL blacklist_url(kBlacklistUrl); + + // Blacklist expectations. + EXPECT_CALL(*mock_blacklist_store_, BlacklistUrl(Eq(blacklist_url))) + .WillOnce(Return(true)); + EXPECT_CALL(*mock_thumbnail_manager_, + Initialize(EqualsProto(suggestions_profile))) + .Times(AnyNumber()); + EXPECT_CALL(*mock_blacklist_store_, FilterSuggestions(_)) + .Times(AnyNumber()); + EXPECT_CALL(*mock_blacklist_store_, GetTimeUntilReadyForUpload(_)) + .WillOnce(DoAll(SetArgPointee<0>(delay), Return(true))); + // Undo expectations. + EXPECT_CALL(*mock_blacklist_store_, + GetTimeUntilURLReadyForUpload(Eq(blacklist_url), _)) + .WillOnce(DoAll(SetArgPointee<1>(delay), Return(true))); + EXPECT_CALL(*mock_blacklist_store_, RemoveUrl(Eq(blacklist_url))) + .WillOnce(Return(true)); + + Blacklist(suggestions_service.get(), blacklist_url); + UndoBlacklist(suggestions_service.get(), blacklist_url); + + EXPECT_EQ(2, suggestions_data_check_count_); + EXPECT_FALSE(blacklisting_failed_); + EXPECT_FALSE(undo_blacklisting_failed_); +} + + +TEST_F(SuggestionsServiceTest, UndoBlacklistURLFailsIfNotInBlacklist) { + UndoBlacklistURLFailsHelper(true); +} + +TEST_F(SuggestionsServiceTest, UndoBlacklistURLFailsIfAlreadyCandidate) { + UndoBlacklistURLFailsHelper(false); } TEST_F(SuggestionsServiceTest, GetBlacklistedUrl) { @@ -465,7 +613,7 @@ TEST_F(SuggestionsServiceTest, GetBlacklistedUrl) { TEST_F(SuggestionsServiceTest, UpdateBlacklistDelay) { scoped_ptr<SuggestionsService> suggestions_service( CreateSuggestionsServiceWithMocks()); - int initial_delay = suggestions_service->blacklist_delay(); + base::TimeDelta initial_delay = suggestions_service->blacklist_delay(); // Delay unchanged on success. suggestions_service->UpdateBlacklistDelay(true); |