summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authordubroy@chromium.org <dubroy@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-02-18 16:31:45 +0000
committerdubroy@chromium.org <dubroy@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-02-18 16:31:45 +0000
commita8d379cc2009e1104f6ac1d8796fe738cf41eee8 (patch)
treecb3ca2644fe58df1dafa32d971977d8f4e839eb7
parentde81090e314e82251cdcbd5f7381b6a478e8a02b (diff)
downloadchromium_src-a8d379cc2009e1104f6ac1d8796fe738cf41eee8.zip
chromium_src-a8d379cc2009e1104f6ac1d8796fe738cf41eee8.tar.gz
chromium_src-a8d379cc2009e1104f6ac1d8796fe738cf41eee8.tar.bz2
History: Pass min/max timestamps as query parameters to history server.
Queries to the history server should match the same time range as the query to the history backend. Also de-dupe and sort results before handing them to the JS frontend. BUG=None Committed: https://src.chromium.org/viewvc/chrome?view=rev&revision=183116 Review URL: https://codereview.chromium.org/12217125 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@183129 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--base/values.cc4
-rw-r--r--base/values.h4
-rw-r--r--chrome/browser/history/web_history_service.cc42
-rw-r--r--chrome/browser/resources/history/history.js29
-rw-r--r--chrome/browser/ui/webui/history_ui.cc103
-rw-r--r--chrome/browser/ui/webui/history_ui.h15
-rw-r--r--chrome/browser/ui/webui/history_ui_unittest.cc119
-rw-r--r--chrome/chrome_tests_unit.gypi1
8 files changed, 259 insertions, 58 deletions
diff --git a/base/values.cc b/base/values.cc
index 3712d46..4768774 100644
--- a/base/values.cc
+++ b/base/values.cc
@@ -1000,13 +1000,13 @@ bool ListValue::Remove(const Value& value, size_t* index) {
return false;
}
-void ListValue::Erase(iterator iter, Value** out_value) {
+ListValue::iterator ListValue::Erase(iterator iter, Value** out_value) {
if (out_value)
*out_value = *iter;
else
delete *iter;
- list_.erase(iter);
+ return list_.erase(iter);
}
void ListValue::Append(Value* in_value) {
diff --git a/base/values.h b/base/values.h
index bb23ffc..8c87844 100644
--- a/base/values.h
+++ b/base/values.h
@@ -453,7 +453,9 @@ class BASE_EXPORT ListValue : public Value {
// Removes the element at |iter|. If |out_value| is NULL, the value will be
// deleted, otherwise ownership of the value is passed back to the caller.
- void Erase(iterator iter, Value** out_value);
+ // Returns an iterator pointing to the location of the element that
+ // followed the erased element.
+ iterator Erase(iterator iter, Value** out_value);
// Appends a Value to the end of the list.
void Append(Value* in_value);
diff --git a/chrome/browser/history/web_history_service.cc b/chrome/browser/history/web_history_service.cc
index 14d71e1..e858c9e 100644
--- a/chrome/browser/history/web_history_service.cc
+++ b/chrome/browser/history/web_history_service.cc
@@ -15,6 +15,7 @@
#include "google_apis/gaia/google_service_auth_error.h"
#include "googleurl/src/gurl.h"
#include "net/base/load_flags.h"
+#include "net/base/url_util.h"
#include "net/http/http_status_code.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_fetcher_delegate.h"
@@ -168,6 +169,39 @@ void QueryHistoryCompletionCallback(
callback.Run(request, NULL);
}
+// Converts a time into a string for use as a parameter in a request to the
+// history server.
+std::string ServerTimeString(base::Time time) {
+ return base::Int64ToString((time - base::Time::UnixEpoch()).InMicroseconds());
+}
+
+// Returns a URL for querying the history server for a query specified by
+// |options|.
+std::string GetQueryUrl(const QueryOptions& options) {
+ GURL url = GURL(kHistoryQueryHistoryUrl);
+ url = net::AppendQueryParameter(url, "titles", "1");
+
+ // Take |begin_time|, |end_time|, and |max_count| from the original query
+ // options, and convert them to the equivalent URL parameters.
+
+ base::Time end_time =
+ std::min(base::Time::FromInternalValue(options.EffectiveEndTime()),
+ base::Time::Now());
+ url = net::AppendQueryParameter(url, "max", ServerTimeString(end_time));
+
+ if (!options.begin_time.is_null()) {
+ url = net::AppendQueryParameter(
+ url, "min", ServerTimeString(options.begin_time));
+ }
+
+ if (options.max_count) {
+ url = net::AppendQueryParameter(
+ url, "num", base::IntToString(options.max_count));
+ }
+
+ return url.spec();
+}
+
} // namespace
WebHistoryService::Request::Request() {
@@ -192,7 +226,7 @@ scoped_ptr<WebHistoryService::Request> WebHistoryService::QueryHistory(
&QueryHistoryCompletionCallback, callback);
scoped_ptr<RequestImpl> request(
- new RequestImpl(profile_, kHistoryQueryHistoryUrl, completion_callback));
+ new RequestImpl(profile_, GetQueryUrl(options), completion_callback));
request->Start();
return request.PassAs<Request>();
}
@@ -204,10 +238,8 @@ scoped_ptr<WebHistoryService::Request> WebHistoryService::ExpireHistoryBetween(
const WebHistoryService::ExpireWebHistoryCallback& callback) {
// Determine the timestamps representing the beginning and end of the day.
- std::string min_timestamp(base::Int64ToString(
- (begin_time - base::Time::FromJsTime(0)).InMicroseconds()));
- std::string max_timestamp(base::Int64ToString(
- (end_time - base::Time::FromJsTime(0)).InMicroseconds()));
+ std::string min_timestamp = ServerTimeString(begin_time);
+ std::string max_timestamp = ServerTimeString(end_time);
DictionaryValue delete_request;
ListValue* deletions = new ListValue;
diff --git a/chrome/browser/resources/history/history.js b/chrome/browser/resources/history/history.js
index e25005b1..b49b443 100644
--- a/chrome/browser/resources/history/history.js
+++ b/chrome/browser/resources/history/history.js
@@ -393,7 +393,6 @@ HistoryModel.prototype.getSearchText = function() {
*/
HistoryModel.prototype.requestPage = function(page) {
this.requestedPage_ = page;
- this.changed = true;
this.updateSearch_();
};
@@ -415,34 +414,13 @@ HistoryModel.prototype.addResults = function(info, results) {
if (info.term != this.searchText_)
return;
- // If necessary, sort the results from newest to oldest.
- if (!results.sorted)
- results.sort(function(a, b) { return b.time - a.time; });
-
var lastVisit = this.visits_.slice(-1)[0];
var lastDay = lastVisit ? lastVisit.dateRelativeDay : null;
for (var i = 0, thisResult; thisResult = results[i]; i++) {
var thisDay = thisResult.dateRelativeDay;
var isSameDay = lastDay == thisDay;
-
- // Keep track of all URLs seen on a particular day, and only use the
- // latest visit from that day.
- if (!isSameDay)
- this.urlsFromLastSeenDay_ = {};
-
- // Only create a new Visit object for the first visit to a URL on a
- // particular day.
- var visit = this.urlsFromLastSeenDay_[thisResult.url];
- if (visit) {
- // Record the timestamp of the duplicate visit.
- visit.addDuplicateTimestamp(thisResult.timestamp);
- continue;
- }
- visit = new Visit(thisResult, isSameDay, this);
- this.urlsFromLastSeenDay_[thisResult.url] = visit;
- this.visits_.push(visit);
- this.changed = true;
+ this.visits_.push(new Visit(thisResult, isSameDay, this));
lastDay = thisDay;
}
@@ -543,11 +521,6 @@ HistoryModel.prototype.clearModel_ = function() {
// fetch the next page of results for a query.
this.queryCursor_ = null;
- // A map of URLs of visits on the same day as the last known visit.
- // This is used for de-duping URLs, so that we only show the most recent
- // visit to a URL on any day.
- this.urlsFromLastSeenDay_ = {};
-
if (this.view_)
this.view_.clear_();
};
diff --git a/chrome/browser/ui/webui/history_ui.cc b/chrome/browser/ui/webui/history_ui.cc
index 33b1a5b..97fae46 100644
--- a/chrome/browser/ui/webui/history_ui.cc
+++ b/chrome/browser/ui/webui/history_ui.cc
@@ -172,6 +172,7 @@ string16 getRelativeDateLocalized(const base::Time& visit_time) {
return date_str;
}
+
// Sets the correct year when subtracting months from a date.
void normalizeMonths(base::Time::Exploded* exploded) {
// Decrease a year at a time until we have a proper date.
@@ -181,6 +182,35 @@ void normalizeMonths(base::Time::Exploded* exploded) {
}
}
+// Comparison function for sorting history results from newest to oldest.
+bool SortByTimeDescending(Value* value1, Value* value2) {
+ DictionaryValue* result1;
+ DictionaryValue* result2;
+ double time1;
+ double time2;
+
+ if (value1->GetAsDictionary(&result1) && result1->GetDouble("time", &time1) &&
+ value2->GetAsDictionary(&result2) && result2->GetDouble("time", &time2)) {
+ return time1 > time2;
+ }
+ NOTREACHED() << "Unable to extract values";
+ return true;
+}
+
+// Extracts the visit time and URL from a query result value. Returns false
+// if either field is missing.
+bool GetResultTimeAndUrl(Value* result, base::Time* time, string16* url) {
+ DictionaryValue* result_dict;
+ double timestamp;
+
+ if (result->GetAsDictionary(&result_dict) &&
+ result_dict->GetDouble("time", &timestamp) &&
+ result_dict->GetString("url", url)) {
+ *time = base::Time::FromJsTime(timestamp);
+ return true;
+ }
+ return false;
+}
+
} // namespace
////////////////////////////////////////////////////////////////////////////////
@@ -234,7 +264,7 @@ bool BrowsingHistoryHandler::ExtractIntegerValueAtIndex(const ListValue* value,
void BrowsingHistoryHandler::WebHistoryTimeout() {
// TODO(dubroy): Communicate the failure to the front end.
if (!results_info_value_.empty())
- ReturnResultsToFrontEnd();
+ ReturnResultsToFrontEnd(false, 0);
}
void BrowsingHistoryHandler::QueryHistory(
@@ -395,7 +425,7 @@ void BrowsingHistoryHandler::HandleRemoveBookmark(const ListValue* args) {
}
DictionaryValue* BrowsingHistoryHandler::CreateQueryResultValue(
- const GURL& url, const string16 title, base::Time visit_time,
+ const GURL& url, const string16& title, base::Time visit_time,
bool is_search_result, const string16& snippet) {
DictionaryValue* result = new DictionaryValue();
SetURLAndTitle(result, title, url);
@@ -434,7 +464,53 @@ DictionaryValue* BrowsingHistoryHandler::CreateQueryResultValue(
return result;
}
-void BrowsingHistoryHandler::ReturnResultsToFrontEnd() {
+void BrowsingHistoryHandler::RemoveDuplicateResults(ListValue* results) {
+ ListValue new_results;
+ std::set<string16> current_day_urls; // Holds the unique URLs for a day.
+
+ // Keeps track of the day that |current_day_urls| is holding the URLs for,
+ // in order to handle removing per-day duplicates.
+ base::Time current_day_midnight;
+
+ for (ListValue::iterator it = results->begin(); it != results->end(); ) {
+ Value* visit_value;
+ base::Time visit_time;
+ string16 url;
+
+ it = results->Erase(it, &visit_value);
+ GetResultTimeAndUrl(visit_value, &visit_time, &url);
+
+ if (current_day_midnight != visit_time.LocalMidnight()) {
+ current_day_urls.clear();
+ current_day_midnight = visit_time.LocalMidnight();
+ }
+
+ // Keep this visit if it's the first visit to this URL on the current day.
+ if (current_day_urls.count(url) == 0) {
+ current_day_urls.insert(url);
+ new_results.Append(visit_value);
+ } else {
+ delete visit_value;
+ }
+ }
+ results->Swap(&new_results);
+}
+
+void BrowsingHistoryHandler::ReturnResultsToFrontEnd(
+ bool results_need_merge, int max_count) {
+ if (results_need_merge) {
+ std::sort(results_value_.begin(),
+ results_value_.end(),
+ SortByTimeDescending);
+ RemoveDuplicateResults(&results_value_);
+ }
+
+ // Truncate the result set if necessary.
+ if (max_count > 0) {
+ for (int i = results_value_.GetSize(); i > max_count; --i)
+ results_value_.Remove(i - 1, NULL);
+ }
+
web_ui()->CallJavascriptFunction(
"historyResult", results_info_value_, results_value_);
results_info_value_.Clear();
@@ -446,7 +522,8 @@ void BrowsingHistoryHandler::QueryComplete(
const history::QueryOptions& options,
HistoryService::Handle request_handle,
history::QueryResults* results) {
- unsigned int old_results_count = results_value_.GetSize();
+ // If we're appending to existing results, they will need merging.
+ bool needs_merge = results_value_.GetSize() > 0;
for (size_t i = 0; i < results->size(); ++i) {
history::URLResult const &page = (*results)[i];
@@ -471,12 +548,8 @@ void BrowsingHistoryHandler::QueryComplete(
results_info_value_.SetString("queryEndTime",
getRelativeDateLocalized(base::Time::Now()));
}
-
- // The results are sorted if and only if they were empty before.
- results_info_value_.SetBoolean("sorted", old_results_count == 0);
-
if (!web_history_timer_.IsRunning())
- ReturnResultsToFrontEnd();
+ ReturnResultsToFrontEnd(needs_merge, options.max_count);
}
void BrowsingHistoryHandler::WebHistoryQueryComplete(
@@ -486,11 +559,6 @@ void BrowsingHistoryHandler::WebHistoryQueryComplete(
const DictionaryValue* results_value) {
web_history_timer_.Stop();
- // Check if the results have already been received from the history DB.
- // It is unlikely, but possible, that server results will arrive first.
- bool has_local_results = !results_info_value_.empty();
- unsigned int old_results_count = results_value_.GetSize();
-
const ListValue* events = NULL;
if (results_value && results_value->GetList("event", &events)) {
for (unsigned int i = 0; i < events->GetSize(); ++i) {
@@ -532,14 +600,11 @@ void BrowsingHistoryHandler::WebHistoryQueryComplete(
}
}
}
- // The results are sorted if and only if they were empty before.
- results_info_value_.SetBoolean("sorted", old_results_count == 0);
} else if (results_value) {
NOTREACHED() << "Failed to parse JSON response.";
}
-
- if (has_local_results)
- ReturnResultsToFrontEnd();
+ if (!results_info_value_.empty())
+ ReturnResultsToFrontEnd(true, options.max_count);
}
void BrowsingHistoryHandler::RemoveComplete() {
diff --git a/chrome/browser/ui/webui/history_ui.h b/chrome/browser/ui/webui/history_ui.h
index d375b9a..608b383 100644
--- a/chrome/browser/ui/webui/history_ui.h
+++ b/chrome/browser/ui/webui/history_ui.h
@@ -45,6 +45,11 @@ class BrowsingHistoryHandler : public content::WebUIMessageHandler,
const content::NotificationSource& source,
const content::NotificationDetails& details) OVERRIDE;
+ // Removes duplicate visits from the given list of query results, only
+ // retaining the most recent visit to a URL on a particular day. |results|
+ // must already be sorted by visit time, most recent first.
+ static void RemoveDuplicateResults(ListValue* results);
+
private:
// The range for which to return results:
// - ALLTIME: allows access to all the results in a paginated way.
@@ -61,11 +66,15 @@ class BrowsingHistoryHandler : public content::WebUIMessageHandler,
// Creates a history query result value.
DictionaryValue* CreateQueryResultValue(
- const GURL& url, const string16 title, base::Time visit_time,
+ const GURL& url, const string16& title, base::Time visit_time,
bool is_search_result, const string16& snippet);
- // Helper to send the accumulated results of the query to the front end.
- void ReturnResultsToFrontEnd();
+ // Sends the accumulated results of the query to the front end, truncating
+ // the number to |max_count| if necessary. If |max_count| is 0, the results
+ // are not truncated.
+ // If |remove_duplicates| is true, duplicate visits on the same day are
+ // removed.
+ void ReturnResultsToFrontEnd(bool remove_duplicates, int max_count);
// Callback from |web_history_timer_| when a response from web history has
// not been received in time.
diff --git a/chrome/browser/ui/webui/history_ui_unittest.cc b/chrome/browser/ui/webui/history_ui_unittest.cc
new file mode 100644
index 0000000..72c5eed
--- /dev/null
+++ b/chrome/browser/ui/webui/history_ui_unittest.cc
@@ -0,0 +1,119 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "history_ui.h"
+
+#include "base/utf_string_conversions.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace {
+
+struct TestResult {
+ std::string url;
+ int64 hour_offset; // Visit time in hours past the baseline time.
+};
+
+// Duplicates on the same day in the local timezone are removed, so set a
+// baseline time in local time.
+const base::Time baseline_time = base::Time::UnixEpoch().LocalMidnight();
+
+// For each item in |results|, create a new Value representing the visit, and
+// insert it into |list_value|.
+void AddResultsToList(TestResult* results,
+ int results_size,
+ ListValue* list_value) {
+ for (int i = 0; i < results_size; ++i) {
+ DictionaryValue* result = new DictionaryValue;
+ result->SetString("url", results[i].url);
+ base::Time time =
+ baseline_time + base::TimeDelta::FromHours(results[i].hour_offset);
+ result->SetDouble("time", time.ToJsTime());
+ list_value->Append(result);
+ }
+}
+
+// Returns true if the result at |index| in |results| matches the test data
+// given by |correct_result|, otherwise returns false.
+bool ResultEquals(
+ const ListValue& results, int index, TestResult correct_result) {
+ const DictionaryValue* result;
+ string16 url;
+ double timestamp;
+
+ if (results.GetDictionary(index, &result) &&
+ result->GetDouble("time", &timestamp) &&
+ result->GetString("url", &url)) {
+ base::Time correct_time =
+ baseline_time + base::TimeDelta::FromHours(correct_result.hour_offset);
+ return base::Time::FromJsTime(timestamp) == correct_time &&
+ url == ASCIIToUTF16(correct_result.url);
+ }
+ NOTREACHED();
+ return false;
+}
+
+} // namespace
+
+// Tests that the RemoveDuplicateResults method correctly removes duplicate
+// visits to the same URL on the same day.
+TEST(HistoryUITest, RemoveDuplicateResults) {
+ {
+ // Basic test that duplicates on the same day are removed.
+ TestResult test_data[] = {
+ { "http://google.com", 0 },
+ { "http://google.de", 1 },
+ { "http://google.com", 2 },
+ { "http://google.com", 3 }
+ };
+ ListValue results;
+ AddResultsToList(test_data, arraysize(test_data), &results);
+ BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+ ASSERT_EQ(2U, results.GetSize());
+ EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+ EXPECT_TRUE(ResultEquals(results, 1, test_data[1]));
+ }
+
+ {
+ // Test that a duplicate URL on the next day is not removed.
+ TestResult test_data[] = {
+ { "http://google.com", 0 },
+ { "http://google.com", 23 },
+ { "http://google.com", 24 },
+ };
+ ListValue results;
+ AddResultsToList(test_data, arraysize(test_data), &results);
+ BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+ ASSERT_EQ(2U, results.GetSize());
+ EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+ EXPECT_TRUE(ResultEquals(results, 1, test_data[2]));
+ }
+
+ {
+ // Test multiple duplicates across multiple days.
+ TestResult test_data[] = {
+ // First day.
+ { "http://google.de", 0 },
+ { "http://google.com", 1 },
+ { "http://google.de", 2 },
+ { "http://google.com", 3 },
+
+ // Second day.
+ { "http://google.de", 24 },
+ { "http://google.com", 25 },
+ { "http://google.de", 26 },
+ { "http://google.com", 27 },
+ };
+ ListValue results;
+ AddResultsToList(test_data, arraysize(test_data), &results);
+ BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+ ASSERT_EQ(4U, results.GetSize());
+ EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+ EXPECT_TRUE(ResultEquals(results, 1, test_data[1]));
+ EXPECT_TRUE(ResultEquals(results, 2, test_data[4]));
+ EXPECT_TRUE(ResultEquals(results, 3, test_data[5]));
+ }
+}
diff --git a/chrome/chrome_tests_unit.gypi b/chrome/chrome_tests_unit.gypi
index 6cfbd96..eb9a48b 100644
--- a/chrome/chrome_tests_unit.gypi
+++ b/chrome/chrome_tests_unit.gypi
@@ -1444,6 +1444,7 @@
'browser/ui/web_contents_modal_dialog_manager_unittest.cc',
'browser/ui/website_settings/website_settings_unittest.cc',
'browser/ui/webui/fileicon_source_unittest.cc',
+ 'browser/ui/webui/history_ui_unittest.cc',
'browser/ui/webui/ntp/android/partner_bookmarks_shim_unittest.cc',
'browser/ui/webui/ntp/suggestions_combiner_unittest.cc',
'browser/ui/webui/options/language_options_handler_unittest.cc',