Diffstat (limited to 'chrome/browser/ui/webui')
-rw-r--r--   chrome/browser/ui/webui/history_ui.cc           | 103
-rw-r--r--   chrome/browser/ui/webui/history_ui.h            |  15
-rw-r--r--   chrome/browser/ui/webui/history_ui_unittest.cc  | 119
3 files changed, 215 insertions, 22 deletions
diff --git a/chrome/browser/ui/webui/history_ui.cc b/chrome/browser/ui/webui/history_ui.cc
index 33b1a5b..97fae46 100644
--- a/chrome/browser/ui/webui/history_ui.cc
+++ b/chrome/browser/ui/webui/history_ui.cc
@@ -172,6 +172,7 @@ string16 getRelativeDateLocalized(const base::Time& visit_time) {
   return date_str;
 }
 
+// Sets the correct year when subtracting months from a date.
 void normalizeMonths(base::Time::Exploded* exploded) {
   // Decrease a year at a time until we have a proper date.
   while (exploded->month < 1) {
@@ -181,6 +182,35 @@ void normalizeMonths(base::Time::Exploded* exploded) {
   }
 }
 
+// Comparison function for sorting history results from newest to oldest.
+bool SortByTimeDescending(Value* value1, Value* value2) {
+  DictionaryValue* result1;
+  DictionaryValue* result2;
+  double time1;
+  double time2;
+
+  if (value1->GetAsDictionary(&result1) && result1->GetDouble("time", &time1) &&
+      value2->GetAsDictionary(&result2) && result2->GetDouble("time", &time2)) {
+    return time1 > time2;
+  }
+  NOTREACHED() << "Unable to extract values";
+  return true;
+}
+
+// Returns the time and URL of a query result value.
+bool GetResultTimeAndUrl(Value* result, base::Time* time, string16* url) {
+  DictionaryValue* result_dict;
+  double timestamp;
+
+  if (result->GetAsDictionary(&result_dict) &&
+      result_dict->GetDouble("time", &timestamp) &&
+      result_dict->GetString("url", url)) {
+    *time = base::Time::FromJsTime(timestamp);
+    return true;
+  }
+  return false;
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -234,7 +264,7 @@ bool BrowsingHistoryHandler::ExtractIntegerValueAtIndex(const ListValue* value,
 void BrowsingHistoryHandler::WebHistoryTimeout() {
   // TODO(dubroy): Communicate the failure to the front end.
   if (!results_info_value_.empty())
-    ReturnResultsToFrontEnd();
+    ReturnResultsToFrontEnd(false, 0);
 }
 
 void BrowsingHistoryHandler::QueryHistory(
@@ -395,7 +425,7 @@ void BrowsingHistoryHandler::HandleRemoveBookmark(const ListValue* args) {
 }
 
 DictionaryValue* BrowsingHistoryHandler::CreateQueryResultValue(
-    const GURL& url, const string16 title, base::Time visit_time,
+    const GURL& url, const string16& title, base::Time visit_time,
     bool is_search_result, const string16& snippet) {
   DictionaryValue* result = new DictionaryValue();
   SetURLAndTitle(result, title, url);
@@ -434,7 +464,53 @@ DictionaryValue* BrowsingHistoryHandler::CreateQueryResultValue(
   return result;
 }
 
-void BrowsingHistoryHandler::ReturnResultsToFrontEnd() {
+void BrowsingHistoryHandler::RemoveDuplicateResults(ListValue* results) {
+  ListValue new_results;
+  std::set<string16> current_day_urls;  // Holds the unique URLs for a day.
+
+  // Keeps track of the day that |current_day_urls| is holding the URLs for,
+  // in order to handle removing per-day duplicates.
+  base::Time current_day_midnight;
+
+  for (ListValue::iterator it = results->begin(); it != results->end(); ) {
+    Value* visit_value;
+    base::Time visit_time;
+    string16 url;
+
+    it = results->Erase(it, &visit_value);
+    GetResultTimeAndUrl(visit_value, &visit_time, &url);
+
+    if (current_day_midnight != visit_time.LocalMidnight()) {
+      current_day_urls.clear();
+      current_day_midnight = visit_time.LocalMidnight();
+    }
+
+    // Keep this visit if it's the first visit to this URL on the current day.
+    if (current_day_urls.count(url) == 0) {
+      current_day_urls.insert(url);
+      new_results.Append(visit_value);
+    } else {
+      delete visit_value;
+    }
+  }
+  results->Swap(&new_results);
+}
+
+void BrowsingHistoryHandler::ReturnResultsToFrontEnd(
+    bool results_need_merge, int max_count) {
+  if (results_need_merge) {
+    std::sort(results_value_.begin(),
+              results_value_.end(),
+              SortByTimeDescending);
+    RemoveDuplicateResults(&results_value_);
+  }
+
+  // Truncate the result set if necessary.
+  if (max_count > 0) {
+    for (int i = results_value_.GetSize(); i > max_count; --i)
+      results_value_.Remove(i - 1, NULL);
+  }
+
   web_ui()->CallJavascriptFunction(
       "historyResult", results_info_value_, results_value_);
   results_info_value_.Clear();
@@ -446,7 +522,8 @@ void BrowsingHistoryHandler::QueryComplete(
     const history::QueryOptions& options,
     HistoryService::Handle request_handle,
     history::QueryResults* results) {
-  unsigned int old_results_count = results_value_.GetSize();
+  // If we're appending to existing results, they will need merging.
+  bool needs_merge = results_value_.GetSize() > 0;
 
   for (size_t i = 0; i < results->size(); ++i) {
     history::URLResult const &page = (*results)[i];
@@ -471,12 +548,8 @@ void BrowsingHistoryHandler::QueryComplete(
     results_info_value_.SetString("queryEndTime",
         getRelativeDateLocalized(base::Time::Now()));
   }
-
-  // The results are sorted if and only if they were empty before.
-  results_info_value_.SetBoolean("sorted", old_results_count == 0);
-
   if (!web_history_timer_.IsRunning())
-    ReturnResultsToFrontEnd();
+    ReturnResultsToFrontEnd(needs_merge, options.max_count);
 }
 
 void BrowsingHistoryHandler::WebHistoryQueryComplete(
@@ -486,11 +559,6 @@ void BrowsingHistoryHandler::WebHistoryQueryComplete(
     const DictionaryValue* results_value) {
   web_history_timer_.Stop();
 
-  // Check if the results have already been received from the history DB.
-  // It is unlikely, but possible, that server results will arrive first.
-  bool has_local_results = !results_info_value_.empty();
-  unsigned int old_results_count = results_value_.GetSize();
-
   const ListValue* events = NULL;
   if (results_value && results_value->GetList("event", &events)) {
     for (unsigned int i = 0; i < events->GetSize(); ++i) {
@@ -532,14 +600,11 @@ void BrowsingHistoryHandler::WebHistoryQueryComplete(
         }
       }
     }
-    // The results are sorted if and only if they were empty before.
-    results_info_value_.SetBoolean("sorted", old_results_count == 0);
   } else if (results_value) {
     NOTREACHED() << "Failed to parse JSON response.";
   }
-
-  if (has_local_results)
-    ReturnResultsToFrontEnd();
+  if (!results_info_value_.empty())
+    ReturnResultsToFrontEnd(true, options.max_count);
 }
 
 void BrowsingHistoryHandler::RemoveComplete() {
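Taken together, the two new helpers implement a simple merge pipeline: sort the combined local and web-history results newest-first, then walk the list once, keeping only the first (and therefore most recent) visit to each URL within a given day. Below is a minimal standalone sketch of that strategy in plain C++ -- the struct, the function name, and the hour-based day bucketing are illustrative stand-ins, not Chromium's base::Value/base::Time API:

#include <algorithm>
#include <set>
#include <string>
#include <vector>

struct Visit {
  std::string url;
  long long hour_offset;  // Hours past an arbitrary baseline.
};

void SortAndRemoveDuplicates(std::vector<Visit>* visits) {
  // Newest first, mirroring SortByTimeDescending() in the patch.
  std::sort(visits->begin(), visits->end(),
            [](const Visit& a, const Visit& b) {
              return a.hour_offset > b.hour_offset;
            });

  std::vector<Visit> deduped;
  std::set<std::string> current_day_urls;  // Unique URLs for the current day.
  long long current_day = -1;
  for (const Visit& v : *visits) {
    long long day = v.hour_offset / 24;  // Stand-in for LocalMidnight().
    if (day != current_day) {
      current_day_urls.clear();
      current_day = day;
    }
    // Keep the visit only if it is the first one to this URL today; since
    // the list is newest-first, that first visit is also the most recent.
    if (current_day_urls.insert(v.url).second)
      deduped.push_back(v);
  }
  visits->swap(deduped);
}

The single-pass set-per-day approach works because sorting guarantees all visits from one day are contiguous, so the set can be cleared at each day boundary instead of keying on (day, URL) pairs.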
diff --git a/chrome/browser/ui/webui/history_ui.h b/chrome/browser/ui/webui/history_ui.h
index d375b9a..608b383 100644
--- a/chrome/browser/ui/webui/history_ui.h
+++ b/chrome/browser/ui/webui/history_ui.h
@@ -45,6 +45,11 @@ class BrowsingHistoryHandler : public content::WebUIMessageHandler,
       const content::NotificationSource& source,
       const content::NotificationDetails& details) OVERRIDE;
 
+  // Removes duplicate visits from the given list of query results, only
+  // retaining the most recent visit to a URL on a particular day. |results|
+  // must already be sorted by visit time, most recent first.
+  static void RemoveDuplicateResults(ListValue* results);
+
  private:
   // The range for which to return results:
   // - ALLTIME: allows access to all the results in a paginated way.
@@ -61,11 +66,15 @@ class BrowsingHistoryHandler : public content::WebUIMessageHandler,
 
   // Creates a history query result value.
   DictionaryValue* CreateQueryResultValue(
-      const GURL& url, const string16 title, base::Time visit_time,
+      const GURL& url, const string16& title, base::Time visit_time,
       bool is_search_result, const string16& snippet);
 
-  // Helper to send the accumulated results of the query to the front end.
-  void ReturnResultsToFrontEnd();
+  // Sends the accumulated results of the query to the front end, truncating
+  // the number to |max_count| if necessary. If |max_count| is 0, the results
+  // are not truncated.
+  // If |remove_duplicates| is true, duplicate visits on the same day are
+  // removed.
+  void ReturnResultsToFrontEnd(bool remove_duplicates, int max_count);
 
   // Callback from |web_history_timer_| when a response from web history has
   // not been received in time.
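The |max_count| contract documented in the new header comment is easy to state in isolation: 0 means "no limit", and any positive value caps the result list after merging. A plain-C++ sketch of that truncation step (the helper name and std::vector are hypothetical stand-ins for the base::ListValue removal loop in ReturnResultsToFrontEnd):

#include <string>
#include <vector>

// Truncate |results| to at most |max_count| entries; 0 disables truncation.
void TruncateResults(std::vector<std::string>* results, int max_count) {
  if (max_count > 0 && results->size() > static_cast<size_t>(max_count))
    results->resize(max_count);
}

Note that the timeout path calls ReturnResultsToFrontEnd(false, 0), so a local-only response is neither merged nor truncated.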
diff --git a/chrome/browser/ui/webui/history_ui_unittest.cc b/chrome/browser/ui/webui/history_ui_unittest.cc
new file mode 100644
index 0000000..72c5eed
--- /dev/null
+++ b/chrome/browser/ui/webui/history_ui_unittest.cc
@@ -0,0 +1,119 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "history_ui.h"
+
+#include "base/utf_string_conversions.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace {
+
+struct TestResult {
+  std::string url;
+  int64 hour_offset;  // Visit time in hours past the baseline time.
+};
+
+// Duplicates on the same day in the local timezone are removed, so set a
+// baseline time in local time.
+const base::Time baseline_time = base::Time::UnixEpoch().LocalMidnight();
+
+// For each item in |results|, create a new Value representing the visit, and
+// insert it into |list_value|.
+void AddResultsToList(TestResult* results,
+                      int results_size,
+                      ListValue* list_value) {
+  for (int i = 0; i < results_size; ++i) {
+    DictionaryValue* result = new DictionaryValue;
+    result->SetString("url", results[i].url);
+    base::Time time =
+        baseline_time + base::TimeDelta::FromHours(results[i].hour_offset);
+    result->SetDouble("time", time.ToJsTime());
+    list_value->Append(result);
+  }
+}
+
+// Returns true if the result at |index| in |results| matches the test data
+// given by |correct_result|, otherwise returns false.
+bool ResultEquals(
+    const ListValue& results, int index, TestResult correct_result) {
+  const DictionaryValue* result;
+  string16 url;
+  double timestamp;
+
+  if (results.GetDictionary(index, &result) &&
+      result->GetDouble("time", &timestamp) &&
+      result->GetString("url", &url)) {
+    base::Time correct_time =
+        baseline_time + base::TimeDelta::FromHours(correct_result.hour_offset);
+    return base::Time::FromJsTime(timestamp) == correct_time &&
+           url == ASCIIToUTF16(correct_result.url);
+  }
+  NOTREACHED();
+  return false;
+}
+
+}  // namespace
+
+// Tests that the RemoveDuplicateResults method correctly removes duplicate
+// visits to the same URL on the same day.
+TEST(HistoryUITest, RemoveDuplicateResults) {
+  {
+    // Basic test that duplicates on the same day are removed.
+    TestResult test_data[] = {
+      { "http://google.com", 0 },
+      { "http://google.de", 1 },
+      { "http://google.com", 2 },
+      { "http://google.com", 3 }
+    };
+    ListValue results;
+    AddResultsToList(test_data, arraysize(test_data), &results);
+    BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+    ASSERT_EQ(2U, results.GetSize());
+    EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+    EXPECT_TRUE(ResultEquals(results, 1, test_data[1]));
+  }
+
+  {
+    // Test that a duplicate URL on the next day is not removed.
+    TestResult test_data[] = {
+      { "http://google.com", 0 },
+      { "http://google.com", 23 },
+      { "http://google.com", 24 },
+    };
+    ListValue results;
+    AddResultsToList(test_data, arraysize(test_data), &results);
+    BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+    ASSERT_EQ(2U, results.GetSize());
+    EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+    EXPECT_TRUE(ResultEquals(results, 1, test_data[2]));
+  }
+
+  {
+    // Test multiple duplicates across multiple days.
+    TestResult test_data[] = {
+      // First day.
+      { "http://google.de", 0 },
+      { "http://google.com", 1 },
+      { "http://google.de", 2 },
+      { "http://google.com", 3 },
+
+      // Second day.
+      { "http://google.de", 24 },
+      { "http://google.com", 25 },
+      { "http://google.de", 26 },
+      { "http://google.com", 27 },
+    };
+    ListValue results;
+    AddResultsToList(test_data, arraysize(test_data), &results);
+    BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+    ASSERT_EQ(4U, results.GetSize());
+    EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+    EXPECT_TRUE(ResultEquals(results, 1, test_data[1]));
+    EXPECT_TRUE(ResultEquals(results, 2, test_data[4]));
+    EXPECT_TRUE(ResultEquals(results, 3, test_data[5]));
+  }
+}
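The tests depend on a lossless round trip through the JavaScript-style millisecond timestamp written by base::Time::ToJsTime() and read back by base::Time::FromJsTime(). A self-contained sketch of the same round trip using std::chrono in place of base::Time (the variable names mirror the test fixture and are illustrative only):

#include <cassert>
#include <chrono>

int main() {
  using std::chrono::hours;
  using std::chrono::milliseconds;
  using std::chrono::system_clock;

  // A visit time expressed as hours past a baseline, as in TestResult.
  const auto baseline = system_clock::time_point();  // The epoch.
  const auto visit = baseline + hours(27);

  // "ToJsTime": milliseconds since the epoch, as JavaScript's Date uses.
  const double js_time = std::chrono::duration_cast<milliseconds>(
      visit.time_since_epoch()).count();

  // "FromJsTime": rebuild the time point and compare for equality, which is
  // what ResultEquals() does with the dictionary's "time" field.
  const auto round_trip =
      system_clock::time_point(milliseconds(static_cast<long long>(js_time)));
  assert(round_trip == visit);
  return 0;
}

Because visit times are whole hours past a local midnight baseline, the millisecond representation is exact and the equality comparison in ResultEquals() cannot be tripped up by rounding.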