author    dubroy@chromium.org    2013-02-18 14:23:35 +0000
committer dubroy@chromium.org    2013-02-18 14:23:35 +0000
commit    19d65f2951aeb3dd87a2782a2fa04bf24c146f37 (patch)
tree      d8afa58feb84d5761ac652cb11bee8af5e258cc5 /chrome/browser/ui/webui/history_ui_unittest.cc
parent    664592aa4910ea82259e59f154136220bd9d9b20 (diff)
History: Pass min/max timestamps as query parameters to history server.
Queries to the history server should match the same time range as the query
to the history backend. Also de-dupe and sort results before handing them to
the JS frontend.

BUG=None
Review URL: https://codereview.chromium.org/12217125

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@183116 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'chrome/browser/ui/webui/history_ui_unittest.cc')
-rw-r--r--  chrome/browser/ui/webui/history_ui_unittest.cc  119
1 file changed, 119 insertions, 0 deletions
diff --git a/chrome/browser/ui/webui/history_ui_unittest.cc b/chrome/browser/ui/webui/history_ui_unittest.cc
new file mode 100644
index 0000000..72c5eed
--- /dev/null
+++ b/chrome/browser/ui/webui/history_ui_unittest.cc
@@ -0,0 +1,119 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "history_ui.h"
+
+#include "base/utf_string_conversions.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace {
+
+struct TestResult {
+ std::string url;
+ int64 hour_offset; // Visit time in hours past the baseline time.
+};
+
+// Duplicates on the same day in the local timezone are removed, so set a
+// baseline time in local time.
+const base::Time baseline_time = base::Time::UnixEpoch().LocalMidnight();
+
+// For each item in |results|, create a new Value representing the visit, and
+// insert it into |list_value|.
+void AddResultsToList(TestResult* results,
+ int results_size,
+ ListValue* list_value) {
+ for (int i = 0; i < results_size; ++i) {
+ DictionaryValue* result = new DictionaryValue;
+ result->SetString("url", results[i].url);
+ base::Time time =
+ baseline_time + base::TimeDelta::FromHours(results[i].hour_offset);
+ result->SetDouble("time", time.ToJsTime());
+ list_value->Append(result);
+ }
+}
+
+// Returns true if the result at |index| in |results| matches the test data
+// given by |correct_result|; otherwise returns false.
+bool ResultEquals(
+ const ListValue& results, int index, TestResult correct_result) {
+ const DictionaryValue* result;
+ string16 url;
+ double timestamp;
+
+ if (results.GetDictionary(index, &result) &&
+ result->GetDouble("time", &timestamp) &&
+ result->GetString("url", &url)) {
+ base::Time correct_time =
+ baseline_time + base::TimeDelta::FromHours(correct_result.hour_offset);
+ return base::Time::FromJsTime(timestamp) == correct_time &&
+ url == ASCIIToUTF16(correct_result.url);
+ }
+ NOTREACHED();
+ return false;
+}
+
+} // namespace
+
+// Tests that the RemoveDuplicateResults method correctly removes duplicate
+// visits to the same URL on the same day.
+TEST(HistoryUITest, RemoveDuplicateResults) {
+ {
+ // Basic test that duplicates on the same day are removed.
+ TestResult test_data[] = {
+ { "http://google.com", 0 },
+ { "http://google.de", 1 },
+ { "http://google.com", 2 },
+ { "http://google.com", 3 }
+ };
+ ListValue results;
+ AddResultsToList(test_data, arraysize(test_data), &results);
+ BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+ ASSERT_EQ(2U, results.GetSize());
+ EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+ EXPECT_TRUE(ResultEquals(results, 1, test_data[1]));
+ }
+
+ {
+ // Test that a duplicate URL on the next day is not removed.
+ TestResult test_data[] = {
+ { "http://google.com", 0 },
+ { "http://google.com", 23 },
+ { "http://google.com", 24 },
+ };
+ ListValue results;
+ AddResultsToList(test_data, arraysize(test_data), &results);
+ BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+ ASSERT_EQ(2U, results.GetSize());
+ EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+ EXPECT_TRUE(ResultEquals(results, 1, test_data[2]));
+ }
+
+ {
+ // Test multiple duplicates across multiple days.
+ TestResult test_data[] = {
+ // First day.
+ { "http://google.de", 0 },
+ { "http://google.com", 1 },
+ { "http://google.de", 2 },
+ { "http://google.com", 3 },
+
+ // Second day.
+ { "http://google.de", 24 },
+ { "http://google.com", 25 },
+ { "http://google.de", 26 },
+ { "http://google.com", 27 },
+ };
+ ListValue results;
+ AddResultsToList(test_data, arraysize(test_data), &results);
+ BrowsingHistoryHandler::RemoveDuplicateResults(&results);
+
+ ASSERT_EQ(4U, results.GetSize());
+ EXPECT_TRUE(ResultEquals(results, 0, test_data[0]));
+ EXPECT_TRUE(ResultEquals(results, 1, test_data[1]));
+ EXPECT_TRUE(ResultEquals(results, 2, test_data[4]));
+ EXPECT_TRUE(ResultEquals(results, 3, test_data[5]));
+ }
+}