summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authordubroy@chromium.org <dubroy@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2012-12-06 15:42:32 +0000
committerdubroy@chromium.org <dubroy@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2012-12-06 15:42:32 +0000
commit3a8f32e09df393644ef80e156d2996af553f9bb7 (patch)
tree1c7eb925402e50df4de8a9ace82ff8969129e5a1
parent24fc4deca892b4957aa96fb74775d1919bbab732 (diff)
downloadchromium_src-3a8f32e09df393644ef80e156d2996af553f9bb7.zip
chromium_src-3a8f32e09df393644ef80e156d2996af553f9bb7.tar.gz
chromium_src-3a8f32e09df393644ef80e156d2996af553f9bb7.tar.bz2
History: Query for results a page at a time, rather than a day at a time.
The history frontend displays visits a page at a time. Add the ability to query the backend for contiguous pages of results, rather than making multiple queries until enough results have accumulated. This additionally fixes a bug where visits would be skipped if a day (or month, when searching) had more than 150 results. BUG= Review URL: https://chromiumcodereview.appspot.com/11415098 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@171498 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--chrome/browser/autocomplete/history_contents_provider.cc3
-rw-r--r--chrome/browser/history/history_backend.cc13
-rw-r--r--chrome/browser/history/history_backend_unittest.cc5
-rw-r--r--chrome/browser/history/history_querying_unittest.cc71
-rw-r--r--chrome/browser/history/history_types.cc80
-rw-r--r--chrome/browser/history/history_types.h102
-rw-r--r--chrome/browser/history/history_types_unittest.cc38
-rw-r--r--chrome/browser/history/text_database.cc67
-rw-r--r--chrome/browser/history/text_database.h3
-rw-r--r--chrome/browser/history/visit_database.cc54
-rw-r--r--chrome/browser/history/visit_database.h3
-rw-r--r--chrome/browser/history/visit_database_unittest.cc21
-rw-r--r--chrome/browser/resources/history/history.js39
-rw-r--r--chrome/browser/ui/webui/history_ui.cc38
-rw-r--r--chrome/test/data/webui/history_browsertest.js106
15 files changed, 424 insertions, 219 deletions
diff --git a/chrome/browser/autocomplete/history_contents_provider.cc b/chrome/browser/autocomplete/history_contents_provider.cc
index a977913..5b8023d 100644
--- a/chrome/browser/autocomplete/history_contents_provider.cc
+++ b/chrome/browser/autocomplete/history_contents_provider.cc
@@ -160,7 +160,8 @@ HistoryContentsProvider::~HistoryContentsProvider() {
void HistoryContentsProvider::QueryComplete(HistoryService::Handle handle,
history::QueryResults* results) {
- results_.AppendResultsBySwapping(results, true);
+ DCHECK(results_.empty());
+ results_.Swap(results);
have_results_ = true;
ConvertResults();
diff --git a/chrome/browser/history/history_backend.cc b/chrome/browser/history/history_backend.cc
index dfcbcd7..6a4b8e5 100644
--- a/chrome/browser/history/history_backend.cc
+++ b/chrome/browser/history/history_backend.cc
@@ -1380,13 +1380,13 @@ void HistoryBackend::QueryHistoryBasic(URLDatabase* url_db,
QueryResults* result) {
// First get all visits.
VisitVector visits;
- visit_db->GetVisibleVisitsInRange(options.begin_time, options.end_time,
- options.max_count, &visits);
+ visit_db->GetVisibleVisitsInRange(options, &visits);
DCHECK(options.max_count == 0 ||
static_cast<int>(visits.size()) <= options.max_count);
// Now add them and the URL rows to the results.
URLResult url_result;
+ QueryCursor cursor;
for (size_t i = 0; i < visits.size(); i++) {
const VisitRow visit = visits[i];
@@ -1419,7 +1419,11 @@ void HistoryBackend::QueryHistoryBasic(URLDatabase* url_db,
// We don't set any of the query-specific parts of the URLResult, since
// snippets and stuff don't apply to basic querying.
result->AppendURLBySwapping(&url_result);
+
+ cursor.rowid_ = visit.visit_id;
+ cursor.time_ = visit.visit_time;
}
+ result->set_cursor(cursor);
if (options.begin_time <= first_recorded_time_)
result->set_reached_beginning(true);
@@ -1441,6 +1445,7 @@ void HistoryBackend::QueryHistoryFTS(const string16& text_query,
// Now get the row and visit information for each one.
URLResult url_result; // Declare outside loop to prevent re-construction.
+ QueryCursor cursor;
for (size_t i = 0; i < fts_matches.size(); i++) {
if (options.max_count != 0 &&
static_cast<int>(result->size()) >= options.max_count)
@@ -1472,7 +1477,11 @@ void HistoryBackend::QueryHistoryFTS(const string16& text_query,
// Add it to the vector, this will clear our |url_row| object as a
// result of the swap.
result->AppendURLBySwapping(&url_result);
+
+ cursor.rowid_ = fts_matches[i].rowid;
+ cursor.time_ = fts_matches[i].time;
}
+ result->set_cursor(cursor);
if (options.begin_time <= first_recorded_time_)
result->set_reached_beginning(true);
diff --git a/chrome/browser/history/history_backend_unittest.cc b/chrome/browser/history/history_backend_unittest.cc
index 5aa2458..4aa573d 100644
--- a/chrome/browser/history/history_backend_unittest.cc
+++ b/chrome/browser/history/history_backend_unittest.cc
@@ -819,8 +819,9 @@ TEST_F(HistoryBackendTest, KeywordGenerated) {
// But no visible visits.
visits.clear();
- backend_->db()->GetVisibleVisitsInRange(base::Time(), base::Time(), 1,
- &visits);
+ QueryOptions query_options;
+ query_options.max_count = 1;
+ backend_->db()->GetVisibleVisitsInRange(query_options, &visits);
EXPECT_TRUE(visits.empty());
// Expire the visits.
diff --git a/chrome/browser/history/history_querying_unittest.cc b/chrome/browser/history/history_querying_unittest.cc
index 7ad3dbc..d1b94fb 100644
--- a/chrome/browser/history/history_querying_unittest.cc
+++ b/chrome/browser/history/history_querying_unittest.cc
@@ -35,7 +35,7 @@ struct TestEntry {
// These are deliberately added out of chronological order. The history
// service should sort them by visit time when returning query results.
- // The correct index sort order is 4 2 3 1 0.
+ // The correct index sort order is 4 2 3 1 7 6 5 0.
{"http://www.google.com/1", "Title 1", 10,
"PAGEONE FOO some body text"},
{"http://www.google.com/3", "Title 3", 8,
@@ -45,6 +45,14 @@ struct TestEntry {
// A more recent visit of the first one.
{"http://example.com/", "Other", 6, "Other"},
+
+ {"http://www.google.com/6", "Title 6", 12,
+ "I'm the second oldest"},
+
+ {"http://www.google.com/4", "Title 4", 11,
+ "duplicate timestamps"},
+ {"http://www.google.com/5", "Title 5", 11,
+ "duplicate timestamps"},
};
// Returns true if the nth result in the given results set matches. It will
@@ -85,6 +93,41 @@ class HistoryQueryTest : public testing::Test {
results->Swap(&last_query_results_);
}
+ // Test paging through results using a cursor.
+ // Defined here so code can be shared for the FTS version and the non-FTS
+ // version.
+ void TestWithCursor(const std::string& query_text,
+ int* expected_results,
+ int results_length) {
+ ASSERT_TRUE(history_.get());
+
+ QueryOptions options;
+ QueryResults results;
+
+ options.max_count = 1;
+ for (int i = 0; i < results_length; i++) {
+ SCOPED_TRACE(testing::Message() << "i = " << i);
+ QueryHistory(query_text, options, &results);
+ ASSERT_EQ(1U, results.size());
+ EXPECT_TRUE(NthResultIs(results, 0, expected_results[i]));
+ options.cursor = results.cursor();
+ }
+ QueryHistory(query_text, options, &results);
+ EXPECT_EQ(0U, results.size());
+
+ // Try using a cursor with a max_count > 1.
+ options.max_count = 2;
+ options.cursor.Clear();
+ for (int i = 0; i < results_length / 2; i++) {
+ SCOPED_TRACE(testing::Message() << "i = " << i);
+ QueryHistory(query_text, options, &results);
+ ASSERT_EQ(2U, results.size());
+ EXPECT_TRUE(NthResultIs(results, 0, expected_results[i * 2]));
+ EXPECT_TRUE(NthResultIs(results, 1, expected_results[i * 2 + 1]));
+ options.cursor = results.cursor();
+ }
+ }
+
protected:
scoped_ptr<HistoryService> history_;
@@ -154,13 +197,18 @@ TEST_F(HistoryQueryTest, Basic) {
QueryOptions options;
QueryResults results;
- // Test duplicate collapsing.
+ // Test duplicate collapsing. 0 is an older duplicate of 4, and should not
+ // appear in the result set.
QueryHistory(std::string(), options, &results);
- EXPECT_EQ(4U, results.size());
+ EXPECT_EQ(7U, results.size());
+
EXPECT_TRUE(NthResultIs(results, 0, 4));
EXPECT_TRUE(NthResultIs(results, 1, 2));
EXPECT_TRUE(NthResultIs(results, 2, 3));
EXPECT_TRUE(NthResultIs(results, 3, 1));
+ EXPECT_TRUE(NthResultIs(results, 4, 7));
+ EXPECT_TRUE(NthResultIs(results, 5, 6));
+ EXPECT_TRUE(NthResultIs(results, 6, 5));
// Next query a time range. The beginning should be inclusive, the ending
// should be exclusive.
@@ -261,6 +309,7 @@ TEST_F(HistoryQueryTest, FTSTitle) {
// Query all time but with a limit on the number of entries. We should
// get the N most recent entries.
+ options.max_count = 3;
QueryHistory("title", options, &results);
EXPECT_EQ(3U, results.size());
EXPECT_TRUE(NthResultIs(results, 0, 2));
@@ -358,4 +407,20 @@ TEST_F(HistoryQueryTest, FTSDupes) {
}
*/
+// Test iterating over pages of results using a cursor.
+TEST_F(HistoryQueryTest, Cursor) {
+ // Since results are fetched 1 and 2 at a time, entries #0 and #6 will not
+ // be de-duplicated.
+ int expected_results[] = { 4, 2, 3, 1, 7, 6, 5, 0 };
+ TestWithCursor("", expected_results, arraysize(expected_results));
+}
+
+TEST_F(HistoryQueryTest, FTSCursor) {
+ // Since results are fetched 1 and 2 at a time, entries #0 and #6 will not
+ // be de-duplicated. Entry #4 does not contain the text "title", so it
+ // shouldn't appear.
+ int expected_results[] = { 2, 3, 1, 7, 6, 5 };
+ TestWithCursor("title", expected_results, arraysize(expected_results));
+}
+
} // namespace history
diff --git a/chrome/browser/history/history_types.cc b/chrome/browser/history/history_types.cc
index 68a32ba..c82893f 100644
--- a/chrome/browser/history/history_types.cc
+++ b/chrome/browser/history/history_types.cc
@@ -8,6 +8,8 @@
#include "base/logging.h"
#include "base/stl_util.h"
+#include "base/string_number_conversions.h"
+#include "base/values.h"
#include "chrome/browser/history/page_usage_data.h"
namespace history {
@@ -116,6 +118,38 @@ void URLResult::SwapResult(URLResult* other) {
title_match_positions_.swap(other->title_match_positions_);
}
+// QueryCursor -----------------------------------------------------------------
+
+QueryCursor::QueryCursor() : rowid_(0) {
+}
+
+QueryCursor::~QueryCursor() {
+}
+
+Value* QueryCursor::ToValue() const {
+ ListValue* value = new ListValue;
+ value->AppendString(base::Int64ToString(time_.ToInternalValue()));
+ value->AppendString(base::Int64ToString(rowid_));
+ return value;
+}
+
+bool QueryCursor::FromValue(const Value* value, QueryCursor* cursor) {
+ if (value->GetType() == Value::TYPE_LIST) {
+ const ListValue* list_value = static_cast<const ListValue*>(value);
+ string16 string_value;
+ int64 timestamp;
+
+ if (list_value->GetString(0, &string_value) &&
+ base::StringToInt64(string_value, &timestamp) &&
+ list_value->GetString(1, &string_value) &&
+ base::StringToInt64(string_value, &cursor->rowid_)) {
+ cursor->time_ = base::Time::FromInternalValue(timestamp);
+ return true;
+ }
+ }
+ return false;
+}
+
// QueryResults ----------------------------------------------------------------
QueryResults::QueryResults() : reached_beginning_(false) {
@@ -146,6 +180,7 @@ const size_t* QueryResults::MatchesForURL(const GURL& url,
void QueryResults::Swap(QueryResults* other) {
std::swap(first_time_searched_, other->first_time_searched_);
std::swap(reached_beginning_, other->reached_beginning_);
+ std::swap(cursor_, other->cursor_);
results_.swap(other->results_);
url_to_results_.swap(other->url_to_results_);
}
@@ -158,31 +193,6 @@ void QueryResults::AppendURLBySwapping(URLResult* result) {
AddURLUsageAtIndex(new_result->url(), results_.size() - 1);
}
-void QueryResults::AppendResultsBySwapping(QueryResults* other,
- bool remove_dupes) {
- if (remove_dupes) {
- // Delete all entries in the other array that are already in this one.
- for (size_t i = 0; i < results_.size(); i++)
- other->DeleteURL(results_[i]->url());
- }
-
- if (first_time_searched_ > other->first_time_searched_)
- std::swap(first_time_searched_, other->first_time_searched_);
-
- if (reached_beginning_ != other->reached_beginning_)
- std::swap(reached_beginning_, other->reached_beginning_);
-
- for (size_t i = 0; i < other->results_.size(); i++) {
- // Just transfer pointer ownership.
- results_.push_back(other->results_[i]);
- AddURLUsageAtIndex(results_.back()->url(), results_.size() - 1);
- }
-
- // We just took ownership of all the results in the input vector.
- other->results_.clear();
- other->url_to_results_.clear();
-}
-
void QueryResults::DeleteURL(const GURL& url) {
// Delete all instances of this URL. We re-query each time since each
// mutation will cause the indices to change.
@@ -269,6 +279,26 @@ void QueryOptions::SetRecentDayRange(int days_ago) {
begin_time = end_time - base::TimeDelta::FromDays(days_ago);
}
+int64 QueryOptions::EffectiveBeginTime() const {
+ return begin_time.ToInternalValue();
+}
+
+int64 QueryOptions::EffectiveEndTime() const {
+ int64 end = end_time.is_null() ?
+ std::numeric_limits<int64>::max() : end_time.ToInternalValue();
+
+ // If the cursor is specified, it provides the true end time.
+ if (!cursor.empty()) {
+ DCHECK(cursor.time_.ToInternalValue() <= end);
+ return cursor.time_.ToInternalValue();
+ }
+ return end;
+}
+
+int QueryOptions::EffectiveMaxCount() const {
+ return max_count ? max_count : std::numeric_limits<int>::max();
+}
+
// KeywordSearchTermVisit -----------------------------------------------------
KeywordSearchTermVisit::KeywordSearchTermVisit() : visits(0) {}
diff --git a/chrome/browser/history/history_types.h b/chrome/browser/history/history_types.h
index 9a5d35c..ac1d777c 100644
--- a/chrome/browser/history/history_types.h
+++ b/chrome/browser/history/history_types.h
@@ -16,6 +16,7 @@
#include "base/memory/ref_counted_memory.h"
#include "base/string16.h"
#include "base/time.h"
+#include "base/values.h"
#include "chrome/browser/history/snippet.h"
#include "chrome/browser/search_engines/template_url_id.h"
#include "chrome/common/ref_counted_util.h"
@@ -31,7 +32,9 @@ namespace history {
// Forward declaration for friend statements.
class HistoryBackend;
+class TextDatabase;
class URLDatabase;
+class VisitDatabase;
// Structure to hold redirect lists for URLs. For a redirect chain
// A -> B -> C, and entry in the map would look like "A => {B -> C}".
@@ -321,6 +324,42 @@ class URLResult : public URLRow {
// We support the implicit copy constructor and operator=.
};
+// QueryCursor -----------------------------------------------------------------
+
+// Represents the point at which a QueryResult ended. This should be treated as
+// an opaque token by clients of the history service.
+class QueryCursor {
+ public:
+ QueryCursor();
+ ~QueryCursor();
+
+ // Returns a newly-allocated Value object representing the cursor. The caller
+ // takes ownership of the value.
+ Value* ToValue() const;
+
+ // Opposite of ToValue() -- converts a Value object to a QueryCursor.
+ // Returns true if the conversion was successful.
+ static bool FromValue(const Value* value, QueryCursor* cursor);
+
+ bool empty() const {
+ return time_.is_null() && rowid_ == 0;
+ }
+
+ void Clear() {
+ time_ = base::Time();
+ rowid_ = 0;
+ }
+
+ private:
+ friend class HistoryBackend;
+ friend struct QueryOptions;
+ friend class TextDatabase;
+ friend class VisitDatabase;
+
+ int64 rowid_;
+ base::Time time_;
+};
+
// QueryResults ----------------------------------------------------------------
// Encapsulates the results of a history query. It supports an ordered list of
@@ -352,6 +391,9 @@ class QueryResults {
void set_reached_beginning(bool reached) { reached_beginning_ = reached; }
bool reached_beginning() { return reached_beginning_; }
+ void set_cursor(const QueryCursor& cursor) { cursor_ = cursor; }
+ QueryCursor cursor() { return cursor_; }
+
size_t size() const { return results_.size(); }
bool empty() const { return results_.empty(); }
@@ -385,12 +427,6 @@ class QueryResults {
// object will be cleared after this call.
void AppendURLBySwapping(URLResult* result);
- // Appends a new result set to the other. The |other| results will be
- // destroyed because the pointer ownership will just be transferred. When
- // |remove_dupes| is set, each URL that appears in this array will be removed
- // from the |other| array before appending.
- void AppendResultsBySwapping(QueryResults* other, bool remove_dupes);
-
// Removes all instances of the given URL from the result set.
void DeleteURL(const GURL& url);
@@ -424,6 +460,23 @@ class QueryResults {
// Maps URLs to entries in results_.
URLToResultIndices url_to_results_;
+ // An opaque value representing the point at which the QueryResult begins.
+ // This value can be passed to a future query through QueryOptions::cursor,
+ // in order to fetch a set of results contiguous to, but strictly older than,
+ // the results in this object. (Just using timestamps doesn't work because
+ // multiple visits can have the same timestamp.)
+ //
+ // For example, to fetch all results, 100 at a time:
+ //
+ // QueryOptions options;
+ // QueryResults results;
+ // options.max_count = 100;
+ // do {
+ // QueryHistory(query_text, options, &results);
+ // options.cursor = results.cursor();
+ // } while (results.size() > 0);
+ QueryCursor cursor_;
+
DISALLOW_COPY_AND_ASSIGN(QueryResults);
};
@@ -432,18 +485,15 @@ class QueryResults {
struct QueryOptions {
QueryOptions();
- // The time range to search for matches in.
- //
- // This will match only the one recent visit of a URL. For text search
- // queries, if the URL was visited in the given time period, but has also been
- // visited more recently than that, it will not be returned. When the text
- // query is empty, this will return the most recent visit within the time
- // range.
+ // The time range to search for matches in. The beginning is inclusive and
+ // the ending is exclusive. Either one (or both) may be null. If |cursor| is
+ // set, it takes precedence over end_time.
//
- // As a special case, if both times are is_null(), then the entire database
- // will be searched. However, if you set one, you must set the other.
- //
- // The beginning is inclusive and the ending is exclusive.
+ // This will match only the one recent visit of a URL. For text search
+ // queries, if the URL was visited in the given time period, but has also
+ // been visited more recently than that, it will not be returned. When the
+ // text query is empty, this will return the most recent visit within the
+ // time range.
base::Time begin_time;
base::Time end_time;
@@ -458,6 +508,24 @@ struct QueryOptions {
// Only search within the page body if true, otherwise search all columns
// including url and time. Defaults to false.
bool body_only;
+
+ // If set, the cursor provides an alternate way to specify the end of the
+ // query range. The value should come from QueryResults::cursor() from a
+ // previous query, which means that the new query should only return results
+ // older than the ones in the previous query. Due to the possibility of
+ // duplicate timestamps, |end_time| is not sufficient for that purpose.
+ QueryCursor cursor;
+
+ // Helpers to get the effective parameters values, since a value of 0 means
+ // "unspecified".
+ int EffectiveMaxCount() const;
+ int64 EffectiveBeginTime() const;
+
+ // The effective end time can be determined by either |end_time| or |cursor|.
+ // If cursor is set, it takes precedence. This allows consecutive queries to
+ // re-use the same QueryOptions to fetch consecutive pages of results by
+ // simply copying the cursor from the QueryResults of the previous query.
+ int64 EffectiveEndTime() const;
};
// KeywordSearchTermVisit -----------------------------------------------------
diff --git a/chrome/browser/history/history_types_unittest.cc b/chrome/browser/history/history_types_unittest.cc
index b71e855..b6fac35 100644
--- a/chrome/browser/history/history_types_unittest.cc
+++ b/chrome/browser/history/history_types_unittest.cc
@@ -129,44 +129,6 @@ TEST(HistoryQueryResult, ResultDeleteURL) {
EXPECT_FALSE(results.MatchesForURL(url2, NULL));
}
-TEST(HistoryQueryResult, AppendResults) {
- GURL url1(kURL1);
- GURL url2(kURL2);
- GURL url3(kURL3);
-
- // This is the base.
- QueryResults results;
- AddSimpleData(&results);
-
- // Now create the appendee.
- QueryResults appendee;
- AddAlternateData(&appendee);
-
- results.AppendResultsBySwapping(&appendee, true);
- CheckHistoryResultConsistency(results);
-
- // There should be 3 results, the second one of the appendee should be
- // deleted because it was already in the first one and we said remove dupes.
- ASSERT_EQ(4U, results.size());
-
- // The first URL should be unchanged in the first two spots.
- size_t match_count;
- const size_t* matches = results.MatchesForURL(url1, &match_count);
- ASSERT_EQ(2U, match_count);
- EXPECT_TRUE((matches[0] == 0 && matches[1] == 1) ||
- (matches[0] == 1 && matches[1] == 0));
-
- // The second URL should be there once after that
- matches = results.MatchesForURL(url2, &match_count);
- ASSERT_EQ(1U, match_count);
- EXPECT_TRUE(matches[0] == 2);
-
- // The third one should be after that.
- matches = results.MatchesForURL(url3, &match_count);
- ASSERT_EQ(1U, match_count);
- EXPECT_TRUE(matches[0] == 3);
-}
-
TEST(HistoryQueryResult, RowSignificance) {
const base::Time& threshold(AutocompleteAgeThreshold());
const GURL url("http://www.google.com/");
diff --git a/chrome/browser/history/text_database.cc b/chrome/browser/history/text_database.cc
index 66d53798..e9bb3c7 100644
--- a/chrome/browser/history/text_database.cc
+++ b/chrome/browser/history/text_database.cc
@@ -289,39 +289,51 @@ void TextDatabase::GetTextMatches(const std::string& query,
base::Time* first_time_searched) {
*first_time_searched = options.begin_time;
- std::string sql = "SELECT url, title, time, offsets(pages), body FROM pages "
- "LEFT OUTER JOIN info ON pages.rowid = info.rowid WHERE ";
+ std::string sql =
+ "SELECT info.rowid, url, title, time, offsets(pages), body FROM pages "
+ "LEFT OUTER JOIN info ON pages.rowid = info.rowid WHERE ";
sql += options.body_only ? "body " : "pages ";
- sql += "MATCH ? AND time >= ? AND time < ? ORDER BY time DESC LIMIT ?";
+ sql += "MATCH ? AND time >= ? AND (time < ?";
+ if (!options.cursor.empty())
+ sql += " OR (time = ? AND info.rowid < ?)";
+ // Times may not be unique, so also sort by rowid to ensure a stable order.
+ sql += ") ORDER BY time DESC, info.rowid DESC LIMIT ?";
- // Generate unique IDs for the two possible variations of the statement,
+ // Generate unique IDs for the different variations of the statement,
// so they don't share the same cached prepared statement.
sql::StatementID body_only_id = SQL_FROM_HERE;
sql::StatementID pages_id = SQL_FROM_HERE;
-
- sql::Statement statement(db_.GetCachedStatement(
- (options.body_only ? body_only_id : pages_id), sql.c_str()));
-
- // When their values indicate "unspecified", saturate the numbers to the max
- // or min to get the correct result.
- int64 effective_begin_time = options.begin_time.is_null() ?
- 0 : options.begin_time.ToInternalValue();
- int64 effective_end_time = options.end_time.is_null() ?
- std::numeric_limits<int64>::max() : options.end_time.ToInternalValue();
- int effective_max_count = options.max_count ?
- options.max_count : std::numeric_limits<int>::max();
-
- statement.BindString(0, query);
- statement.BindInt64(1, effective_begin_time);
- statement.BindInt64(2, effective_end_time);
- statement.BindInt(3, effective_max_count);
+ sql::StatementID pages_with_cursor_id = SQL_FROM_HERE;
+
+ // Ensure that cursor and body_only aren't both specified, because that
+ // combination is not covered here.
+ DCHECK(!options.body_only || options.cursor.empty());
+
+ // Choose the correct statement ID based on the options.
+ sql::StatementID statement_id = pages_id;
+ if (options.body_only)
+ statement_id = body_only_id;
+ else if (!options.cursor.empty())
+ statement_id = pages_with_cursor_id;
+
+ sql::Statement statement(db_.GetCachedStatement(statement_id, sql.c_str()));
+
+ int i = 0;
+ statement.BindString(i++, query);
+ statement.BindInt64(i++, options.EffectiveBeginTime());
+ statement.BindInt64(i++, options.EffectiveEndTime());
+ if (!options.cursor.empty()) {
+ statement.BindInt64(i++, options.EffectiveEndTime());
+ statement.BindInt64(i++, options.cursor.rowid_);
+ }
+ statement.BindInt(i++, options.EffectiveMaxCount());
while (statement.Step()) {
// TODO(brettw) allow canceling the query in the middle.
// if (canceled_or_something)
// break;
- GURL url(statement.ColumnString(0));
+ GURL url(statement.ColumnString(1));
URLSet::const_iterator found_url = found_urls->find(url);
if (found_url != found_urls->end())
continue; // Don't add this duplicate.
@@ -329,16 +341,17 @@ void TextDatabase::GetTextMatches(const std::string& query,
// Fill the results into the vector (avoid copying the URL with Swap()).
results->resize(results->size() + 1);
Match& match = results->at(results->size() - 1);
+ match.rowid = statement.ColumnInt64(0);
match.url.Swap(&url);
- match.title = statement.ColumnString16(1);
- match.time = base::Time::FromInternalValue(statement.ColumnInt64(2));
+ match.title = statement.ColumnString16(2);
+ match.time = base::Time::FromInternalValue(statement.ColumnInt64(3));
// Extract any matches in the title.
- std::string offsets_str = statement.ColumnString(3);
+ std::string offsets_str = statement.ColumnString(4);
Snippet::ExtractMatchPositions(offsets_str, kTitleColumnIndex,
&match.title_match_positions);
- Snippet::ConvertMatchPositionsToWide(statement.ColumnString(1),
+ Snippet::ConvertMatchPositionsToWide(statement.ColumnString(2),
&match.title_match_positions);
// Extract the matches in the body.
@@ -347,7 +360,7 @@ void TextDatabase::GetTextMatches(const std::string& query,
&match_positions);
// Compute the snippet based on those matches.
- std::string body = statement.ColumnString(4);
+ std::string body = statement.ColumnString(5);
match.snippet.ComputeSnippet(match_positions, body);
}
diff --git a/chrome/browser/history/text_database.h b/chrome/browser/history/text_database.h
index 9cf376cb..1f9b4e5 100644
--- a/chrome/browser/history/text_database.h
+++ b/chrome/browser/history/text_database.h
@@ -30,6 +30,9 @@ class TextDatabase {
Match();
~Match();
+ // The database rowid of the match.
+ int64 rowid;
+
// URL of the match.
GURL url;
diff --git a/chrome/browser/history/visit_database.cc b/chrome/browser/history/visit_database.cc
index f15b157..85656a2 100644
--- a/chrome/browser/history/visit_database.cc
+++ b/chrome/browser/history/visit_database.cc
@@ -317,32 +317,42 @@ bool VisitDatabase::GetVisitsInRangeForTransition(
return FillVisitVector(statement, visits);
}
-void VisitDatabase::GetVisibleVisitsInRange(base::Time begin_time,
- base::Time end_time,
- int max_count,
+void VisitDatabase::GetVisibleVisitsInRange(const QueryOptions& options,
VisitVector* visits) {
visits->clear();
+ std::string sql = "SELECT" HISTORY_VISIT_ROW_FIELDS "FROM visits "
+ "WHERE visit_time >= ? AND (visit_time < ? ";
+ if (!options.cursor.empty())
+ sql += " OR (visit_time = ? and id < ?)";
+
// The visit_time values can be duplicated in a redirect chain, so we sort
// by id too, to ensure a consistent ordering just in case.
- sql::Statement statement(GetDB().GetCachedStatement(SQL_FROM_HERE,
- "SELECT" HISTORY_VISIT_ROW_FIELDS "FROM visits "
- "WHERE visit_time >= ? AND visit_time < ? "
- "AND (transition & ?) != 0 " // CHAIN_END
+ sql += ") AND (transition & ?) != 0 " // CHAIN_END
"AND (transition & ?) NOT IN (?, ?, ?) " // NO SUBFRAME or
// KEYWORD_GENERATED
- "ORDER BY visit_time DESC, id DESC"));
-
- // Note that we use min/max values for querying unlimited ranges of time using
- // the same statement. Since the time has an index, this will be about the
- // same amount of work as just doing a query for everything with no qualifier.
- int64 end = end_time.ToInternalValue();
- statement.BindInt64(0, begin_time.ToInternalValue());
- statement.BindInt64(1, end ? end : std::numeric_limits<int64>::max());
- statement.BindInt(2, content::PAGE_TRANSITION_CHAIN_END);
- statement.BindInt(3, content::PAGE_TRANSITION_CORE_MASK);
- statement.BindInt(4, content::PAGE_TRANSITION_AUTO_SUBFRAME);
- statement.BindInt(5, content::PAGE_TRANSITION_MANUAL_SUBFRAME);
- statement.BindInt(6, content::PAGE_TRANSITION_KEYWORD_GENERATED);
+ "ORDER BY visit_time DESC, id DESC";
+
+ // Generate unique IDs for the different variations of the statement,
+ // so they don't share the same cached prepared statement.
+ sql::StatementID id_with_cursor = SQL_FROM_HERE;
+ sql::StatementID id_without_cursor = SQL_FROM_HERE;
+
+ sql::Statement statement(GetDB().GetCachedStatement(
+ options.cursor.empty() ? id_without_cursor : id_with_cursor,
+ sql.c_str()));
+
+ int i = 0;
+ statement.BindInt64(i++, options.EffectiveBeginTime());
+ statement.BindInt64(i++, options.EffectiveEndTime());
+ if (!options.cursor.empty()) {
+ statement.BindInt64(i++, options.EffectiveEndTime());
+ statement.BindInt64(i++, options.cursor.rowid_);
+ }
+ statement.BindInt(i++, content::PAGE_TRANSITION_CHAIN_END);
+ statement.BindInt(i++, content::PAGE_TRANSITION_CORE_MASK);
+ statement.BindInt(i++, content::PAGE_TRANSITION_AUTO_SUBFRAME);
+ statement.BindInt(i++, content::PAGE_TRANSITION_MANUAL_SUBFRAME);
+ statement.BindInt(i++, content::PAGE_TRANSITION_KEYWORD_GENERATED);
std::set<URLID> found_urls;
while (statement.Step()) {
@@ -354,8 +364,10 @@ void VisitDatabase::GetVisibleVisitsInRange(base::Time begin_time,
found_urls.insert(visit.url_id);
visits->push_back(visit);
- if (max_count > 0 && static_cast<int>(visits->size()) >= max_count)
+ if (options.max_count > 0 &&
+ static_cast<int>(visits->size()) >= options.max_count) {
break;
+ }
}
}
diff --git a/chrome/browser/history/visit_database.h b/chrome/browser/history/visit_database.h
index c75e386..c14cf7f 100644
--- a/chrome/browser/history/visit_database.h
+++ b/chrome/browser/history/visit_database.h
@@ -109,8 +109,7 @@ class VisitDatabase {
//
// Only one visit for each URL will be returned, and it will be the most
// recent one in the time range.
- void GetVisibleVisitsInRange(base::Time begin_time, base::Time end_time,
- int max_count,
+ void GetVisibleVisitsInRange(const QueryOptions& options,
VisitVector* visits);
// Fills all visits in the given time ranges into the given vector that are
diff --git a/chrome/browser/history/visit_database_unittest.cc b/chrome/browser/history/visit_database_unittest.cc
index 4a5388c..b9c0b11 100644
--- a/chrome/browser/history/visit_database_unittest.cc
+++ b/chrome/browser/history/visit_database_unittest.cc
@@ -290,23 +290,26 @@ TEST_F(VisitDatabaseTest, GetVisibleVisitsInRange) {
// Query the visits for all time. We should not get the first
// (duplicate of the second) or the redirect or subframe visits.
VisitVector results;
- GetVisibleVisitsInRange(Time(), Time(), 0, &results);
+ QueryOptions options;
+ GetVisibleVisitsInRange(options, &results);
ASSERT_EQ(static_cast<size_t>(2), results.size());
EXPECT_TRUE(IsVisitInfoEqual(results[0], test_visit_rows[3]) &&
IsVisitInfoEqual(results[1], test_visit_rows[1]));
+ // Query for a max count and make sure we get only that number.
+ options.max_count = 1;
+ GetVisibleVisitsInRange(options, &results);
+ ASSERT_EQ(static_cast<size_t>(1), results.size());
+ EXPECT_TRUE(IsVisitInfoEqual(results[0], test_visit_rows[3]));
+
// Query a time range and make sure beginning is inclusive and ending is
// exclusive.
- GetVisibleVisitsInRange(test_visit_rows[1].visit_time,
- test_visit_rows[3].visit_time, 0,
- &results);
+ options.begin_time = test_visit_rows[1].visit_time;
+ options.end_time = test_visit_rows[3].visit_time;
+ options.max_count = 0;
+ GetVisibleVisitsInRange(options, &results);
ASSERT_EQ(static_cast<size_t>(1), results.size());
EXPECT_TRUE(IsVisitInfoEqual(results[0], test_visit_rows[1]));
-
- // Query for a max count and make sure we get only that number.
- GetVisibleVisitsInRange(Time(), Time(), 1, &results);
- ASSERT_EQ(static_cast<size_t>(1), results.size());
- EXPECT_TRUE(IsVisitInfoEqual(results[0], test_visit_rows[3]));
}
TEST_F(VisitDatabaseTest, VisitSource) {
diff --git a/chrome/browser/resources/history/history.js b/chrome/browser/resources/history/history.js
index cd53480..d53cb69 100644
--- a/chrome/browser/resources/history/history.js
+++ b/chrome/browser/resources/history/history.js
@@ -7,7 +7,6 @@
///////////////////////////////////////////////////////////////////////////////
// Globals:
/** @const */ var RESULTS_PER_PAGE = 150;
-/** @const */ var MAX_SEARCH_DEPTH_MONTHS = 18;
// Amount of time between pageviews that we consider a 'break' in browsing,
// measured in milliseconds.
@@ -311,7 +310,7 @@ HistoryModel.prototype.setSearchText = function(searchText, opt_page) {
this.clearModel_();
this.searchText_ = searchText;
this.requestedPage_ = opt_page ? opt_page : 0;
- this.queryHistory_(false);
+ this.queryHistory_();
};
/**
@@ -323,7 +322,7 @@ HistoryModel.prototype.reload = function() {
this.clearModel_();
this.searchText_ = search;
this.requestedPage_ = page;
- this.queryHistory_(false);
+ this.queryHistory_();
};
/**
@@ -353,9 +352,7 @@ HistoryModel.prototype.addResults = function(info, results) {
$('loading-spinner').hidden = true;
this.inFlight_ = false;
this.isQueryFinished_ = info.finished;
-
- if (this.searchText_ && this.searchDepth_ >= MAX_SEARCH_DEPTH_MONTHS)
- this.isQueryFinished_ = true;
+ this.queryCursor_ = info.cursor;
// If our results aren't for our current search term, they're rubbish.
if (info.term != this.searchText_)
@@ -424,10 +421,6 @@ HistoryModel.prototype.clearModel_ = function() {
this.inFlight_ = false; // Whether a query is inflight.
this.searchText_ = '';
- // Keeps track of how far back we've gone in the current query.
- // This is in days when browsing, and in months when searching.
- this.searchDepth_ = 0;
-
this.visits_ = []; // Date-sorted list of visits (most recent first).
this.last_id_ = 0;
selectionAnchor = -1;
@@ -441,6 +434,10 @@ HistoryModel.prototype.clearModel_ = function() {
// currently held in |this.visits_|.
this.isQueryFinished_ = false;
+ // An opaque value that is returned with the query results. This is used to
+ // fetch the next page of results for a query.
+ this.queryCursor_ = null;
+
if (this.view_)
this.view_.clear_();
};
@@ -457,7 +454,7 @@ HistoryModel.prototype.updateSearch_ = function() {
// Try to fetch more results if the current page isn't full.
if (!doneLoading && !this.inFlight_)
- this.queryHistory_(true);
+ this.queryHistory_();
// If we have any data for the requested page, show it.
if (this.changed && this.haveDataForPage_(this.requestedPage_)) {
@@ -468,18 +465,22 @@ HistoryModel.prototype.updateSearch_ = function() {
/**
* Query for history, either for a search or time-based browsing.
- *
- * @param {boolean} continueQuery If true, query for the next page of
- * results from the current query. If false, start a new query.
- *
* @private
*/
-HistoryModel.prototype.queryHistory_ = function(continueQuery) {
- this.searchDepth_ = continueQuery ? this.searchDepth_ + 1 : 0;
- chrome.send('queryHistory',
- [this.searchText_, this.searchDepth_, RESULTS_PER_PAGE]);
+HistoryModel.prototype.queryHistory_ = function() {
+ var endTime = 0;
+
+ // If there are already some visits, pick up the previous query where it
+ // left off.
+ if (this.visits_.length > 0) {
+ var lastVisit = this.visits_.slice(-1)[0];
+ endTime = lastVisit.time;
+ }
+
$('loading-spinner').hidden = false;
this.inFlight_ = true;
+ chrome.send('queryHistory',
+ [this.searchText_, endTime, this.queryCursor_, RESULTS_PER_PAGE]);
};
/**
diff --git a/chrome/browser/ui/webui/history_ui.cc b/chrome/browser/ui/webui/history_ui.cc
index 5fb4e99..2a29398 100644
--- a/chrome/browser/ui/webui/history_ui.cc
+++ b/chrome/browser/ui/webui/history_ui.cc
@@ -209,21 +209,38 @@ void BrowsingHistoryHandler::QueryHistory(
void BrowsingHistoryHandler::HandleQueryHistory(const ListValue* args) {
history::QueryOptions options;
- int search_depth = 0;
- // There are three required arguments: the text to search for (which may be
- // empty), the requested depth (in months if searching, otherwise in days),
- // and the max number of results to return.
+ // Parse the arguments from JavaScript. There are four required arguments:
+ // - the text to search for (may be empty)
+ // - the end time of the range to search (see QueryOptions.end_time)
+ // - the search cursor, an opaque value from a previous query result, which
+ // allows this query to pick up where the previous one left off. May be
+ // null or undefined.
+ // - the maximum number of results to return (may be 0, meaning that there
+ // is no maximum)
string16 search_text = ExtractStringValue(args);
- if (!ExtractIntegerValueAtIndex(args, 1, &search_depth))
+
+ double end_time;
+ if (!args->GetDouble(1, &end_time))
return;
- if (!ExtractIntegerValueAtIndex(args, 2, &options.max_count))
+ if (end_time)
+ options.end_time = base::Time::FromJsTime(end_time);
+
+ const Value* cursor_value;
+ base::FundamentalValue cursor_not_specified(0.0);
+
+ // Get the cursor. It must be either null, or a list.
+ if (!args->Get(2, &cursor_value) ||
+ (!cursor_value->IsType(Value::TYPE_NULL) &&
+ !history::QueryCursor::FromValue(cursor_value, &options.cursor))) {
+ NOTREACHED() << "Failed to convert argument 2. ";
return;
+ }
- if (search_text.empty())
- SetQueryDepthInDays(options, search_depth);
- else
- SetQueryDepthInMonths(options, search_depth);
+ if (!ExtractIntegerValueAtIndex(args, 3, &options.max_count)) {
+ NOTREACHED() << "Failed to convert argument 3.";
+ return;
+ }
QueryHistory(search_text, options);
}
@@ -345,6 +362,7 @@ void BrowsingHistoryHandler::QueryComplete(
DictionaryValue info_value;
info_value.SetString("term", search_text);
info_value.SetBoolean("finished", results->reached_beginning());
+ info_value.Set("cursor", results->cursor().ToValue());
web_ui()->CallJavascriptFunction("historyResult", info_value, results_value);
}
diff --git a/chrome/test/data/webui/history_browsertest.js b/chrome/test/data/webui/history_browsertest.js
index dc8afaf..9e46c33 100644
--- a/chrome/test/data/webui/history_browsertest.js
+++ b/chrome/test/data/webui/history_browsertest.js
@@ -3,6 +3,7 @@
// found in the LICENSE file.
/** @const */ var TOTAL_RESULT_COUNT = 160;
+/** @const */ var WAIT_TIMEOUT = 200;
/**
* Create a fake history result with the given timestamp.
@@ -61,7 +62,10 @@ BaseHistoryWebUITest.prototype = {
* @param {Array} arguments The original arguments to queryHistory.
*/
queryHistoryStub_: function(args) {
- historyResult({ term: args[0], finished: true, }, []);
+ // Respond asynchronously to simulate real behavior in Chrome.
+ setTimeout(function() {
+ historyResult({ term: args[0], finished: true, cursor: 0 }, []);
+ }, 1);
},
};
@@ -77,24 +81,34 @@ HistoryWebUITest.prototype = {
__proto__: BaseHistoryWebUITest.prototype,
queryHistoryStub_: (function() {
- // The number of results to return per call to getHistory/searchHistory.
- var RESULT_SIZE = 20;
-
// Prepare a list of fake history results. The entries will begin at
// 1:00 AM on Sept 2, 2008, and will be spaced two minutes apart.
var timestamp = new Date(2008, 9, 2, 1, 0).getTime();
- var history = [];
- for (var i = 0; i < TOTAL_RESULT_COUNT; i++) {
- history.push(createHistoryEntry(timestamp));
- timestamp -= 2 * 60 * 1000; // Next visit is two minutes earlier.
- }
return function (args) {
- var start = args[1] * RESULT_SIZE;
- var results = history.slice(start, start + RESULT_SIZE);
- historyResult(
- { term: args[0], finished: start + RESULT_SIZE >= history.length, },
- results);
+ var searchText = args[0];
+ var endTime = args[1];
+ var cursor = args[2];
+ var maxCount = args[3];
+
+ // Generate the requested number of results.
+ var results = [];
+ var resultCount = Math.min(maxCount, TOTAL_RESULT_COUNT - cursor);
+ for (var i = 0; i < resultCount; i++) {
+ results.push(createHistoryEntry(timestamp));
+ timestamp -= 2 * 60 * 1000; // Next visit is two minutes earlier.
+ }
+
+ // Respond asynchronously to simulate real behavior in Chrome.
+ setTimeout(function() {
+ historyResult(
+ {
+ term: searchText,
+ finished: cursor + results.length >= TOTAL_RESULT_COUNT,
+ cursor: cursor + results.length,
+ },
+ results);
+ }, 1);
}
}()),
};
@@ -157,33 +171,39 @@ TEST_F('HistoryWebUITest', 'basicTest', function() {
// Go to the next page.
$('older-button').click();
- resultCount += document.querySelectorAll('.entry').length;
-
- // Check that the two pages include all of the entries.
- expectEquals(TOTAL_RESULT_COUNT, resultCount);
-
- // Check that the day header was properly continued -- the header for the
- // last day on the first page should be a substring of the header on the
- // second page. E.g. "Wed, Oct 8, 2008" and "Web, Oct 8, 2008 - cont'd".
- var newDayHeaders = document.querySelectorAll('.day');
- expectEquals(1, newDayHeaders.length);
- expectEquals(0,
- newDayHeaders[0].textContent.indexOf(dayHeaders[1].textContent));
-
- // Check that the "Newest" and "Newer" links are now visible, but the "Older"
- // link is hidden.
- expectEquals(3, document.querySelectorAll('.link-button').length)
- expectFalse($('newest-button').hidden);
- expectFalse($('newer-button').hidden);
- expectTrue($('older-button').hidden);
-
- // Go back to the first page, and check that the same day headers are there.
- $('newest-button').click();
- var newDayHeaders = document.querySelectorAll('.day');
- expectEquals(2, newDayHeaders.length);
- expectNotEquals(newDayHeaders[0].textContent, newDayHeaders[1].textContent);
- expectEquals(dayHeaders[0].textContent, newDayHeaders[0].textContent);
- expectEquals(dayHeaders[1].textContent, newDayHeaders[1].textContent);
-
- testDone();
+ setTimeout(function() {
+ resultCount += document.querySelectorAll('.entry').length;
+
+ // Check that the two pages include all of the entries.
+ expectEquals(TOTAL_RESULT_COUNT, resultCount);
+
+ // Check that the day header was properly continued -- the header for the
+ // last day on the first page should be a substring of the header on the
+    // second page. E.g. "Wed, Oct 8, 2008" and "Wed, Oct 8, 2008 - cont'd".
+ var newDayHeaders = document.querySelectorAll('.day');
+ expectEquals(1, newDayHeaders.length);
+ expectEquals(0,
+ newDayHeaders[0].textContent.indexOf(dayHeaders[1].textContent));
+
+ // Check that the "Newest" and "Newer" links are now visible, but the
+ // "Older" link is hidden.
+ expectEquals(3, document.querySelectorAll('.link-button').length)
+ expectFalse($('newest-button').hidden);
+ expectFalse($('newer-button').hidden);
+ expectTrue($('older-button').hidden);
+
+ // Go back to the first page, and check that the same day headers are there.
+ $('newest-button').click();
+ setTimeout(function() {
+ var newDayHeaders = document.querySelectorAll('.day');
+ expectEquals(2, newDayHeaders.length);
+
+ expectNotEquals(newDayHeaders[0].textContent,
+ newDayHeaders[1].textContent);
+ expectEquals(dayHeaders[0].textContent, newDayHeaders[0].textContent);
+ expectEquals(dayHeaders[1].textContent, newDayHeaders[1].textContent);
+
+ testDone();
+ }, WAIT_TIMEOUT);
+ }, WAIT_TIMEOUT);
});