summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--base/files/important_file_writer.cc37
-rw-r--r--base/files/important_file_writer.h15
-rw-r--r--base/files/important_file_writer_unittest.cc73
-rw-r--r--base/prefs/json_pref_store.cc150
-rw-r--r--base/prefs/json_pref_store.h48
-rw-r--r--base/prefs/json_pref_store_unittest.cc168
-rw-r--r--base/prefs/persistent_pref_store.h25
-rw-r--r--base/prefs/pref_filter.h26
-rw-r--r--chrome/browser/prefs/chrome_pref_service_factory.cc17
-rw-r--r--chrome/browser/prefs/interceptable_pref_filter.cc38
-rw-r--r--chrome/browser/prefs/interceptable_pref_filter.h64
-rw-r--r--chrome/browser/prefs/pref_hash_filter.cc116
-rw-r--r--chrome/browser/prefs/pref_hash_filter.h31
-rw-r--r--chrome/browser/prefs/pref_hash_filter_unittest.cc254
-rw-r--r--chrome/browser/prefs/profile_pref_store_manager.cc76
-rw-r--r--chrome/browser/prefs/profile_pref_store_manager.h4
-rw-r--r--chrome/browser/prefs/profile_pref_store_manager_unittest.cc20
-rw-r--r--chrome/browser/prefs/tracked/segregated_pref_store.cc52
-rw-r--r--chrome/browser/prefs/tracked/segregated_pref_store.h17
-rw-r--r--chrome/browser/prefs/tracked/segregated_pref_store_unittest.cc31
-rw-r--r--chrome/browser/prefs/tracked/tracked_preferences_migration.cc267
-rw-r--r--chrome/browser/prefs/tracked/tracked_preferences_migration.h37
-rw-r--r--chrome/browser/prefs/tracked/tracked_preferences_migration_unittest.cc501
-rw-r--r--chrome/chrome_browser.gypi4
-rw-r--r--chrome/chrome_tests_unit.gypi1
25 files changed, 1590 insertions, 482 deletions
diff --git a/base/files/important_file_writer.cc b/base/files/important_file_writer.cc
index 9cbbd73..1250824 100644
--- a/base/files/important_file_writer.cc
+++ b/base/files/important_file_writer.cc
@@ -23,6 +23,7 @@
#include "base/metrics/histogram.h"
#include "base/strings/string_number_conversions.h"
#include "base/task_runner.h"
+#include "base/task_runner_util.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
@@ -93,13 +94,13 @@ bool ImportantFileWriter::WriteFileAtomically(const FilePath& path,
return true;
}
-ImportantFileWriter::ImportantFileWriter(
- const FilePath& path, base::SequencedTaskRunner* task_runner)
- : path_(path),
- task_runner_(task_runner),
- serializer_(NULL),
- commit_interval_(TimeDelta::FromMilliseconds(
- kDefaultCommitIntervalMs)) {
+ImportantFileWriter::ImportantFileWriter(const FilePath& path,
+ base::SequencedTaskRunner* task_runner)
+ : path_(path),
+ task_runner_(task_runner),
+ serializer_(NULL),
+ commit_interval_(TimeDelta::FromMilliseconds(kDefaultCommitIntervalMs)),
+ weak_factory_(this) {
DCHECK(CalledOnValidThread());
DCHECK(task_runner_.get());
}
@@ -126,11 +127,13 @@ void ImportantFileWriter::WriteNow(const std::string& data) {
if (HasPendingWrite())
timer_.Stop();
- if (!task_runner_->PostTask(
+ if (!base::PostTaskAndReplyWithResult(
+ task_runner_,
FROM_HERE,
MakeCriticalClosure(
- Bind(IgnoreResult(&ImportantFileWriter::WriteFileAtomically),
- path_, data)))) {
+ Bind(&ImportantFileWriter::WriteFileAtomically, path_, data)),
+ Bind(&ImportantFileWriter::ForwardSuccessfulWrite,
+ weak_factory_.GetWeakPtr()))) {
// Posting the task to background message loop is not expected
// to fail, but if it does, avoid losing data and just hit the disk
// on the current thread.
@@ -164,4 +167,18 @@ void ImportantFileWriter::DoScheduledWrite() {
serializer_ = NULL;
}
+void ImportantFileWriter::RegisterOnNextSuccessfulWriteCallback(
+ const base::Closure& on_next_successful_write) {
+ DCHECK(on_next_successful_write_.is_null());
+ on_next_successful_write_ = on_next_successful_write;
+}
+
+void ImportantFileWriter::ForwardSuccessfulWrite(bool result) {
+ DCHECK(CalledOnValidThread());
+ if (result && !on_next_successful_write_.is_null()) {
+ on_next_successful_write_.Run();
+ on_next_successful_write_.Reset();
+ }
+}
+
} // namespace base
diff --git a/base/files/important_file_writer.h b/base/files/important_file_writer.h
index ba1c745..bdc6ba1 100644
--- a/base/files/important_file_writer.h
+++ b/base/files/important_file_writer.h
@@ -9,6 +9,7 @@
#include "base/base_export.h"
#include "base/basictypes.h"
+#include "base/callback.h"
#include "base/files/file_path.h"
#include "base/memory/ref_counted.h"
#include "base/threading/non_thread_safe.h"
@@ -89,6 +90,11 @@ class BASE_EXPORT ImportantFileWriter : public NonThreadSafe {
// Serialize data pending to be saved and execute write on backend thread.
void DoScheduledWrite();
+ // Registers |on_next_successful_write| to be called once, on the next
+ // successful write event. Only one callback can be set at once.
+ void RegisterOnNextSuccessfulWriteCallback(
+ const base::Closure& on_next_successful_write);
+
TimeDelta commit_interval() const {
return commit_interval_;
}
@@ -98,6 +104,13 @@ class BASE_EXPORT ImportantFileWriter : public NonThreadSafe {
}
private:
+  // If |result| is true and |on_next_successful_write_| is set, invokes
+  // |on_next_successful_write_| and then resets it; no-ops otherwise.
+ void ForwardSuccessfulWrite(bool result);
+
+ // Invoked once and then reset on the next successful write event.
+ base::Closure on_next_successful_write_;
+
// Path being written to.
const FilePath path_;
@@ -113,6 +126,8 @@ class BASE_EXPORT ImportantFileWriter : public NonThreadSafe {
// Time delta after which scheduled data will be written to disk.
TimeDelta commit_interval_;
+ WeakPtrFactory<ImportantFileWriter> weak_factory_;
+
DISALLOW_COPY_AND_ASSIGN(ImportantFileWriter);
};
diff --git a/base/files/important_file_writer_unittest.cc b/base/files/important_file_writer_unittest.cc
index 02a5f76..3f62fe4 100644
--- a/base/files/important_file_writer_unittest.cc
+++ b/base/files/important_file_writer_unittest.cc
@@ -4,6 +4,7 @@
#include "base/files/important_file_writer.h"
+#include "base/bind.h"
#include "base/compiler_specific.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
@@ -41,6 +42,41 @@ class DataSerializer : public ImportantFileWriter::DataSerializer {
const std::string data_;
};
+class SuccessfulWriteObserver {
+ public:
+ SuccessfulWriteObserver() : successful_write_observed_(false) {}
+
+ // Register on_successful_write() to be called on the next successful write
+ // of |writer|.
+ void ObserveNextSuccessfulWrite(ImportantFileWriter* writer);
+
+ // Returns true if a successful write was observed via on_successful_write()
+ // and resets the observation state to false regardless.
+ bool GetAndResetObservationState();
+
+ private:
+ void on_successful_write() {
+ EXPECT_FALSE(successful_write_observed_);
+ successful_write_observed_ = true;
+ }
+
+ bool successful_write_observed_;
+
+ DISALLOW_COPY_AND_ASSIGN(SuccessfulWriteObserver);
+};
+
+void SuccessfulWriteObserver::ObserveNextSuccessfulWrite(
+ ImportantFileWriter* writer) {
+ writer->RegisterOnNextSuccessfulWriteCallback(base::Bind(
+ &SuccessfulWriteObserver::on_successful_write, base::Unretained(this)));
+}
+
+bool SuccessfulWriteObserver::GetAndResetObservationState() {
+ bool was_successful_write_observed = successful_write_observed_;
+ successful_write_observed_ = false;
+ return was_successful_write_observed;
+}
+
} // namespace
class ImportantFileWriterTest : public testing::Test {
@@ -52,6 +88,7 @@ class ImportantFileWriterTest : public testing::Test {
}
protected:
+ SuccessfulWriteObserver successful_write_observer_;
FilePath file_;
MessageLoop loop_;
@@ -62,11 +99,47 @@ class ImportantFileWriterTest : public testing::Test {
TEST_F(ImportantFileWriterTest, Basic) {
ImportantFileWriter writer(file_, MessageLoopProxy::current().get());
EXPECT_FALSE(PathExists(writer.path()));
+ EXPECT_FALSE(successful_write_observer_.GetAndResetObservationState());
+ writer.WriteNow("foo");
+ RunLoop().RunUntilIdle();
+
+ EXPECT_FALSE(successful_write_observer_.GetAndResetObservationState());
+ ASSERT_TRUE(PathExists(writer.path()));
+ EXPECT_EQ("foo", GetFileContent(writer.path()));
+}
+
+TEST_F(ImportantFileWriterTest, BasicWithSuccessfulWriteObserver) {
+ ImportantFileWriter writer(file_, MessageLoopProxy::current().get());
+ EXPECT_FALSE(PathExists(writer.path()));
+ EXPECT_FALSE(successful_write_observer_.GetAndResetObservationState());
+ successful_write_observer_.ObserveNextSuccessfulWrite(&writer);
writer.WriteNow("foo");
RunLoop().RunUntilIdle();
+ // Confirm that the observer is invoked.
+ EXPECT_TRUE(successful_write_observer_.GetAndResetObservationState());
ASSERT_TRUE(PathExists(writer.path()));
EXPECT_EQ("foo", GetFileContent(writer.path()));
+
+ // Confirm that re-installing the observer works for another write.
+ EXPECT_FALSE(successful_write_observer_.GetAndResetObservationState());
+ successful_write_observer_.ObserveNextSuccessfulWrite(&writer);
+ writer.WriteNow("bar");
+ RunLoop().RunUntilIdle();
+
+ EXPECT_TRUE(successful_write_observer_.GetAndResetObservationState());
+ ASSERT_TRUE(PathExists(writer.path()));
+ EXPECT_EQ("bar", GetFileContent(writer.path()));
+
+ // Confirm that writing again without re-installing the observer doesn't
+ // result in a notification.
+ EXPECT_FALSE(successful_write_observer_.GetAndResetObservationState());
+ writer.WriteNow("baz");
+ RunLoop().RunUntilIdle();
+
+ EXPECT_FALSE(successful_write_observer_.GetAndResetObservationState());
+ ASSERT_TRUE(PathExists(writer.path()));
+ EXPECT_EQ("baz", GetFileContent(writer.path()));
}
TEST_F(ImportantFileWriterTest, ScheduleWrite) {
diff --git a/base/prefs/json_pref_store.cc b/base/prefs/json_pref_store.cc
index b0a16cc..e99d64f 100644
--- a/base/prefs/json_pref_store.cc
+++ b/base/prefs/json_pref_store.cc
@@ -39,6 +39,8 @@ class FileThreadDeserializer
void Start(const base::FilePath& path) {
DCHECK(origin_loop_proxy_->BelongsToCurrentThread());
+ // TODO(gab): This should use PostTaskAndReplyWithResult instead of using
+ // the |error_| member to pass data across tasks.
sequenced_task_runner_->PostTask(
FROM_HERE,
base::Bind(&FileThreadDeserializer::ReadFileAndReport,
@@ -59,7 +61,7 @@ class FileThreadDeserializer
// Reports deserialization result on the origin thread.
void ReportOnOriginThread() {
DCHECK(origin_loop_proxy_->BelongsToCurrentThread());
- delegate_->OnFileRead(value_.release(), error_, no_dir_);
+ delegate_->OnFileRead(value_.Pass(), error_, no_dir_);
}
static base::Value* DoReading(const base::FilePath& path,
@@ -161,7 +163,9 @@ JsonPrefStore::JsonPrefStore(const base::FilePath& filename,
writer_(filename, sequenced_task_runner),
pref_filter_(pref_filter.Pass()),
initialized_(false),
- read_error_(PREF_READ_ERROR_OTHER) {}
+ filtering_in_progress_(false),
+ read_error_(PREF_READ_ERROR_NONE) {
+}
bool JsonPrefStore::GetValue(const std::string& key,
const base::Value** result) const {
@@ -224,6 +228,12 @@ void JsonPrefStore::RemoveValue(const std::string& key) {
ReportValueChanged(key);
}
+void JsonPrefStore::RemoveValueSilently(const std::string& key) {
+ prefs_->RemovePath(key, NULL);
+ if (!read_only_)
+ writer_.ScheduleWrite(this);
+}
+
bool JsonPrefStore::ReadOnly() const {
return read_only_;
}
@@ -234,23 +244,26 @@ PersistentPrefStore::PrefReadError JsonPrefStore::GetReadError() const {
PersistentPrefStore::PrefReadError JsonPrefStore::ReadPrefs() {
if (path_.empty()) {
- OnFileRead(NULL, PREF_READ_ERROR_FILE_NOT_SPECIFIED, false);
+ OnFileRead(
+ scoped_ptr<base::Value>(), PREF_READ_ERROR_FILE_NOT_SPECIFIED, false);
return PREF_READ_ERROR_FILE_NOT_SPECIFIED;
}
PrefReadError error;
bool no_dir;
- base::Value* value =
- FileThreadDeserializer::DoReading(path_, &error, &no_dir);
- OnFileRead(value, error, no_dir);
- return error;
+ scoped_ptr<base::Value> value(
+ FileThreadDeserializer::DoReading(path_, &error, &no_dir));
+ OnFileRead(value.Pass(), error, no_dir);
+ return filtering_in_progress_ ? PREF_READ_ERROR_ASYNCHRONOUS_TASK_INCOMPLETE :
+ error;
}
-void JsonPrefStore::ReadPrefsAsync(ReadErrorDelegate *error_delegate) {
+void JsonPrefStore::ReadPrefsAsync(ReadErrorDelegate* error_delegate) {
initialized_ = false;
error_delegate_.reset(error_delegate);
if (path_.empty()) {
- OnFileRead(NULL, PREF_READ_ERROR_FILE_NOT_SPECIFIED, false);
+ OnFileRead(
+ scoped_ptr<base::Value>(), PREF_READ_ERROR_FILE_NOT_SPECIFIED, false);
return;
}
@@ -276,53 +289,63 @@ void JsonPrefStore::ReportValueChanged(const std::string& key) {
writer_.ScheduleWrite(this);
}
-void JsonPrefStore::OnFileRead(base::Value* value_owned,
+void JsonPrefStore::RegisterOnNextSuccessfulWriteCallback(
+ const base::Closure& on_next_successful_write) {
+ writer_.RegisterOnNextSuccessfulWriteCallback(on_next_successful_write);
+}
+
+void JsonPrefStore::OnFileRead(scoped_ptr<base::Value> value,
PersistentPrefStore::PrefReadError error,
bool no_dir) {
- scoped_ptr<base::Value> value(value_owned);
- read_error_ = error;
+ scoped_ptr<base::DictionaryValue> unfiltered_prefs(new base::DictionaryValue);
- if (no_dir) {
- FOR_EACH_OBSERVER(PrefStore::Observer,
- observers_,
- OnInitializationCompleted(false));
- return;
- }
+ read_error_ = error;
- initialized_ = true;
+ bool initialization_successful = !no_dir;
- switch (error) {
- case PREF_READ_ERROR_ACCESS_DENIED:
- case PREF_READ_ERROR_FILE_OTHER:
- case PREF_READ_ERROR_FILE_LOCKED:
- case PREF_READ_ERROR_JSON_TYPE:
- case PREF_READ_ERROR_FILE_NOT_SPECIFIED:
- read_only_ = true;
- break;
- case PREF_READ_ERROR_NONE:
- DCHECK(value.get());
- prefs_.reset(static_cast<base::DictionaryValue*>(value.release()));
- break;
- case PREF_READ_ERROR_NO_FILE:
- // If the file just doesn't exist, maybe this is first run. In any case
- // there's no harm in writing out default prefs in this case.
- break;
- case PREF_READ_ERROR_JSON_PARSE:
- case PREF_READ_ERROR_JSON_REPEAT:
- break;
- default:
- NOTREACHED() << "Unknown error: " << error;
+ if (initialization_successful) {
+ switch (read_error_) {
+ case PREF_READ_ERROR_ACCESS_DENIED:
+ case PREF_READ_ERROR_FILE_OTHER:
+ case PREF_READ_ERROR_FILE_LOCKED:
+ case PREF_READ_ERROR_JSON_TYPE:
+ case PREF_READ_ERROR_FILE_NOT_SPECIFIED:
+ read_only_ = true;
+ break;
+ case PREF_READ_ERROR_NONE:
+ DCHECK(value.get());
+ unfiltered_prefs.reset(
+ static_cast<base::DictionaryValue*>(value.release()));
+ break;
+ case PREF_READ_ERROR_NO_FILE:
+ // If the file just doesn't exist, maybe this is first run. In any case
+ // there's no harm in writing out default prefs in this case.
+ break;
+ case PREF_READ_ERROR_JSON_PARSE:
+ case PREF_READ_ERROR_JSON_REPEAT:
+ break;
+ case PREF_READ_ERROR_ASYNCHRONOUS_TASK_INCOMPLETE:
+ // This is a special error code to be returned by ReadPrefs when it
+ // can't complete synchronously, it should never be returned by the read
+ // operation itself.
+ NOTREACHED();
+ break;
+ case PREF_READ_ERROR_MAX_ENUM:
+ NOTREACHED();
+ break;
+ }
}
- if (pref_filter_ && pref_filter_->FilterOnLoad(prefs_.get()))
- writer_.ScheduleWrite(this);
-
- if (error_delegate_.get() && error != PREF_READ_ERROR_NONE)
- error_delegate_->OnError(error);
-
- FOR_EACH_OBSERVER(PrefStore::Observer,
- observers_,
- OnInitializationCompleted(true));
+ if (pref_filter_) {
+ filtering_in_progress_ = true;
+ const PrefFilter::PostFilterOnLoadCallback post_filter_on_load_callback(
+ base::Bind(
+ &JsonPrefStore::FinalizeFileRead, this, initialization_successful));
+ pref_filter_->FilterOnLoad(post_filter_on_load_callback,
+ unfiltered_prefs.Pass());
+ } else {
+ FinalizeFileRead(initialization_successful, unfiltered_prefs.Pass(), false);
+ }
}
JsonPrefStore::~JsonPrefStore() {
@@ -337,3 +360,32 @@ bool JsonPrefStore::SerializeData(std::string* output) {
serializer.set_pretty_print(true);
return serializer.Serialize(*prefs_);
}
+
+void JsonPrefStore::FinalizeFileRead(bool initialization_successful,
+ scoped_ptr<base::DictionaryValue> prefs,
+ bool schedule_write) {
+ filtering_in_progress_ = false;
+
+ if (!initialization_successful) {
+ FOR_EACH_OBSERVER(PrefStore::Observer,
+ observers_,
+ OnInitializationCompleted(false));
+ return;
+ }
+
+ prefs_ = prefs.Pass();
+
+ initialized_ = true;
+
+ if (schedule_write && !read_only_)
+ writer_.ScheduleWrite(this);
+
+ if (error_delegate_ && read_error_ != PREF_READ_ERROR_NONE)
+ error_delegate_->OnError(read_error_);
+
+ FOR_EACH_OBSERVER(PrefStore::Observer,
+ observers_,
+ OnInitializationCompleted(true));
+
+ return;
+}
diff --git a/base/prefs/json_pref_store.h b/base/prefs/json_pref_store.h
index ad13feb..49e74ee 100644
--- a/base/prefs/json_pref_store.h
+++ b/base/prefs/json_pref_store.h
@@ -9,10 +9,12 @@
#include <string>
#include "base/basictypes.h"
+#include "base/callback_forward.h"
#include "base/compiler_specific.h"
#include "base/files/file_path.h"
#include "base/files/important_file_writer.h"
#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/observer_list.h"
#include "base/prefs/base_prefs_export.h"
@@ -32,7 +34,8 @@ class Value;
// A writable PrefStore implementation that is used for user preferences.
class BASE_PREFS_EXPORT JsonPrefStore
: public PersistentPrefStore,
- public base::ImportantFileWriter::DataSerializer {
+ public base::ImportantFileWriter::DataSerializer,
+ public base::SupportsWeakPtr<JsonPrefStore> {
public:
// Returns instance of SequencedTaskRunner which guarantees that file
// operations on the same file will be executed in sequenced order.
@@ -63,16 +66,37 @@ class BASE_PREFS_EXPORT JsonPrefStore
virtual void RemoveValue(const std::string& key) OVERRIDE;
virtual bool ReadOnly() const OVERRIDE;
virtual PrefReadError GetReadError() const OVERRIDE;
+ // Note this method may be asynchronous if this instance has a |pref_filter_|
+ // in which case it will return PREF_READ_ERROR_ASYNCHRONOUS_TASK_INCOMPLETE.
+ // See details in pref_filter.h.
virtual PrefReadError ReadPrefs() OVERRIDE;
virtual void ReadPrefsAsync(ReadErrorDelegate* error_delegate) OVERRIDE;
virtual void CommitPendingWrite() OVERRIDE;
virtual void ReportValueChanged(const std::string& key) OVERRIDE;
- // This method is called after JSON file has been read. Method takes
- // ownership of the |value| pointer. Note, this method is used with
- // asynchronous file reading, so class exposes it only for the internal needs.
- // (read: do not call it manually).
- void OnFileRead(base::Value* value_owned, PrefReadError error, bool no_dir);
+ // Just like RemoveValue(), but doesn't notify observers. Used when doing some
+ // cleanup that shouldn't otherwise alert observers.
+ void RemoveValueSilently(const std::string& key);
+
+ // Registers |on_next_successful_write| to be called once, on the next
+ // successful write event of |writer_|.
+ void RegisterOnNextSuccessfulWriteCallback(
+ const base::Closure& on_next_successful_write);
+
+ // This method is called after the JSON file has been read. It then hands
+ // |value| (or an empty dictionary in some read error cases) to the
+ // |pref_filter| if one is set. It also gives a callback pointing at
+ // FinalizeFileRead() to that |pref_filter_| which is then responsible for
+ // invoking it when done. If there is no |pref_filter_|, FinalizeFileRead()
+ // is invoked directly.
+ // Note, this method is used with asynchronous file reading, so this class
+ // exposes it only for the internal needs (read: do not call it manually).
+ // TODO(gab): Move this method to the private section and hand a callback to
+ // it to FileThreadDeserializer rather than exposing this public method and
+ // giving a JsonPrefStore* to FileThreadDeserializer.
+ void OnFileRead(scoped_ptr<base::Value> value,
+ PrefReadError error,
+ bool no_dir);
private:
virtual ~JsonPrefStore();
@@ -80,6 +104,17 @@ class BASE_PREFS_EXPORT JsonPrefStore
// ImportantFileWriter::DataSerializer overrides:
virtual bool SerializeData(std::string* output) OVERRIDE;
+ // This method is called after the JSON file has been read and the result has
+ // potentially been intercepted and modified by |pref_filter_|.
+ // |initialization_successful| is pre-determined by OnFileRead() and should
+ // be used when reporting OnInitializationCompleted().
+ // |schedule_write| indicates whether a write should be immediately scheduled
+ // (typically because the |pref_filter_| has already altered the |prefs|) --
+ // this will be ignored if this store is read-only.
+ void FinalizeFileRead(bool initialization_successful,
+ scoped_ptr<base::DictionaryValue> prefs,
+ bool schedule_write);
+
base::FilePath path_;
const scoped_refptr<base::SequencedTaskRunner> sequenced_task_runner_;
@@ -96,6 +131,7 @@ class BASE_PREFS_EXPORT JsonPrefStore
scoped_ptr<ReadErrorDelegate> error_delegate_;
bool initialized_;
+ bool filtering_in_progress_;
PrefReadError read_error_;
std::set<std::string> keys_need_empty_value_;
diff --git a/base/prefs/json_pref_store_unittest.cc b/base/prefs/json_pref_store_unittest.cc
index f4e1e51..9e273c7 100644
--- a/base/prefs/json_pref_store_unittest.cc
+++ b/base/prefs/json_pref_store_unittest.cc
@@ -4,6 +4,7 @@
#include "base/prefs/json_pref_store.h"
+#include "base/bind.h"
#include "base/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/memory/ref_counted.h"
@@ -25,6 +26,50 @@ namespace {
const char kHomePage[] = "homepage";
+// A PrefFilter that will intercept all calls to FilterOnLoad() and hold on
+// to the |prefs| until explicitly asked to release them.
+class InterceptingPrefFilter : public PrefFilter {
+ public:
+ InterceptingPrefFilter();
+ virtual ~InterceptingPrefFilter();
+
+ // PrefFilter implementation:
+ virtual void FilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents) OVERRIDE;
+ virtual void FilterUpdate(const std::string& path) OVERRIDE {}
+ virtual void FilterSerializeData(
+ const base::DictionaryValue* pref_store_contents) OVERRIDE {}
+
+ bool has_intercepted_prefs() const { return intercepted_prefs_ != NULL; }
+
+  // Finalize an intercepted read, handing |intercepted_prefs_| back to its
+  // JsonPrefStore.
+ void ReleasePrefs();
+
+ private:
+ PostFilterOnLoadCallback post_filter_on_load_callback_;
+ scoped_ptr<base::DictionaryValue> intercepted_prefs_;
+
+ DISALLOW_COPY_AND_ASSIGN(InterceptingPrefFilter);
+};
+
+InterceptingPrefFilter::InterceptingPrefFilter() {}
+InterceptingPrefFilter::~InterceptingPrefFilter() {}
+
+void InterceptingPrefFilter::FilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents) {
+ post_filter_on_load_callback_ = post_filter_on_load_callback;
+ intercepted_prefs_ = pref_store_contents.Pass();
+}
+
+void InterceptingPrefFilter::ReleasePrefs() {
+ EXPECT_FALSE(post_filter_on_load_callback_.is_null());
+ post_filter_on_load_callback_.Run(intercepted_prefs_.Pass(), false);
+ post_filter_on_load_callback_.Reset();
+}
+
class MockPrefStoreObserver : public PrefStore::Observer {
public:
MOCK_METHOD1(OnPrefValueChanged, void (const std::string&));
@@ -157,7 +202,7 @@ void RunBasicJsonPrefStoreTest(JsonPrefStore* pref_store,
TEST_F(JsonPrefStoreTest, Basic) {
ASSERT_TRUE(base::CopyFile(data_dir_.AppendASCII("read.json"),
- temp_dir_.path().AppendASCII("write.json")));
+ temp_dir_.path().AppendASCII("write.json")));
// Test that the persistent value can be loaded.
base::FilePath input_file = temp_dir_.path().AppendASCII("write.json");
@@ -167,7 +212,8 @@ TEST_F(JsonPrefStoreTest, Basic) {
message_loop_.message_loop_proxy().get(),
scoped_ptr<PrefFilter>());
ASSERT_EQ(PersistentPrefStore::PREF_READ_ERROR_NONE, pref_store->ReadPrefs());
- ASSERT_FALSE(pref_store->ReadOnly());
+ EXPECT_FALSE(pref_store->ReadOnly());
+ EXPECT_TRUE(pref_store->IsInitializationComplete());
// The JSON file looks like this:
// {
@@ -185,7 +231,7 @@ TEST_F(JsonPrefStoreTest, Basic) {
TEST_F(JsonPrefStoreTest, BasicAsync) {
ASSERT_TRUE(base::CopyFile(data_dir_.AppendASCII("read.json"),
- temp_dir_.path().AppendASCII("write.json")));
+ temp_dir_.path().AppendASCII("write.json")));
// Test that the persistent value can be loaded.
base::FilePath input_file = temp_dir_.path().AppendASCII("write.json");
@@ -208,7 +254,8 @@ TEST_F(JsonPrefStoreTest, BasicAsync) {
RunLoop().RunUntilIdle();
pref_store->RemoveObserver(&mock_observer);
- ASSERT_FALSE(pref_store->ReadOnly());
+ EXPECT_FALSE(pref_store->ReadOnly());
+ EXPECT_TRUE(pref_store->IsInitializationComplete());
}
// The JSON file looks like this:
@@ -301,4 +348,117 @@ TEST_F(JsonPrefStoreTest, AsyncNonExistingFile) {
EXPECT_FALSE(pref_store->ReadOnly());
}
+TEST_F(JsonPrefStoreTest, ReadWithInterceptor) {
+ ASSERT_TRUE(base::CopyFile(data_dir_.AppendASCII("read.json"),
+ temp_dir_.path().AppendASCII("write.json")));
+
+ // Test that the persistent value can be loaded.
+ base::FilePath input_file = temp_dir_.path().AppendASCII("write.json");
+ ASSERT_TRUE(PathExists(input_file));
+
+ scoped_ptr<InterceptingPrefFilter> intercepting_pref_filter(
+ new InterceptingPrefFilter());
+ InterceptingPrefFilter* raw_intercepting_pref_filter_ =
+ intercepting_pref_filter.get();
+ scoped_refptr<JsonPrefStore> pref_store =
+ new JsonPrefStore(input_file,
+ message_loop_.message_loop_proxy().get(),
+ intercepting_pref_filter.PassAs<PrefFilter>());
+
+ ASSERT_EQ(PersistentPrefStore::PREF_READ_ERROR_ASYNCHRONOUS_TASK_INCOMPLETE,
+ pref_store->ReadPrefs());
+ EXPECT_FALSE(pref_store->ReadOnly());
+
+ // The store shouldn't be considered initialized until the interceptor
+ // returns.
+ EXPECT_TRUE(raw_intercepting_pref_filter_->has_intercepted_prefs());
+ EXPECT_FALSE(pref_store->IsInitializationComplete());
+ EXPECT_FALSE(pref_store->GetValue(kHomePage, NULL));
+
+ raw_intercepting_pref_filter_->ReleasePrefs();
+
+ EXPECT_FALSE(raw_intercepting_pref_filter_->has_intercepted_prefs());
+ EXPECT_TRUE(pref_store->IsInitializationComplete());
+ EXPECT_TRUE(pref_store->GetValue(kHomePage, NULL));
+
+ // The JSON file looks like this:
+ // {
+ // "homepage": "http://www.cnn.com",
+ // "some_directory": "/usr/local/",
+ // "tabs": {
+ // "new_windows_in_tabs": true,
+ // "max_tabs": 20
+ // }
+ // }
+
+ RunBasicJsonPrefStoreTest(
+ pref_store.get(), input_file, data_dir_.AppendASCII("write.golden.json"));
+}
+
+TEST_F(JsonPrefStoreTest, ReadAsyncWithInterceptor) {
+ ASSERT_TRUE(base::CopyFile(data_dir_.AppendASCII("read.json"),
+ temp_dir_.path().AppendASCII("write.json")));
+
+ // Test that the persistent value can be loaded.
+ base::FilePath input_file = temp_dir_.path().AppendASCII("write.json");
+ ASSERT_TRUE(PathExists(input_file));
+
+ scoped_ptr<InterceptingPrefFilter> intercepting_pref_filter(
+ new InterceptingPrefFilter());
+ InterceptingPrefFilter* raw_intercepting_pref_filter_ =
+ intercepting_pref_filter.get();
+ scoped_refptr<JsonPrefStore> pref_store =
+ new JsonPrefStore(input_file,
+ message_loop_.message_loop_proxy().get(),
+ intercepting_pref_filter.PassAs<PrefFilter>());
+
+ MockPrefStoreObserver mock_observer;
+ pref_store->AddObserver(&mock_observer);
+
+ // Ownership of the |mock_error_delegate| is handed to the |pref_store| below.
+ MockReadErrorDelegate* mock_error_delegate = new MockReadErrorDelegate;
+
+ {
+ pref_store->ReadPrefsAsync(mock_error_delegate);
+
+ EXPECT_CALL(mock_observer, OnInitializationCompleted(true)).Times(0);
+ // EXPECT_CALL(*mock_error_delegate,
+ // OnError(PersistentPrefStore::PREF_READ_ERROR_NONE)).Times(0);
+ RunLoop().RunUntilIdle();
+
+ EXPECT_FALSE(pref_store->ReadOnly());
+ EXPECT_TRUE(raw_intercepting_pref_filter_->has_intercepted_prefs());
+ EXPECT_FALSE(pref_store->IsInitializationComplete());
+ EXPECT_FALSE(pref_store->GetValue(kHomePage, NULL));
+ }
+
+ {
+ EXPECT_CALL(mock_observer, OnInitializationCompleted(true)).Times(1);
+ // EXPECT_CALL(*mock_error_delegate,
+ // OnError(PersistentPrefStore::PREF_READ_ERROR_NONE)).Times(0);
+
+ raw_intercepting_pref_filter_->ReleasePrefs();
+
+ EXPECT_FALSE(pref_store->ReadOnly());
+ EXPECT_FALSE(raw_intercepting_pref_filter_->has_intercepted_prefs());
+ EXPECT_TRUE(pref_store->IsInitializationComplete());
+ EXPECT_TRUE(pref_store->GetValue(kHomePage, NULL));
+ }
+
+ pref_store->RemoveObserver(&mock_observer);
+
+ // The JSON file looks like this:
+ // {
+ // "homepage": "http://www.cnn.com",
+ // "some_directory": "/usr/local/",
+ // "tabs": {
+ // "new_windows_in_tabs": true,
+ // "max_tabs": 20
+ // }
+ // }
+
+ RunBasicJsonPrefStoreTest(
+ pref_store.get(), input_file, data_dir_.AppendASCII("write.golden.json"));
+}
+
} // namespace base
diff --git a/base/prefs/persistent_pref_store.h b/base/prefs/persistent_pref_store.h
index 44f3442..177d860 100644
--- a/base/prefs/persistent_pref_store.h
+++ b/base/prefs/persistent_pref_store.h
@@ -17,19 +17,22 @@ class BASE_PREFS_EXPORT PersistentPrefStore : public WriteablePrefStore {
public:
// Unique integer code for each type of error so we can report them
// distinctly in a histogram.
- // NOTE: Don't change the order here as it will change the server's meaning
- // of the histogram.
+ // NOTE: Don't change the explicit values of the enums as it will change the
+ // server's meaning of the histogram.
enum PrefReadError {
PREF_READ_ERROR_NONE = 0,
- PREF_READ_ERROR_JSON_PARSE,
- PREF_READ_ERROR_JSON_TYPE,
- PREF_READ_ERROR_ACCESS_DENIED,
- PREF_READ_ERROR_FILE_OTHER,
- PREF_READ_ERROR_FILE_LOCKED,
- PREF_READ_ERROR_NO_FILE,
- PREF_READ_ERROR_JSON_REPEAT,
- PREF_READ_ERROR_OTHER,
- PREF_READ_ERROR_FILE_NOT_SPECIFIED,
+ PREF_READ_ERROR_JSON_PARSE = 1,
+ PREF_READ_ERROR_JSON_TYPE = 2,
+ PREF_READ_ERROR_ACCESS_DENIED = 3,
+ PREF_READ_ERROR_FILE_OTHER = 4,
+ PREF_READ_ERROR_FILE_LOCKED = 5,
+ PREF_READ_ERROR_NO_FILE = 6,
+ PREF_READ_ERROR_JSON_REPEAT = 7,
+ // PREF_READ_ERROR_OTHER = 8, // Deprecated.
+ PREF_READ_ERROR_FILE_NOT_SPECIFIED = 9,
+ // Indicates that ReadPrefs() couldn't complete synchronously and is waiting
+ // for an asynchronous task to complete first.
+ PREF_READ_ERROR_ASYNCHRONOUS_TASK_INCOMPLETE = 10,
PREF_READ_ERROR_MAX_ENUM
};
diff --git a/base/prefs/pref_filter.h b/base/prefs/pref_filter.h
index ce07f92..1020029 100644
--- a/base/prefs/pref_filter.h
+++ b/base/prefs/pref_filter.h
@@ -7,6 +7,8 @@
#include <string>
+#include "base/callback_forward.h"
+#include "base/memory/scoped_ptr.h"
#include "base/prefs/base_prefs_export.h"
namespace base {
@@ -18,15 +20,25 @@ class Value;
// Currently supported only by JsonPrefStore.
class BASE_PREFS_EXPORT PrefFilter {
public:
+ // A callback to be invoked when |prefs| have been read (and possibly
+ // pre-modified) and are now ready to be handed back to this callback's
+ // builder. |schedule_write| indicates whether a write should be immediately
+ // scheduled (typically because the |prefs| were pre-modified).
+ typedef base::Callback<void(scoped_ptr<base::DictionaryValue> prefs,
+ bool schedule_write)> PostFilterOnLoadCallback;
+
virtual ~PrefFilter() {}
- // Receives notification when the pref store data has been loaded but before
- // Observers are notified.
- // Changes made by a PrefFilter during FilterOnLoad do not result in
- // notifications to |PrefStore::Observer|s.
- // Implementations should return true if they modify the dictionary, to allow
- // the changes to be persisted.
- virtual bool FilterOnLoad(base::DictionaryValue* pref_store_contents) = 0;
+ // This method is given ownership of the |pref_store_contents| read from disk
+ // before the underlying PersistentPrefStore gets to use them. It must hand
+ // them back via |post_filter_on_load_callback|, but may modify them first.
+ // Note: This method is asynchronous, which may make calls like
+ // PersistentPrefStore::ReadPrefs() asynchronous. The owner of filtered
+ // PersistentPrefStores should handle this to make the reads look synchronous
+ // to external users (see SegregatedPrefStore::ReadPrefs() for an example).
+ virtual void FilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents) = 0;
// Receives notification when a pref store value is changed, before Observers
// are notified.
diff --git a/chrome/browser/prefs/chrome_pref_service_factory.cc b/chrome/browser/prefs/chrome_pref_service_factory.cc
index 0f1d01b..5546942 100644
--- a/chrome/browser/prefs/chrome_pref_service_factory.cc
+++ b/chrome/browser/prefs/chrome_pref_service_factory.cc
@@ -158,10 +158,25 @@ const PrefHashFilter::TrackedPreferenceMetadata kTrackedPrefs[] = {
PrefHashFilter::NO_ENFORCEMENT,
PrefHashFilter::TRACKING_STRATEGY_ATOMIC
},
+ {
+ // Protecting kPreferenceResetTime does two things:
+ // 1) It ensures this isn't accidentally set by someone stomping the pref
+ // file.
+ // 2) More importantly, it declares kPreferenceResetTime as a protected
+ // pref which is required for it to be visible when queried via the
+ // SegregatedPrefStore. This is because it's written directly in the
+ // protected JsonPrefStore by that store's PrefHashFilter if there was
+ // a reset in FilterOnLoad and SegregatedPrefStore will not look for it
+ // in the protected JsonPrefStore unless it's declared as a protected
+ // preference here.
+ 15, prefs::kPreferenceResetTime,
+ PrefHashFilter::ENFORCE_ON_LOAD,
+ PrefHashFilter::TRACKING_STRATEGY_ATOMIC
+ },
};
// The count of tracked preferences IDs across all platforms.
-const size_t kTrackedPrefsReportingIDsCount = 15;
+const size_t kTrackedPrefsReportingIDsCount = 16;
COMPILE_ASSERT(kTrackedPrefsReportingIDsCount >= arraysize(kTrackedPrefs),
need_to_increment_ids_count);
diff --git a/chrome/browser/prefs/interceptable_pref_filter.cc b/chrome/browser/prefs/interceptable_pref_filter.cc
new file mode 100644
index 0000000..dcf252b
--- /dev/null
+++ b/chrome/browser/prefs/interceptable_pref_filter.cc
@@ -0,0 +1,38 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "chrome/browser/prefs/interceptable_pref_filter.h"
+
+#include "base/bind.h"
+
+InterceptablePrefFilter::InterceptablePrefFilter() {}
+InterceptablePrefFilter::~InterceptablePrefFilter() {}
+
+void InterceptablePrefFilter::FilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents) {
+ if (filter_on_load_interceptor_.is_null()) {
+ FinalizeFilterOnLoad(post_filter_on_load_callback,
+ pref_store_contents.Pass(),
+ false);
+ } else {
+ // Note, in practice (in the implementation as it was in May 2014) it would
+ // be okay to pass an unretained |this| pointer below, but in order to avoid
+ // having to augment the API everywhere to explicitly enforce the ownership
+ // model as it happens to currently be: make the relationship simpler by
+ // weakly binding the FinalizeFilterOnLoadCallback below to |this|.
+ const FinalizeFilterOnLoadCallback finalize_filter_on_load(
+ base::Bind(&InterceptablePrefFilter::FinalizeFilterOnLoad,
+ AsWeakPtr(),
+ post_filter_on_load_callback));
+ filter_on_load_interceptor_.Run(finalize_filter_on_load,
+ pref_store_contents.Pass());
+ }
+}
+
+void InterceptablePrefFilter::InterceptNextFilterOnLoad(
+ const FilterOnLoadInterceptor& filter_on_load_interceptor) {
+ DCHECK(filter_on_load_interceptor_.is_null());
+ filter_on_load_interceptor_ = filter_on_load_interceptor;
+}
diff --git a/chrome/browser/prefs/interceptable_pref_filter.h b/chrome/browser/prefs/interceptable_pref_filter.h
new file mode 100644
index 0000000..a1ba00d
--- /dev/null
+++ b/chrome/browser/prefs/interceptable_pref_filter.h
@@ -0,0 +1,64 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CHROME_BROWSER_PREFS_INTERCEPTABLE_PREF_FILTER_H_
+#define CHROME_BROWSER_PREFS_INTERCEPTABLE_PREF_FILTER_H_
+
+#include "base/callback.h"
+#include "base/memory/weak_ptr.h"
+#include "base/prefs/pref_filter.h"
+#include "base/values.h"
+
+// A partial implementation of a PrefFilter whose FilterOnLoad call may be
+// intercepted by a FilterOnLoadInterceptor. Implementations of
+// InterceptablePrefFilter are expected to override FinalizeFilterOnLoad rather
+// than re-overriding FilterOnLoad.
+class InterceptablePrefFilter
+ : public PrefFilter,
+ public base::SupportsWeakPtr<InterceptablePrefFilter> {
+ public:
+ // A callback to be invoked by a FilterOnLoadInterceptor when it's ready to
+ // hand back the |prefs| it was handed for early filtering. |prefs_altered|
+ // indicates whether the |prefs| were actually altered by the
+ // FilterOnLoadInterceptor before being handed back.
+ typedef base::Callback<void(scoped_ptr<base::DictionaryValue> prefs,
+ bool prefs_altered)> FinalizeFilterOnLoadCallback;
+
+ // A callback to be invoked from FilterOnLoad. It takes ownership of prefs
+ // and may modify them before handing them back to this
+ // InterceptablePrefFilter via |finalize_filter_on_load|.
+ typedef base::Callback<
+ void(const FinalizeFilterOnLoadCallback& finalize_filter_on_load,
+ scoped_ptr<base::DictionaryValue> prefs)> FilterOnLoadInterceptor;
+
+ InterceptablePrefFilter();
+ virtual ~InterceptablePrefFilter();
+
+ // PrefFilter partial implementation.
+ virtual void FilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents) OVERRIDE;
+
+ // Registers |filter_on_load_interceptor| to intercept the next FilterOnLoad
+ // event. At most one FilterOnLoadInterceptor should be registered per
+ // PrefFilter.
+ void InterceptNextFilterOnLoad(
+ const FilterOnLoadInterceptor& filter_on_load_interceptor);
+
+ private:
+ // Does any extra filtering required by the implementation of this
+ // InterceptablePrefFilter and hands back the |pref_store_contents| to the
+ // initial caller of FilterOnLoad.
+ virtual void FinalizeFilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents,
+ bool prefs_altered) = 0;
+
+ // Callback to be invoked only once (and subsequently reset) on the next
+ // FilterOnLoad event. It will be allowed to modify the |prefs| handed to
+ // FilterOnLoad before handing them back to this InterceptablePrefFilter.
+ FilterOnLoadInterceptor filter_on_load_interceptor_;
+};
+
+#endif // CHROME_BROWSER_PREFS_INTERCEPTABLE_PREF_FILTER_H_
diff --git a/chrome/browser/prefs/pref_hash_filter.cc b/chrome/browser/prefs/pref_hash_filter.cc
index e35df37..4ba42e3 100644
--- a/chrome/browser/prefs/pref_hash_filter.cc
+++ b/chrome/browser/prefs/pref_hash_filter.cc
@@ -8,7 +8,6 @@
#include "base/logging.h"
#include "base/metrics/histogram.h"
-#include "base/prefs/persistent_pref_store.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/pref_store.h"
#include "base/strings/string_number_conversions.h"
@@ -92,51 +91,6 @@ void PrefHashFilter::ClearResetTime(PrefService* user_prefs) {
user_prefs->ClearPref(prefs::kPreferenceResetTime);
}
-void PrefHashFilter::MigrateValues(PersistentPrefStore* source,
- PersistentPrefStore* destination) {
- bool commit_source = false;
- bool commit_destination = false;
-
- scoped_ptr<PrefHashStoreTransaction> transaction =
- pref_hash_store_->BeginTransaction();
- for (TrackedPreferencesMap::const_iterator it = tracked_paths_.begin();
- it != tracked_paths_.end();
- ++it) {
- const base::Value* source_value = NULL;
- if (source->GetValue(it->first, &source_value)) {
- if (!destination->GetValue(it->first, NULL)) {
- base::DictionaryValue temp_dictionary;
- // Copy the value from |source| into a suitable place for a
- // TrackedPreference to act on it.
- temp_dictionary.Set(it->first, source_value->DeepCopy());
- // Check whether the value is correct according to our MAC. May remove
- // the value from |temp_dictionary|.
- it->second->EnforceAndReport(&temp_dictionary, transaction.get());
- // Now take the value as it appears in |temp_dictionary| and put it in
- // |destination|.
- scoped_ptr<base::Value> checked_value;
- if (temp_dictionary.Remove(it->first, &checked_value)) {
- destination->SetValue(it->first, checked_value.release());
- commit_destination = true;
- }
- }
- source->RemoveValue(it->first);
- commit_source = true;
- }
- }
-
- // Order these such that a crash at any point is still recoverable. We assume
- // that they are configured such that the writes will occur on worker threads
- // in the order that we asked for them.
- if (commit_destination)
- destination->CommitPendingWrite();
- transaction.reset();
- // If we crash here, we will just delete the values from |source| in a future
- // invocation of MigrateValues.
- if (commit_source)
- source->CommitPendingWrite();
-}
-
void PrefHashFilter::Initialize(const PrefStore& pref_store) {
scoped_ptr<PrefHashStoreTransaction> hash_store_transaction(
pref_hash_store_->BeginTransaction());
@@ -150,40 +104,6 @@ void PrefHashFilter::Initialize(const PrefStore& pref_store) {
}
}
-// Validates loaded preference values according to stored hashes, reports
-// validation results via UMA, and updates hashes in case of mismatch.
-bool PrefHashFilter::FilterOnLoad(base::DictionaryValue* pref_store_contents) {
- DCHECK(pref_store_contents);
- base::TimeTicks checkpoint = base::TimeTicks::Now();
-
- bool did_reset = false;
- {
- scoped_ptr<PrefHashStoreTransaction> hash_store_transaction(
- pref_hash_store_->BeginTransaction());
- for (TrackedPreferencesMap::const_iterator it = tracked_paths_.begin();
- it != tracked_paths_.end(); ++it) {
- if (it->second->EnforceAndReport(pref_store_contents,
- hash_store_transaction.get())) {
- did_reset = true;
- }
- }
- }
-
- if (did_reset) {
- pref_store_contents->Set(prefs::kPreferenceResetTime,
- new base::StringValue(base::Int64ToString(
- base::Time::Now().ToInternalValue())));
- }
-
- // TODO(gab): Remove this histogram by Feb 21 2014; after sufficient timing
- // data has been gathered from the wild to be confident this doesn't
- // significantly affect startup.
- UMA_HISTOGRAM_TIMES("Settings.FilterOnLoadTime",
- base::TimeTicks::Now() - checkpoint);
-
- return did_reset;
-}
-
// Marks |path| has having changed if it is part of |tracked_paths_|. A new hash
// will be stored for it the next time FilterSerializeData() is invoked.
void PrefHashFilter::FilterUpdate(const std::string& path) {
@@ -234,3 +154,39 @@ void PrefHashFilter::FilterSerializeData(
// necessary anyways).
pref_hash_store_->CommitPendingWrite();
}
+
+void PrefHashFilter::FinalizeFilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents,
+ bool prefs_altered) {
+ DCHECK(pref_store_contents);
+ base::TimeTicks checkpoint = base::TimeTicks::Now();
+
+ bool did_reset = false;
+ {
+ scoped_ptr<PrefHashStoreTransaction> hash_store_transaction(
+ pref_hash_store_->BeginTransaction());
+ for (TrackedPreferencesMap::const_iterator it = tracked_paths_.begin();
+ it != tracked_paths_.end(); ++it) {
+ if (it->second->EnforceAndReport(pref_store_contents.get(),
+ hash_store_transaction.get())) {
+ did_reset = true;
+ prefs_altered = true;
+ }
+ }
+ }
+
+ if (did_reset) {
+ pref_store_contents->Set(prefs::kPreferenceResetTime,
+ new base::StringValue(base::Int64ToString(
+ base::Time::Now().ToInternalValue())));
+ }
+
+ // TODO(gab): Remove this histogram by Feb 21 2014; after sufficient timing
+ // data has been gathered from the wild to be confident this doesn't
+ // significantly affect startup.
+ UMA_HISTOGRAM_TIMES("Settings.FilterOnLoadTime",
+ base::TimeTicks::Now() - checkpoint);
+
+ post_filter_on_load_callback.Run(pref_store_contents.Pass(), prefs_altered);
+}
diff --git a/chrome/browser/prefs/pref_hash_filter.h b/chrome/browser/prefs/pref_hash_filter.h
index 7cb08313..cb795b5 100644
--- a/chrome/browser/prefs/pref_hash_filter.h
+++ b/chrome/browser/prefs/pref_hash_filter.h
@@ -7,18 +7,15 @@
#include <map>
#include <set>
-#include <string>
#include <vector>
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "base/containers/scoped_ptr_hash_map.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/prefs/pref_filter.h"
+#include "chrome/browser/prefs/interceptable_pref_filter.h"
#include "chrome/browser/prefs/pref_hash_store.h"
#include "chrome/browser/prefs/tracked/tracked_preference.h"
-class PersistentPrefStore;
class PrefService;
class PrefStore;
@@ -35,7 +32,7 @@ class PrefRegistrySyncable;
// Intercepts preference values as they are loaded from disk and verifies them
// using a PrefHashStore. Keeps the PrefHashStore contents up to date as values
// are changed.
-class PrefHashFilter : public PrefFilter {
+class PrefHashFilter : public InterceptablePrefFilter {
public:
enum EnforcementLevel {
NO_ENFORCEMENT,
@@ -84,23 +81,23 @@ class PrefHashFilter : public PrefFilter {
// |pref_store|.
void Initialize(const PrefStore& pref_store);
- // Migrates protected values from |source| to |destination|. Values are
- // migrated if they are protected according to this filter's configuration,
- // the corresponding key has no value in |destination|, and the value in
- // |source| is trusted according to this filter's PrefHashStore. Regardless of
- // the state of |destination| or the trust status, the protected values will
- // be removed from |source|.
- void MigrateValues(PersistentPrefStore* source,
- PersistentPrefStore* destination);
-
- // PrefFilter implementation.
- virtual bool FilterOnLoad(base::DictionaryValue* pref_store_contents)
- OVERRIDE;
+ // PrefFilter remaining implementation.
virtual void FilterUpdate(const std::string& path) OVERRIDE;
virtual void FilterSerializeData(
const base::DictionaryValue* pref_store_contents) OVERRIDE;
private:
+ // InterceptablePrefFilter implementation.
+ virtual void FinalizeFilterOnLoad(
+ const PostFilterOnLoadCallback& post_filter_on_load_callback,
+ scoped_ptr<base::DictionaryValue> pref_store_contents,
+ bool prefs_altered) OVERRIDE;
+
+ // Callback to be invoked only once (and subsequently reset) on the next
+ // FilterOnLoad event. It will be allowed to modify the |prefs| handed to
+ // FilterOnLoad before handing them back to this PrefHashFilter.
+ FilterOnLoadInterceptor filter_on_load_interceptor_;
+
// A map of paths to TrackedPreferences; this map owns this individual
// TrackedPreference objects.
typedef base::ScopedPtrHashMap<std::string, TrackedPreference>
diff --git a/chrome/browser/prefs/pref_hash_filter_unittest.cc b/chrome/browser/prefs/pref_hash_filter_unittest.cc
index 1c7dfc6..87886d6 100644
--- a/chrome/browser/prefs/pref_hash_filter_unittest.cc
+++ b/chrome/browser/prefs/pref_hash_filter_unittest.cc
@@ -320,7 +320,9 @@ std::vector<PrefHashFilter::TrackedPreferenceMetadata> GetConfiguration(
class PrefHashFilterTest
: public testing::TestWithParam<PrefHashFilter::EnforcementLevel> {
public:
- PrefHashFilterTest() : mock_pref_hash_store_(NULL) {}
+ PrefHashFilterTest() : mock_pref_hash_store_(NULL),
+ pref_store_contents_(new base::DictionaryValue),
+ last_filter_on_load_modified_prefs_(false) {}
virtual void SetUp() OVERRIDE {
// Construct a PrefHashFilter and MockPrefHashStore for the test.
@@ -343,18 +345,41 @@ class PrefHashFilterTest
}
bool RecordedReset() {
- return pref_store_contents_.Get(prefs::kPreferenceResetTime, NULL);
+ return pref_store_contents_->Get(prefs::kPreferenceResetTime, NULL);
+ }
+
+ // Calls FilterOnLoad() on |pref_hash_filter_|. |pref_store_contents_| is
+ // handed off, but should be given back to us synchronously through
+ // GetPrefsBack() as there is no FilterOnLoadInterceptor installed on
+ // |pref_hash_filter_|.
+ void DoFilterOnLoad(bool expect_prefs_modifications) {
+ pref_hash_filter_->FilterOnLoad(
+ base::Bind(&PrefHashFilterTest::GetPrefsBack, base::Unretained(this),
+ expect_prefs_modifications),
+ pref_store_contents_.Pass());
}
MockPrefHashStore* mock_pref_hash_store_;
- base::DictionaryValue pref_store_contents_;
+ scoped_ptr<base::DictionaryValue> pref_store_contents_;
+ bool last_filter_on_load_modified_prefs_;
scoped_ptr<PrefHashFilter> pref_hash_filter_;
+ private:
+ // Stores |prefs| back in |pref_store_contents_| and ensures
+ // |expected_schedule_write| matches the reported |schedule_write|.
+ void GetPrefsBack(bool expected_schedule_write,
+ scoped_ptr<base::DictionaryValue> prefs,
+ bool schedule_write) {
+ pref_store_contents_ = prefs.Pass();
+ EXPECT_TRUE(pref_store_contents_);
+ EXPECT_EQ(expected_schedule_write, schedule_write);
+ }
+
DISALLOW_COPY_AND_ASSIGN(PrefHashFilterTest);
};
TEST_P(PrefHashFilterTest, EmptyAndUnchanged) {
- ASSERT_FALSE(pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(false);
// All paths checked.
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
@@ -487,14 +512,14 @@ TEST_P(PrefHashFilterTest, MultiplePrefsFilterSerializeData) {
}
TEST_P(PrefHashFilterTest, EmptyAndUnknown) {
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_FALSE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kSplitPref, NULL));
// NULL values are always trusted by the PrefHashStore.
mock_pref_hash_store_->SetCheckResult(
kAtomicPref, PrefHashStoreTransaction::TRUSTED_UNKNOWN_VALUE);
mock_pref_hash_store_->SetCheckResult(
kSplitPref, PrefHashStoreTransaction::TRUSTED_UNKNOWN_VALUE);
- ASSERT_FALSE(pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(false);
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
ASSERT_EQ(2u, mock_pref_hash_store_->stored_paths_count());
@@ -517,23 +542,22 @@ TEST_P(PrefHashFilterTest, EmptyAndUnknown) {
TEST_P(PrefHashFilterTest, InitialValueUnknown) {
// Ownership of these values is transfered to |pref_store_contents_|.
base::StringValue* string_value = new base::StringValue("string value");
- pref_store_contents_.Set(kAtomicPref, string_value);
+ pref_store_contents_->Set(kAtomicPref, string_value);
base::DictionaryValue* dict_value = new base::DictionaryValue;
dict_value->SetString("a", "foo");
dict_value->SetInteger("b", 1234);
- pref_store_contents_.Set(kSplitPref, dict_value);
+ pref_store_contents_->Set(kSplitPref, dict_value);
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, NULL));
mock_pref_hash_store_->SetCheckResult(
kAtomicPref, PrefHashStoreTransaction::UNTRUSTED_UNKNOWN_VALUE);
mock_pref_hash_store_->SetCheckResult(
kSplitPref, PrefHashStoreTransaction::UNTRUSTED_UNKNOWN_VALUE);
// If we are enforcing, expect this to report changes.
- ASSERT_EQ(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD,
- pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD);
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
ASSERT_EQ(2u, mock_pref_hash_store_->stored_paths_count());
@@ -550,10 +574,10 @@ TEST_P(PrefHashFilterTest, InitialValueUnknown) {
if (GetParam() == PrefHashFilter::ENFORCE_ON_LOAD) {
// Ensure the prefs were cleared and the hashes for NULL were restored if
// the current enforcement level denies seeding.
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref, NULL));
ASSERT_EQ(NULL, stored_atomic_value.first);
- ASSERT_FALSE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kSplitPref, NULL));
ASSERT_EQ(NULL, stored_split_value.first);
ASSERT_TRUE(RecordedReset());
@@ -561,12 +585,12 @@ TEST_P(PrefHashFilterTest, InitialValueUnknown) {
// Otherwise the values should have remained intact and the hashes should
// have been updated to match them.
const base::Value* atomic_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, &atomic_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, &atomic_value_in_store));
ASSERT_EQ(string_value, atomic_value_in_store);
ASSERT_EQ(string_value, stored_atomic_value.first);
const base::Value* split_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, &split_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, &split_value_in_store));
ASSERT_EQ(dict_value, split_value_in_store);
ASSERT_EQ(dict_value, stored_split_value.first);
@@ -578,21 +602,21 @@ TEST_P(PrefHashFilterTest, InitialValueUnknown) {
TEST_P(PrefHashFilterTest, InitialValueTrustedUnknown) {
// Ownership of this value is transfered to |pref_store_contents_|.
base::Value* string_value = base::Value::CreateStringValue("test");
- pref_store_contents_.Set(kAtomicPref, string_value);
+ pref_store_contents_->Set(kAtomicPref, string_value);
base::DictionaryValue* dict_value = new base::DictionaryValue;
dict_value->SetString("a", "foo");
dict_value->SetInteger("b", 1234);
- pref_store_contents_.Set(kSplitPref, dict_value);
+ pref_store_contents_->Set(kSplitPref, dict_value);
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, NULL));
mock_pref_hash_store_->SetCheckResult(
kAtomicPref, PrefHashStoreTransaction::TRUSTED_UNKNOWN_VALUE);
mock_pref_hash_store_->SetCheckResult(
kSplitPref, PrefHashStoreTransaction::TRUSTED_UNKNOWN_VALUE);
- ASSERT_FALSE(pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(false);
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
ASSERT_EQ(2u, mock_pref_hash_store_->stored_paths_count());
@@ -600,7 +624,7 @@ TEST_P(PrefHashFilterTest, InitialValueTrustedUnknown) {
// Seeding is always allowed for trusted unknown values.
const base::Value* atomic_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, &atomic_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, &atomic_value_in_store));
ASSERT_EQ(string_value, atomic_value_in_store);
MockPrefHashStore::ValuePtrStrategyPair stored_atomic_value =
mock_pref_hash_store_->stored_value(kAtomicPref);
@@ -609,7 +633,7 @@ TEST_P(PrefHashFilterTest, InitialValueTrustedUnknown) {
stored_atomic_value.second);
const base::Value* split_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, &split_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, &split_value_in_store));
ASSERT_EQ(dict_value, split_value_in_store);
MockPrefHashStore::ValuePtrStrategyPair stored_split_value =
mock_pref_hash_store_->stored_value(kSplitPref);
@@ -622,17 +646,17 @@ TEST_P(PrefHashFilterTest, InitialValueTrustedUnknown) {
TEST_P(PrefHashFilterTest, InitialValueChanged) {
// Ownership of this value is transfered to |pref_store_contents_|.
base::Value* int_value = base::Value::CreateIntegerValue(1234);
- pref_store_contents_.Set(kAtomicPref, int_value);
+ pref_store_contents_->Set(kAtomicPref, int_value);
base::DictionaryValue* dict_value = new base::DictionaryValue;
dict_value->SetString("a", "foo");
dict_value->SetInteger("b", 1234);
dict_value->SetInteger("c", 56);
dict_value->SetBoolean("d", false);
- pref_store_contents_.Set(kSplitPref, dict_value);
+ pref_store_contents_->Set(kSplitPref, dict_value);
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, NULL));
mock_pref_hash_store_->SetCheckResult(kAtomicPref,
PrefHashStoreTransaction::CHANGED);
@@ -644,8 +668,7 @@ TEST_P(PrefHashFilterTest, InitialValueChanged) {
mock_invalid_keys.push_back("c");
mock_pref_hash_store_->SetInvalidKeysResult(kSplitPref, mock_invalid_keys);
- ASSERT_EQ(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD,
- pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD);
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
ASSERT_EQ(2u, mock_pref_hash_store_->stored_paths_count());
@@ -662,13 +685,13 @@ TEST_P(PrefHashFilterTest, InitialValueChanged) {
if (GetParam() == PrefHashFilter::ENFORCE_ON_LOAD) {
// Ensure the atomic pref was cleared and the hash for NULL was restored if
// the current enforcement level prevents changes.
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref, NULL));
ASSERT_EQ(NULL, stored_atomic_value.first);
// The split pref on the other hand should only have been stripped of its
// invalid keys.
const base::Value* split_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, &split_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, &split_value_in_store));
ASSERT_EQ(2U, dict_value->size());
ASSERT_FALSE(dict_value->HasKey("a"));
ASSERT_TRUE(dict_value->HasKey("b"));
@@ -681,12 +704,12 @@ TEST_P(PrefHashFilterTest, InitialValueChanged) {
// Otherwise the value should have remained intact and the hash should have
// been updated to match it.
const base::Value* atomic_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, &atomic_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, &atomic_value_in_store));
ASSERT_EQ(int_value, atomic_value_in_store);
ASSERT_EQ(int_value, stored_atomic_value.first);
const base::Value* split_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, &split_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, &split_value_in_store));
ASSERT_EQ(dict_value, split_value_in_store);
ASSERT_EQ(4U, dict_value->size());
ASSERT_TRUE(dict_value->HasKey("a"));
@@ -701,13 +724,13 @@ TEST_P(PrefHashFilterTest, InitialValueChanged) {
}
TEST_P(PrefHashFilterTest, EmptyCleared) {
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_FALSE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kSplitPref, NULL));
mock_pref_hash_store_->SetCheckResult(kAtomicPref,
PrefHashStoreTransaction::CLEARED);
mock_pref_hash_store_->SetCheckResult(kSplitPref,
PrefHashStoreTransaction::CLEARED);
- ASSERT_FALSE(pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(false);
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
ASSERT_EQ(2u, mock_pref_hash_store_->stored_paths_count());
@@ -715,14 +738,14 @@ TEST_P(PrefHashFilterTest, EmptyCleared) {
// Regardless of the enforcement level, the only thing that should be done is
// to restore the hash for NULL. The value itself should still be NULL.
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref, NULL));
MockPrefHashStore::ValuePtrStrategyPair stored_atomic_value =
mock_pref_hash_store_->stored_value(kAtomicPref);
ASSERT_EQ(NULL, stored_atomic_value.first);
ASSERT_EQ(PrefHashFilter::TRACKING_STRATEGY_ATOMIC,
stored_atomic_value.second);
- ASSERT_FALSE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kSplitPref, NULL));
MockPrefHashStore::ValuePtrStrategyPair stored_split_value =
mock_pref_hash_store_->stored_value(kSplitPref);
ASSERT_EQ(NULL, stored_split_value.first);
@@ -737,15 +760,14 @@ TEST_P(PrefHashFilterTest, InitialValueMigrated) {
// Ownership of this value is transfered to |pref_store_contents_|.
base::ListValue* list_value = new base::ListValue;
list_value->Append(base::Value::CreateStringValue("test"));
- pref_store_contents_.Set(kAtomicPref, list_value);
+ pref_store_contents_->Set(kAtomicPref, list_value);
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, NULL));
mock_pref_hash_store_->SetCheckResult(kAtomicPref,
PrefHashStoreTransaction::WEAK_LEGACY);
- ASSERT_EQ(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD,
- pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD);
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
ASSERT_EQ(1u, mock_pref_hash_store_->stored_paths_count());
@@ -758,7 +780,7 @@ TEST_P(PrefHashFilterTest, InitialValueMigrated) {
if (GetParam() == PrefHashFilter::ENFORCE_ON_LOAD) {
// Ensure the pref was cleared and the hash for NULL was restored if the
// current enforcement level prevents migration.
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref, NULL));
ASSERT_EQ(NULL, stored_atomic_value.first);
ASSERT_TRUE(RecordedReset());
@@ -766,7 +788,7 @@ TEST_P(PrefHashFilterTest, InitialValueMigrated) {
// Otherwise the value should have remained intact and the hash should have
// been updated to match it.
const base::Value* atomic_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, &atomic_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, &atomic_value_in_store));
ASSERT_EQ(list_value, atomic_value_in_store);
ASSERT_EQ(list_value, stored_atomic_value.first);
@@ -778,21 +800,21 @@ TEST_P(PrefHashFilterTest, InitialValueMigrated) {
TEST_P(PrefHashFilterTest, InitialValueUnchangedLegacyId) {
// Ownership of these values is transfered to |pref_store_contents_|.
base::StringValue* string_value = new base::StringValue("string value");
- pref_store_contents_.Set(kAtomicPref, string_value);
+ pref_store_contents_->Set(kAtomicPref, string_value);
base::DictionaryValue* dict_value = new base::DictionaryValue;
dict_value->SetString("a", "foo");
dict_value->SetInteger("b", 1234);
- pref_store_contents_.Set(kSplitPref, dict_value);
+ pref_store_contents_->Set(kSplitPref, dict_value);
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, NULL));
mock_pref_hash_store_->SetCheckResult(
kAtomicPref, PrefHashStoreTransaction::SECURE_LEGACY);
mock_pref_hash_store_->SetCheckResult(
kSplitPref, PrefHashStoreTransaction::SECURE_LEGACY);
- ASSERT_FALSE(pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(false);
ASSERT_EQ(arraysize(kTestTrackedPrefs),
mock_pref_hash_store_->checked_paths_count());
ASSERT_EQ(1u, mock_pref_hash_store_->transactions_performed());
@@ -808,7 +830,7 @@ TEST_P(PrefHashFilterTest, InitialValueUnchangedLegacyId) {
ASSERT_EQ(PrefHashFilter::TRACKING_STRATEGY_ATOMIC,
stored_atomic_value.second);
const base::Value* atomic_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, &atomic_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, &atomic_value_in_store));
ASSERT_EQ(string_value, atomic_value_in_store);
ASSERT_EQ(string_value, stored_atomic_value.first);
@@ -817,7 +839,7 @@ TEST_P(PrefHashFilterTest, InitialValueUnchangedLegacyId) {
ASSERT_EQ(PrefHashFilter::TRACKING_STRATEGY_SPLIT,
stored_split_value.second);
const base::Value* split_value_in_store;
- ASSERT_TRUE(pref_store_contents_.Get(kSplitPref, &split_value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kSplitPref, &split_value_in_store));
ASSERT_EQ(dict_value, split_value_in_store);
ASSERT_EQ(dict_value, stored_split_value.first);
@@ -832,15 +854,15 @@ TEST_P(PrefHashFilterTest, DontResetReportOnly) {
base::Value* report_only_val = base::Value::CreateIntegerValue(3);
base::DictionaryValue* report_only_split_val = new base::DictionaryValue;
report_only_split_val->SetInteger("a", 1234);
- pref_store_contents_.Set(kAtomicPref, int_value1);
- pref_store_contents_.Set(kAtomicPref2, int_value2);
- pref_store_contents_.Set(kReportOnlyPref, report_only_val);
- pref_store_contents_.Set(kReportOnlySplitPref, report_only_split_val);
+ pref_store_contents_->Set(kAtomicPref, int_value1);
+ pref_store_contents_->Set(kAtomicPref2, int_value2);
+ pref_store_contents_->Set(kReportOnlyPref, report_only_val);
+ pref_store_contents_->Set(kReportOnlySplitPref, report_only_split_val);
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref2, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kReportOnlyPref, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kReportOnlySplitPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref2, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kReportOnlyPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kReportOnlySplitPref, NULL));
mock_pref_hash_store_->SetCheckResult(kAtomicPref,
PrefHashStoreTransaction::CHANGED);
@@ -851,8 +873,7 @@ TEST_P(PrefHashFilterTest, DontResetReportOnly) {
mock_pref_hash_store_->SetCheckResult(kReportOnlySplitPref,
PrefHashStoreTransaction::CHANGED);
- ASSERT_EQ(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD,
- pref_hash_filter_->FilterOnLoad(&pref_store_contents_));
+ DoFilterOnLoad(GetParam() >= PrefHashFilter::ENFORCE_ON_LOAD);
// All prefs should be checked and a new hash should be stored for each tested
// pref.
ASSERT_EQ(arraysize(kTestTrackedPrefs),
@@ -862,8 +883,8 @@ TEST_P(PrefHashFilterTest, DontResetReportOnly) {
// No matter what the enforcement level is, the report only pref should never
// be reset.
- ASSERT_TRUE(pref_store_contents_.Get(kReportOnlyPref, NULL));
- ASSERT_TRUE(pref_store_contents_.Get(kReportOnlySplitPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kReportOnlyPref, NULL));
+ ASSERT_TRUE(pref_store_contents_->Get(kReportOnlySplitPref, NULL));
ASSERT_EQ(report_only_val,
mock_pref_hash_store_->stored_value(kReportOnlyPref).first);
ASSERT_EQ(report_only_split_val,
@@ -871,8 +892,8 @@ TEST_P(PrefHashFilterTest, DontResetReportOnly) {
// All other prefs should have been reset if the enforcement level allows it.
if (GetParam() == PrefHashFilter::ENFORCE_ON_LOAD) {
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref, NULL));
- ASSERT_FALSE(pref_store_contents_.Get(kAtomicPref2, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref, NULL));
+ ASSERT_FALSE(pref_store_contents_->Get(kAtomicPref2, NULL));
ASSERT_EQ(NULL, mock_pref_hash_store_->stored_value(kAtomicPref).first);
ASSERT_EQ(NULL, mock_pref_hash_store_->stored_value(kAtomicPref2).first);
@@ -880,8 +901,8 @@ TEST_P(PrefHashFilterTest, DontResetReportOnly) {
} else {
const base::Value* value_in_store;
const base::Value* value_in_store2;
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref, &value_in_store));
- ASSERT_TRUE(pref_store_contents_.Get(kAtomicPref2, &value_in_store2));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref, &value_in_store));
+ ASSERT_TRUE(pref_store_contents_->Get(kAtomicPref2, &value_in_store2));
ASSERT_EQ(int_value1, value_in_store);
ASSERT_EQ(int_value1,
mock_pref_hash_store_->stored_value(kAtomicPref).first);
@@ -894,103 +915,6 @@ TEST_P(PrefHashFilterTest, DontResetReportOnly) {
EXPECT_FALSE(mock_pref_hash_store_->commit_performed());
}
-TEST_P(PrefHashFilterTest, MigrateValuesTest) {
- // Migration configuration should only contain the protected preferences.
- std::vector<PrefHashFilter::TrackedPreferenceMetadata> configuration =
- GetConfiguration(GetParam());
- std::vector<PrefHashFilter::TrackedPreferenceMetadata>
- migration_configuration;
-
- for (std::vector<PrefHashFilter::TrackedPreferenceMetadata>::iterator it =
- configuration.begin();
- it != configuration.end();
- ++it) {
- if (it->enforcement_level >= PrefHashFilter::ENFORCE_ON_LOAD)
- migration_configuration.push_back(*it);
- }
-
- // Discards the default created pref_hash_filter_.
- InitializePrefHashFilter(migration_configuration);
-
- scoped_refptr<TestingPrefStore> source(new TestingPrefStore);
- scoped_refptr<TestingPrefStore> destination(new TestingPrefStore);
-
- // If enforcing, should be migrated.
- source->SetString(kAtomicPref, "foobar");
- mock_pref_hash_store_->SetCheckResult(kAtomicPref,
- PrefHashStoreTransaction::UNCHANGED);
-
- // If enforcing, should be discarded due to pre-existing value in
- // |destination|.
- source->SetString(kAtomicPref2, "foobar2");
- mock_pref_hash_store_->SetCheckResult(kAtomicPref2,
- PrefHashStoreTransaction::UNCHANGED);
-
- // If enforcing, should be kept preferentially to value from |source|. If not
- // enforcing, should still be unaffected.
- destination->SetString(kAtomicPref2, "foobar2 preexisting");
- // Should stay in |destination| in both scenarios.
- destination->SetString(kAtomicPref3, "foobar3");
- mock_pref_hash_store_->SetCheckResult(kAtomicPref3,
- PrefHashStoreTransaction::UNCHANGED);
-
- // When enforcing, should be discarded due to MAC mismatch. If not enforcing,
- // stays in |source|.
- source->SetString(kAtomicPref4, "foobar4");
- mock_pref_hash_store_->SetCheckResult(kAtomicPref4,
- PrefHashStoreTransaction::CHANGED);
-
- // Should remain in |source| in both scenarios.
- source->SetString(kReportOnlyPref, "helloworld");
- mock_pref_hash_store_->SetCheckResult(kReportOnlyPref,
- PrefHashStoreTransaction::UNCHANGED);
-
- // Perform the migration.
- pref_hash_filter_->MigrateValues(source, destination);
- ASSERT_EQ(1u, mock_pref_hash_store_->transactions_performed());
-
- if (GetParam() == PrefHashFilter::ENFORCE_ON_LOAD) {
- std::string value;
-
- ASSERT_FALSE(source->GetValue(kAtomicPref, NULL));
- ASSERT_FALSE(source->GetValue(kAtomicPref2, NULL));
- ASSERT_FALSE(source->GetValue(kAtomicPref3, NULL));
- ASSERT_FALSE(source->GetValue(kAtomicPref4, NULL));
- ASSERT_TRUE(source->GetString(kReportOnlyPref, &value));
- ASSERT_EQ("helloworld", value);
-
- ASSERT_TRUE(destination->GetString(kAtomicPref, &value));
- ASSERT_EQ("foobar", value);
- ASSERT_TRUE(destination->GetString(kAtomicPref2, &value));
- ASSERT_EQ("foobar2 preexisting", value);
- ASSERT_TRUE(destination->GetString(kAtomicPref3, &value));
- ASSERT_EQ("foobar3", value);
- ASSERT_FALSE(destination->GetValue(kReportOnlyPref, NULL));
- ASSERT_FALSE(destination->GetValue(kAtomicPref4, NULL));
- } else {
- std::string value;
-
- ASSERT_TRUE(source->GetString(kAtomicPref, &value));
- ASSERT_EQ("foobar", value);
- ASSERT_TRUE(source->GetString(kAtomicPref2, &value));
- ASSERT_EQ("foobar2", value);
- ASSERT_FALSE(source->GetString(kAtomicPref3, &value));
- ASSERT_TRUE(source->GetString(kAtomicPref4, &value));
- ASSERT_EQ("foobar4", value);
- ASSERT_TRUE(source->GetString(kReportOnlyPref, &value));
- ASSERT_EQ("helloworld", value);
-
- ASSERT_FALSE(destination->GetValue(kAtomicPref, NULL));
- ASSERT_TRUE(destination->GetString(kAtomicPref2, &value));
- ASSERT_EQ("foobar2 preexisting", value);
- ASSERT_TRUE(destination->GetString(kAtomicPref3, &value));
- ASSERT_EQ("foobar3", value);
- ASSERT_FALSE(destination->GetValue(kAtomicPref4, NULL));
- ASSERT_FALSE(destination->GetValue(kReportOnlyPref, NULL));
- }
- EXPECT_FALSE(mock_pref_hash_store_->commit_performed());
-}
-
INSTANTIATE_TEST_CASE_P(
PrefHashFilterTestInstance, PrefHashFilterTest,
testing::Values(PrefHashFilter::NO_ENFORCEMENT,
diff --git a/chrome/browser/prefs/profile_pref_store_manager.cc b/chrome/browser/prefs/profile_pref_store_manager.cc
index e93cb83..6558c4a 100644
--- a/chrome/browser/prefs/profile_pref_store_manager.cc
+++ b/chrome/browser/prefs/profile_pref_store_manager.cc
@@ -4,6 +4,7 @@
#include "chrome/browser/prefs/profile_pref_store_manager.h"
+#include "base/bind.h"
#include "base/file_util.h"
#include "base/json/json_file_value_serializer.h"
#include "base/logging.h"
@@ -14,6 +15,7 @@
#include "chrome/browser/prefs/pref_hash_store_impl.h"
#include "chrome/browser/prefs/tracked/pref_service_hash_store_contents.h"
#include "chrome/browser/prefs/tracked/segregated_pref_store.h"
+#include "chrome/browser/prefs/tracked/tracked_preferences_migration.h"
#include "chrome/common/chrome_constants.h"
#include "chrome/common/pref_names.h"
#include "components/user_prefs/pref_registry_syncable.h"
@@ -243,24 +245,11 @@ void ProfilePrefStoreManager::ResetAllPrefHashStores(PrefService* local_state) {
// static
base::Time ProfilePrefStoreManager::GetResetTime(PrefService* pref_service) {
- // It's a bit of a coincidence that this (and ClearResetTime) work(s). The
- // PrefHashFilter attached to the protected pref store will store the reset
- // time directly in the protected pref store without going through the
- // SegregatedPrefStore.
-
- // PrefHashFilter::GetResetTime will read the value through the pref service,
- // and thus through the SegregatedPrefStore. Even though it's not listed as
- // "protected" it will be read from the protected store preferentially to the
- // (NULL) value in the unprotected pref store.
return PrefHashFilter::GetResetTime(pref_service);
}
// static
void ProfilePrefStoreManager::ClearResetTime(PrefService* pref_service) {
- // PrefHashFilter::ClearResetTime will clear the value through the pref
- // service, and thus through the SegregatedPrefStore. Since it's not listed as
- // "protected" it will be migrated from the protected store to the unprotected
- // pref store before being deleted from the latter.
PrefHashFilter::ClearResetTime(pref_service);
}
@@ -283,6 +272,7 @@ PersistentPrefStore* ProfilePrefStoreManager::CreateProfilePrefStore(
std::vector<PrefHashFilter::TrackedPreferenceMetadata>
protected_configuration;
std::set<std::string> protected_pref_names;
+ std::set<std::string> unprotected_pref_names;
for (std::vector<PrefHashFilter::TrackedPreferenceMetadata>::const_iterator
it = tracking_configuration_.begin();
it != tracking_configuration_.end();
@@ -292,41 +282,49 @@ PersistentPrefStore* ProfilePrefStoreManager::CreateProfilePrefStore(
protected_pref_names.insert(it->name);
} else {
unprotected_configuration.push_back(*it);
+ unprotected_pref_names.insert(it->name);
}
}
- scoped_ptr<PrefFilter> unprotected_pref_hash_filter(
+ scoped_ptr<PrefHashFilter> unprotected_pref_hash_filter(
new PrefHashFilter(GetPrefHashStoreImpl().PassAs<PrefHashStore>(),
unprotected_configuration,
reporting_ids_count_));
- scoped_ptr<PrefFilter> protected_pref_hash_filter(
+ scoped_ptr<PrefHashFilter> protected_pref_hash_filter(
new PrefHashFilter(GetPrefHashStoreImpl().PassAs<PrefHashStore>(),
protected_configuration,
reporting_ids_count_));
- scoped_refptr<PersistentPrefStore> unprotected_pref_store(
+ PrefHashFilter* raw_unprotected_pref_hash_filter =
+ unprotected_pref_hash_filter.get();
+ PrefHashFilter* raw_protected_pref_hash_filter =
+ protected_pref_hash_filter.get();
+
+ scoped_refptr<JsonPrefStore> unprotected_pref_store(
new JsonPrefStore(GetPrefFilePathFromProfilePath(profile_path_),
io_task_runner,
- unprotected_pref_hash_filter.Pass()));
- scoped_refptr<PersistentPrefStore> protected_pref_store(new JsonPrefStore(
+ unprotected_pref_hash_filter.PassAs<PrefFilter>()));
+ scoped_refptr<JsonPrefStore> protected_pref_store(new JsonPrefStore(
profile_path_.Append(chrome::kProtectedPreferencesFilename),
io_task_runner,
- protected_pref_hash_filter.Pass()));
-
- // The on_initialized callback is used to migrate newly protected values from
- // the main Preferences store to the Protected Preferences store. It is also
- // responsible for the initial migration to a two-store model.
- return new SegregatedPrefStore(
- unprotected_pref_store,
- protected_pref_store,
+ protected_pref_hash_filter.PassAs<PrefFilter>()));
+
+ SetupTrackedPreferencesMigration(
+ unprotected_pref_names,
protected_pref_names,
- base::Bind(&PrefHashFilter::MigrateValues,
- base::Owned(new PrefHashFilter(
- CopyPrefHashStore(),
- protected_configuration,
- reporting_ids_count_)),
- unprotected_pref_store,
- protected_pref_store));
+ base::Bind(&JsonPrefStore::RemoveValueSilently,
+ unprotected_pref_store->AsWeakPtr()),
+ base::Bind(&JsonPrefStore::RemoveValueSilently,
+ protected_pref_store->AsWeakPtr()),
+ base::Bind(&JsonPrefStore::RegisterOnNextSuccessfulWriteCallback,
+ unprotected_pref_store->AsWeakPtr()),
+ base::Bind(&JsonPrefStore::RegisterOnNextSuccessfulWriteCallback,
+ protected_pref_store->AsWeakPtr()),
+ raw_unprotected_pref_hash_filter,
+ raw_protected_pref_hash_filter);
+
+ return new SegregatedPrefStore(unprotected_pref_store, protected_pref_store,
+ protected_pref_names);
}
void ProfilePrefStoreManager::UpdateProfileHashStoreIfRequired(
@@ -410,15 +408,3 @@ scoped_ptr<PrefHashStoreImpl> ProfilePrefStoreManager::GetPrefHashStoreImpl() {
scoped_ptr<HashStoreContents>(new PrefServiceHashStoreContents(
profile_path_.AsUTF8Unsafe(), local_state_))));
}
-
-scoped_ptr<PrefHashStore> ProfilePrefStoreManager::CopyPrefHashStore() {
- DCHECK(kPlatformSupportsPreferenceTracking);
-
- PrefServiceHashStoreContents real_contents(profile_path_.AsUTF8Unsafe(),
- local_state_);
- return scoped_ptr<PrefHashStore>(new PrefHashStoreImpl(
- seed_,
- device_id_,
- scoped_ptr<HashStoreContents>(
- new DictionaryHashStoreContents(real_contents))));
-}
diff --git a/chrome/browser/prefs/profile_pref_store_manager.h b/chrome/browser/prefs/profile_pref_store_manager.h
index cf51551..fa2456b 100644
--- a/chrome/browser/prefs/profile_pref_store_manager.h
+++ b/chrome/browser/prefs/profile_pref_store_manager.h
@@ -108,10 +108,6 @@ class ProfilePrefStoreManager {
// if |kPlatformSupportsPreferenceTracking|.
scoped_ptr<PrefHashStoreImpl> GetPrefHashStoreImpl();
- // Returns a PrefHashStore that is a copy of the current state of the real
- // hash store.
- scoped_ptr<PrefHashStore> CopyPrefHashStore();
-
const base::FilePath profile_path_;
const std::vector<PrefHashFilter::TrackedPreferenceMetadata>
tracking_configuration_;
diff --git a/chrome/browser/prefs/profile_pref_store_manager_unittest.cc b/chrome/browser/prefs/profile_pref_store_manager_unittest.cc
index cca5947..c83c9b2 100644
--- a/chrome/browser/prefs/profile_pref_store_manager_unittest.cc
+++ b/chrome/browser/prefs/profile_pref_store_manager_unittest.cc
@@ -23,6 +23,7 @@
#include "base/strings/string_util.h"
#include "base/values.h"
#include "chrome/browser/prefs/pref_hash_filter.h"
+#include "chrome/common/pref_names.h"
#include "components/user_prefs/pref_registry_syncable.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -111,6 +112,17 @@ class ProfilePrefStoreManagerTest : public testing::Test {
std::string(),
user_prefs::PrefRegistrySyncable::UNSYNCABLE_PREF);
+ // As in chrome_pref_service_factory.cc, kPreferencesResetTime needs to be
+ // declared as protected in order to be read from the proper store by the
+ // SegregatedPrefStore. Only declare it after configured prefs have been
+ // registered above for this test as kPreferenceResetTime is already
+ // registered in ProfilePrefStoreManager::RegisterProfilePrefs.
+ PrefHashFilter::TrackedPreferenceMetadata pref_reset_time_config =
+ {configuration_.rbegin()->reporting_id + 1, prefs::kPreferenceResetTime,
+ PrefHashFilter::ENFORCE_ON_LOAD,
+ PrefHashFilter::TRACKING_STRATEGY_ATOMIC};
+ configuration_.push_back(pref_reset_time_config);
+
ASSERT_TRUE(profile_dir_.CreateUniqueTempDir());
ReloadConfiguration();
}
@@ -422,11 +434,7 @@ TEST_F(ProfilePrefStoreManagerTest, UnprotectedToProtectedWithoutTrust) {
WasResetRecorded());
}
-// This test does not directly verify that the values are moved from one pref
-// store to the other. segregated_pref_store_unittest.cc _does_ verify that
-// functionality.
-//
-// _This_ test verifies that preference values are correctly maintained when a
+// This test verifies that preference values are correctly maintained when a
// preference's protection state changes from protected to unprotected.
TEST_F(ProfilePrefStoreManagerTest, ProtectedToUnprotected) {
InitializePrefs();
@@ -438,7 +446,7 @@ TEST_F(ProfilePrefStoreManagerTest, ProtectedToUnprotected) {
it != configuration_.end();
++it) {
if (it->name == kProtectedAtomic) {
- configuration_.erase(it);
+ it->enforcement_level = PrefHashFilter::NO_ENFORCEMENT;
break;
}
}
diff --git a/chrome/browser/prefs/tracked/segregated_pref_store.cc b/chrome/browser/prefs/tracked/segregated_pref_store.cc
index a430d303..3a50b40 100644
--- a/chrome/browser/prefs/tracked/segregated_pref_store.cc
+++ b/chrome/browser/prefs/tracked/segregated_pref_store.cc
@@ -35,9 +35,6 @@ void SegregatedPrefStore::AggregatingObserver::OnInitializationCompleted(
DCHECK_LE(failed_sub_initializations_ + successful_sub_initializations_, 2);
if (failed_sub_initializations_ + successful_sub_initializations_ == 2) {
- if (!outer_->on_initialization_.is_null())
- outer_->on_initialization_.Run();
-
if (successful_sub_initializations_ == 2 && outer_->read_error_delegate_) {
PersistentPrefStore::PrefReadError read_error = outer_->GetReadError();
if (read_error != PersistentPrefStore::PREF_READ_ERROR_NONE)
@@ -54,14 +51,11 @@ void SegregatedPrefStore::AggregatingObserver::OnInitializationCompleted(
SegregatedPrefStore::SegregatedPrefStore(
const scoped_refptr<PersistentPrefStore>& default_pref_store,
const scoped_refptr<PersistentPrefStore>& selected_pref_store,
- const std::set<std::string>& selected_pref_names,
- const base::Closure& on_initialization)
+ const std::set<std::string>& selected_pref_names)
: default_pref_store_(default_pref_store),
selected_pref_store_(selected_pref_store),
selected_preference_names_(selected_pref_names),
- on_initialization_(on_initialization),
aggregating_observer_(this) {
-
default_pref_store_->AddObserver(&aggregating_observer_);
selected_pref_store_->AddObserver(&aggregating_observer_);
}
@@ -128,8 +122,15 @@ PersistentPrefStore::PrefReadError SegregatedPrefStore::GetReadError() const {
}
PersistentPrefStore::PrefReadError SegregatedPrefStore::ReadPrefs() {
+ // Note: Both of these stores own PrefFilters which makes ReadPrefs
+ // asynchronous. This is okay in this case as only the first call will be
+ // truly asynchronous, the second call will then unblock the migration in
+ // TrackedPreferencesMigrator and complete synchronously.
default_pref_store_->ReadPrefs();
- selected_pref_store_->ReadPrefs();
+ PersistentPrefStore::PrefReadError selected_store_read_error =
+ selected_pref_store_->ReadPrefs();
+ DCHECK_NE(PersistentPrefStore::PREF_READ_ERROR_ASYNCHRONOUS_TASK_INCOMPLETE,
+ selected_store_read_error);
return GetReadError();
}
@@ -150,34 +151,13 @@ SegregatedPrefStore::~SegregatedPrefStore() {
selected_pref_store_->RemoveObserver(&aggregating_observer_);
}
-const PersistentPrefStore* SegregatedPrefStore::StoreForKey(
- const std::string& key) const {
- if (ContainsKey(selected_preference_names_, key) ||
- selected_pref_store_->GetValue(key, NULL)) {
- return selected_pref_store_.get();
- }
- return default_pref_store_.get();
-}
-
PersistentPrefStore* SegregatedPrefStore::StoreForKey(const std::string& key) {
- if (ContainsKey(selected_preference_names_, key))
- return selected_pref_store_.get();
-
- // Check if this unselected value was previously selected. If so, migrate it
- // back to the unselected store.
- // It's hard to do this in a single pass at startup because PrefStore does not
- // permit us to enumerate its contents.
- const base::Value* value = NULL;
- if (selected_pref_store_->GetValue(key, &value)) {
- default_pref_store_->SetValue(key, value->DeepCopy());
- // Commit |default_pref_store_| to guarantee that the migrated value is
- // flushed to disk before the removal from |selected_pref_store_| is
- // eventually flushed to disk.
- default_pref_store_->CommitPendingWrite();
-
- value = NULL;
- selected_pref_store_->RemoveValue(key);
- }
+ return ContainsKey(selected_preference_names_, key) ? selected_pref_store_
+ : default_pref_store_;
+}
- return default_pref_store_.get();
+const PersistentPrefStore* SegregatedPrefStore::StoreForKey(
+ const std::string& key) const {
+ return ContainsKey(selected_preference_names_, key) ? selected_pref_store_
+ : default_pref_store_;
}
diff --git a/chrome/browser/prefs/tracked/segregated_pref_store.h b/chrome/browser/prefs/tracked/segregated_pref_store.h
index 64707d0..fc8ef91 100644
--- a/chrome/browser/prefs/tracked/segregated_pref_store.h
+++ b/chrome/browser/prefs/tracked/segregated_pref_store.h
@@ -6,8 +6,8 @@
#define CHROME_BROWSER_PREFS_TRACKED_SEGREGATED_PREF_STORE_H_
#include <set>
+#include <string>
-#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -41,8 +41,7 @@ class SegregatedPrefStore : public PersistentPrefStore {
SegregatedPrefStore(
const scoped_refptr<PersistentPrefStore>& default_pref_store,
const scoped_refptr<PersistentPrefStore>& selected_pref_store,
- const std::set<std::string>& selected_pref_names,
- const base::Closure& on_initialization);
+ const std::set<std::string>& selected_pref_names);
// PrefStore implementation
virtual void AddObserver(Observer* observer) OVERRIDE;
@@ -89,20 +88,14 @@ class SegregatedPrefStore : public PersistentPrefStore {
virtual ~SegregatedPrefStore();
- // Returns |selected_pref_store| if |key| is selected or a value is present
- // in |selected_pref_store|. This could happen if |key| was previously
- // selected.
- const PersistentPrefStore* StoreForKey(const std::string& key) const;
-
- // Returns |selected_pref_store| if |key| is selected. If |key| is
- // unselected but has a value in |selected_pref_store|, moves the value to
- // |default_pref_store| before returning |default_pref_store|.
+ // Returns |selected_pref_store| if |key| is selected and |default_pref_store|
+ // otherwise.
PersistentPrefStore* StoreForKey(const std::string& key);
+ const PersistentPrefStore* StoreForKey(const std::string& key) const;
scoped_refptr<PersistentPrefStore> default_pref_store_;
scoped_refptr<PersistentPrefStore> selected_pref_store_;
std::set<std::string> selected_preference_names_;
- base::Closure on_initialization_;
scoped_ptr<PersistentPrefStore::ReadErrorDelegate> read_error_delegate_;
ObserverList<PrefStore::Observer, true> observers_;
diff --git a/chrome/browser/prefs/tracked/segregated_pref_store_unittest.cc b/chrome/browser/prefs/tracked/segregated_pref_store_unittest.cc
index f1c27c7..a7f582d 100644
--- a/chrome/browser/prefs/tracked/segregated_pref_store_unittest.cc
+++ b/chrome/browser/prefs/tracked/segregated_pref_store_unittest.cc
@@ -57,8 +57,7 @@ class MockReadErrorDelegate : public PersistentPrefStore::ReadErrorDelegate {
class SegregatedPrefStoreTest : public testing::Test {
public:
SegregatedPrefStoreTest()
- : initialization_callback_invoked_(false),
- read_error_delegate_data_(false,
+ : read_error_delegate_data_(false,
PersistentPrefStore::PREF_READ_ERROR_NONE),
read_error_delegate_(
new MockReadErrorDelegate(&read_error_delegate_data_)) {}
@@ -73,9 +72,7 @@ class SegregatedPrefStoreTest : public testing::Test {
segregated_store_ = new SegregatedPrefStore(
default_store_,
selected_store_,
- selected_pref_names,
- base::Bind(&SegregatedPrefStoreTest::InitializationCallback,
- base::Unretained(this)));
+ selected_pref_names);
segregated_store_->AddObserver(&observer_);
}
@@ -92,7 +89,6 @@ class SegregatedPrefStoreTest : public testing::Test {
}
PrefStoreObserverMock observer_;
- bool initialization_callback_invoked_;
scoped_refptr<TestingPrefStore> default_store_;
scoped_refptr<TestingPrefStore> selected_store_;
@@ -101,12 +97,6 @@ class SegregatedPrefStoreTest : public testing::Test {
MockReadErrorDelegate::Data read_error_delegate_data_;
private:
- void InitializationCallback() {
- EXPECT_FALSE(observer_.initialized);
- EXPECT_FALSE(initialization_callback_invoked_);
- initialization_callback_invoked_ = true;
- }
-
scoped_ptr<MockReadErrorDelegate> read_error_delegate_;
};
@@ -155,26 +145,9 @@ TEST_F(SegregatedPrefStoreTest, ReadValues) {
ASSERT_TRUE(segregated_store_->GetValue(kUnselectedPref, NULL));
}
-TEST_F(SegregatedPrefStoreTest, PreviouslySelected) {
- selected_store_->SetValue(kUnselectedPref, new base::StringValue(kValue1));
- segregated_store_->ReadPrefs();
- // It will read from the selected store.
- ASSERT_TRUE(segregated_store_->GetValue(kUnselectedPref, NULL));
- ASSERT_TRUE(selected_store_->GetValue(kUnselectedPref, NULL));
- ASSERT_FALSE(default_store_->GetValue(kUnselectedPref, NULL));
-
- // But when we update the value...
- segregated_store_->SetValue(kUnselectedPref, new base::StringValue(kValue2));
- // ...it will be migrated.
- ASSERT_TRUE(segregated_store_->GetValue(kUnselectedPref, NULL));
- ASSERT_FALSE(selected_store_->GetValue(kUnselectedPref, NULL));
- ASSERT_TRUE(default_store_->GetValue(kUnselectedPref, NULL));
-}
-
TEST_F(SegregatedPrefStoreTest, Observer) {
EXPECT_EQ(PersistentPrefStore::PREF_READ_ERROR_NONE,
segregated_store_->ReadPrefs());
- EXPECT_TRUE(initialization_callback_invoked_);
EXPECT_TRUE(observer_.initialized);
EXPECT_TRUE(observer_.initialization_success);
EXPECT_TRUE(observer_.changed_keys.empty());
diff --git a/chrome/browser/prefs/tracked/tracked_preferences_migration.cc b/chrome/browser/prefs/tracked/tracked_preferences_migration.cc
new file mode 100644
index 0000000..c358256
--- /dev/null
+++ b/chrome/browser/prefs/tracked/tracked_preferences_migration.cc
@@ -0,0 +1,267 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "chrome/browser/prefs/tracked/tracked_preferences_migration.h"
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/values.h"
+#include "chrome/browser/prefs/interceptable_pref_filter.h"
+
+namespace {
+
+class TrackedPreferencesMigrator
+ : public base::RefCounted<TrackedPreferencesMigrator> {
+ public:
+ TrackedPreferencesMigrator(
+ const std::set<std::string>& unprotected_pref_names,
+ const std::set<std::string>& protected_pref_names,
+ const base::Callback<void(const std::string& key)>&
+ unprotected_store_cleaner,
+ const base::Callback<void(const std::string& key)>&
+ protected_store_cleaner,
+ const base::Callback<void(const base::Closure&)>&
+ register_on_successful_unprotected_store_write_callback,
+ const base::Callback<void(const base::Closure&)>&
+ register_on_successful_protected_store_write_callback,
+ InterceptablePrefFilter* unprotected_pref_filter,
+ InterceptablePrefFilter* protected_pref_filter);
+
+ private:
+ friend class base::RefCounted<TrackedPreferencesMigrator>;
+
+ enum PrefFilterID {
+ UNPROTECTED_PREF_FILTER,
+ PROTECTED_PREF_FILTER
+ };
+
+ ~TrackedPreferencesMigrator();
+
+ // Stores the data coming in from the filter identified by |id| into this
+ // class and then calls MigrateIfReady();
+ void InterceptFilterOnLoad(
+ PrefFilterID id,
+ const InterceptablePrefFilter::FinalizeFilterOnLoadCallback&
+ finalize_filter_on_load,
+ scoped_ptr<base::DictionaryValue> prefs);
+
+ // Proceeds with migration if both |unprotected_prefs_| and |protected_prefs_|
+ // have been set.
+ void MigrateIfReady();
+
+ const std::set<std::string> unprotected_pref_names_;
+ const std::set<std::string> protected_pref_names_;
+
+ const base::Callback<void(const std::string& key)> unprotected_store_cleaner_;
+ const base::Callback<void(const std::string& key)> protected_store_cleaner_;
+ const base::Callback<void(const base::Closure&)>
+ register_on_successful_unprotected_store_write_callback_;
+ const base::Callback<void(const base::Closure&)>
+ register_on_successful_protected_store_write_callback_;
+
+ InterceptablePrefFilter::FinalizeFilterOnLoadCallback
+ finalize_unprotected_filter_on_load_;
+ InterceptablePrefFilter::FinalizeFilterOnLoadCallback
+ finalize_protected_filter_on_load_;
+
+ scoped_ptr<base::DictionaryValue> unprotected_prefs_;
+ scoped_ptr<base::DictionaryValue> protected_prefs_;
+
+ DISALLOW_COPY_AND_ASSIGN(TrackedPreferencesMigrator);
+};
+
+// Invokes |store_cleaner| for every |keys_to_clean|.
+void CleanupPrefStore(
+ const base::Callback<void(const std::string& key)>& store_cleaner,
+ const std::set<std::string>& keys_to_clean) {
+ for (std::set<std::string>::const_iterator it = keys_to_clean.begin();
+ it != keys_to_clean.end(); ++it) {
+ store_cleaner.Run(*it);
+ }
+}
+
+// If |wait_for_commit_to_destination_store|: schedules (via
+// |register_on_successful_destination_store_write_callback|) a cleanup of the
+// |keys_to_clean| from the source pref store (through |source_store_cleaner|)
+// once the destination pref store they were migrated to was successfully
+// written to disk. Otherwise, executes the cleanup right away.
+void ScheduleSourcePrefStoreCleanup(
+ const base::Callback<void(const base::Closure&)>&
+ register_on_successful_destination_store_write_callback,
+ const base::Callback<void(const std::string& key)>& source_store_cleaner,
+ const std::set<std::string>& keys_to_clean,
+ bool wait_for_commit_to_destination_store) {
+ DCHECK(!keys_to_clean.empty());
+ if (wait_for_commit_to_destination_store) {
+ register_on_successful_destination_store_write_callback.Run(
+ base::Bind(&CleanupPrefStore, source_store_cleaner, keys_to_clean));
+ } else {
+ CleanupPrefStore(source_store_cleaner, keys_to_clean);
+ }
+}
+
+// Copies the value of each pref in |pref_names| which is set in |old_store|,
+// but not in |new_store| into |new_store|. Sets |old_store_needs_cleanup| to
+// true if any old duplicates remain in |old_store| and sets |new_store_altered|
+// to true if any value was copied to |new_store|.
+void MigratePrefsFromOldToNewStore(const std::set<std::string>& pref_names,
+ const base::DictionaryValue* old_store,
+ base::DictionaryValue* new_store,
+ bool* old_store_needs_cleanup,
+ bool* new_store_altered) {
+ for (std::set<std::string>::const_iterator it = pref_names.begin();
+ it != pref_names.end(); ++it) {
+ const std::string& pref_name = *it;
+
+ const base::Value* value_in_old_store = NULL;
+ if (!old_store->Get(pref_name, &value_in_old_store))
+ continue;
+
+ // Whether this value ends up being copied below or was left behind by a
+ // previous incomplete migration, it should be cleaned up.
+ *old_store_needs_cleanup = true;
+
+ if (new_store->Get(pref_name, NULL))
+ continue;
+
+ // Copy the value from |old_store| to |new_store| rather than moving it to
+ // avoid data loss should |old_store| be flushed to disk without |new_store|
+ // having equivalently been successfully flushed to disk (e.g., on crash or
+ // in cases where |new_store| is read-only following a read error on
+ // startup).
+ new_store->Set(pref_name, value_in_old_store->DeepCopy());
+ *new_store_altered = true;
+ }
+}
+
// Constructs the migrator and immediately registers an interceptor on each of
// the two filters. NOTE: the base::Bind calls below each hold a reference to
// |this| (this class is ref-counted), so the migrator keeps itself alive until
// both interceptors have been destroyed.
TrackedPreferencesMigrator::TrackedPreferencesMigrator(
    const std::set<std::string>& unprotected_pref_names,
    const std::set<std::string>& protected_pref_names,
    const base::Callback<void(const std::string& key)>&
        unprotected_store_cleaner,
    const base::Callback<void(const std::string& key)>& protected_store_cleaner,
    const base::Callback<void(const base::Closure&)>&
        register_on_successful_unprotected_store_write_callback,
    const base::Callback<void(const base::Closure&)>&
        register_on_successful_protected_store_write_callback,
    InterceptablePrefFilter* unprotected_pref_filter,
    InterceptablePrefFilter* protected_pref_filter)
    : unprotected_pref_names_(unprotected_pref_names),
      protected_pref_names_(protected_pref_names),
      unprotected_store_cleaner_(unprotected_store_cleaner),
      protected_store_cleaner_(protected_store_cleaner),
      register_on_successful_unprotected_store_write_callback_(
          register_on_successful_unprotected_store_write_callback),
      register_on_successful_protected_store_write_callback_(
          register_on_successful_protected_store_write_callback) {
  // The callbacks bound below will own this TrackedPreferencesMigrator by
  // reference.
  unprotected_pref_filter->InterceptNextFilterOnLoad(
      base::Bind(&TrackedPreferencesMigrator::InterceptFilterOnLoad,
                 this,
                 UNPROTECTED_PREF_FILTER));
  protected_pref_filter->InterceptNextFilterOnLoad(
      base::Bind(&TrackedPreferencesMigrator::InterceptFilterOnLoad,
                 this,
                 PROTECTED_PREF_FILTER));
}
+
+TrackedPreferencesMigrator::~TrackedPreferencesMigrator() {}
+
+void TrackedPreferencesMigrator::InterceptFilterOnLoad(
+ PrefFilterID id,
+ const InterceptablePrefFilter::FinalizeFilterOnLoadCallback&
+ finalize_filter_on_load,
+ scoped_ptr<base::DictionaryValue> prefs) {
+ switch (id) {
+ case UNPROTECTED_PREF_FILTER:
+ finalize_unprotected_filter_on_load_ = finalize_filter_on_load;
+ unprotected_prefs_ = prefs.Pass();
+ break;
+ case PROTECTED_PREF_FILTER:
+ finalize_protected_filter_on_load_ = finalize_filter_on_load;
+ protected_prefs_ = prefs.Pass();
+ break;
+ }
+
+ MigrateIfReady();
+}
+
// Performs the two-way migration once both stores have handed their prefs
// over, hands the (possibly altered) prefs back to the filters, and schedules
// cleanup of any values left behind in their source store.
void TrackedPreferencesMigrator::MigrateIfReady() {
  // Wait for both stores to have been read before proceeding.
  if (!protected_prefs_ || !unprotected_prefs_)
    return;

  // Copy prefs that now belong in the unprotected store out of the protected
  // store, and vice versa. The |*_need_cleanup| flags report leftover values
  // in the source store; the |*_altered| flags report writes to the
  // destination store.
  bool protected_prefs_need_cleanup = false;
  bool unprotected_prefs_altered = false;
  MigratePrefsFromOldToNewStore(unprotected_pref_names_,
                                protected_prefs_.get(),
                                unprotected_prefs_.get(),
                                &protected_prefs_need_cleanup,
                                &unprotected_prefs_altered);
  bool unprotected_prefs_need_cleanup = false;
  bool protected_prefs_altered = false;
  MigratePrefsFromOldToNewStore(protected_pref_names_,
                                unprotected_prefs_.get(),
                                protected_prefs_.get(),
                                &unprotected_prefs_need_cleanup,
                                &protected_prefs_altered);

  // Hand the processed prefs back to their respective filters. Note that
  // |unprotected_prefs_| and |protected_prefs_| are NULL after these calls.
  finalize_unprotected_filter_on_load_.Run(unprotected_prefs_.Pass(),
                                           unprotected_prefs_altered);
  finalize_protected_filter_on_load_.Run(protected_prefs_.Pass(),
                                         protected_prefs_altered);

  if (unprotected_prefs_need_cleanup) {
    // Schedule a cleanup of the |protected_pref_names_| from the unprotected
    // prefs once the protected prefs were successfully written to disk (or
    // do it immediately if |!protected_prefs_altered|).
    ScheduleSourcePrefStoreCleanup(
        register_on_successful_protected_store_write_callback_,
        unprotected_store_cleaner_,
        protected_pref_names_,
        protected_prefs_altered);
  }

  if (protected_prefs_need_cleanup) {
    // Schedule a cleanup of the |unprotected_pref_names_| from the protected
    // prefs once the unprotected prefs were successfully written to disk (or
    // do it immediately if |!unprotected_prefs_altered|).
    ScheduleSourcePrefStoreCleanup(
        register_on_successful_unprotected_store_write_callback_,
        protected_store_cleaner_,
        unprotected_pref_names_,
        unprotected_prefs_altered);
  }
}
+
+} // namespace
+
// Public entry point: wires a TrackedPreferencesMigrator between the two
// filters. The local scoped_refptr intentionally goes out of scope here — the
// migrator stays alive because its constructor binds |this|-owning callbacks
// into the filters' interceptors.
void SetupTrackedPreferencesMigration(
    const std::set<std::string>& unprotected_pref_names,
    const std::set<std::string>& protected_pref_names,
    const base::Callback<void(const std::string& key)>&
        unprotected_store_cleaner,
    const base::Callback<void(const std::string& key)>& protected_store_cleaner,
    const base::Callback<void(const base::Closure&)>&
        register_on_successful_unprotected_store_write_callback,
    const base::Callback<void(const base::Closure&)>&
        register_on_successful_protected_store_write_callback,
    InterceptablePrefFilter* unprotected_pref_filter,
    InterceptablePrefFilter* protected_pref_filter) {
  scoped_refptr<TrackedPreferencesMigrator> prefs_migrator(
      new TrackedPreferencesMigrator(
          unprotected_pref_names,
          protected_pref_names,
          unprotected_store_cleaner,
          protected_store_cleaner,
          register_on_successful_unprotected_store_write_callback,
          register_on_successful_protected_store_write_callback,
          unprotected_pref_filter,
          protected_pref_filter));
}
diff --git a/chrome/browser/prefs/tracked/tracked_preferences_migration.h b/chrome/browser/prefs/tracked/tracked_preferences_migration.h
new file mode 100644
index 0000000..348f844
--- /dev/null
+++ b/chrome/browser/prefs/tracked/tracked_preferences_migration.h
@@ -0,0 +1,37 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CHROME_BROWSER_PREFS_TRACKED_TRACKED_PREFERENCES_MIGRATION_H_
+#define CHROME_BROWSER_PREFS_TRACKED_TRACKED_PREFERENCES_MIGRATION_H_
+
+#include <set>
+#include <string>
+
+#include "base/callback_forward.h"
+
+class InterceptablePrefFilter;
+
+// Sets up InterceptablePrefFilter::FilterOnLoadInterceptors on
+// |unprotected_pref_filter| and |protected_pref_filter| which prevents each
+// filter from running their on load operations until the interceptors decide to
+// hand the prefs back to them (after migration is complete).
+// |(un)protected_store_cleaner| and
+// |register_on_successful_(un)protected_store_write_callback| are used to do
+// post-migration cleanup tasks. Those should be bound to weak pointers to avoid
+// blocking shutdown. The migration framework is resilient to a failed cleanup
+// (it will simply try again in the next Chrome run).
+void SetupTrackedPreferencesMigration(
+ const std::set<std::string>& unprotected_pref_names,
+ const std::set<std::string>& protected_pref_names,
+ const base::Callback<void(const std::string& key)>&
+ unprotected_store_cleaner,
+ const base::Callback<void(const std::string& key)>& protected_store_cleaner,
+ const base::Callback<void(const base::Closure&)>&
+ register_on_successful_unprotected_store_write_callback,
+ const base::Callback<void(const base::Closure&)>&
+ register_on_successful_protected_store_write_callback,
+ InterceptablePrefFilter* unprotected_pref_filter,
+ InterceptablePrefFilter* protected_pref_filter);
+
+#endif // CHROME_BROWSER_PREFS_TRACKED_TRACKED_PREFERENCES_MIGRATION_H_
diff --git a/chrome/browser/prefs/tracked/tracked_preferences_migration_unittest.cc b/chrome/browser/prefs/tracked/tracked_preferences_migration_unittest.cc
new file mode 100644
index 0000000..1c93a39
--- /dev/null
+++ b/chrome/browser/prefs/tracked/tracked_preferences_migration_unittest.cc
@@ -0,0 +1,501 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "chrome/browser/prefs/tracked/tracked_preferences_migration.h"
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/values.h"
+#include "chrome/browser/prefs/interceptable_pref_filter.h"
+#include "chrome/browser/prefs/tracked/tracked_preferences_migration.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace {
+
+// An unprotected pref.
+const char kUnprotectedPref[] = "unprotected";
+// A protected pref.
+const char kProtectedPref[] = "protected";
+// A protected pref which is initially stored in the unprotected store.
+const char kPreviouslyUnprotectedPref[] = "previously.unprotected";
+// An unprotected pref which is initially stored in the protected store.
+const char kPreviouslyProtectedPref[] = "previously.protected";
+
+const char kUnprotectedPrefValue[] = "unprotected_value";
+const char kProtectedPrefValue[] = "protected_value";
+const char kPreviouslyUnprotectedPrefValue[] = "previously_unprotected_value";
+const char kPreviouslyProtectedPrefValue[] = "previously_protected_value";
+
// A simple InterceptablePrefFilter which doesn't do anything but hand the prefs
// back downstream in FinalizeFilterOnLoad. The FilterUpdate/FilterSerializeData
// overrides fail the test if invoked, as the migration under test should never
// trigger them.
class SimpleInterceptablePrefFilter : public InterceptablePrefFilter {
 public:
  // PrefFilter remaining implementation.
  virtual void FilterUpdate(const std::string& path) OVERRIDE {
    ADD_FAILURE();
  }
  virtual void FilterSerializeData(
      const base::DictionaryValue* pref_store_contents) OVERRIDE {
    ADD_FAILURE();
  }

 private:
  // InterceptablePrefFilter implementation: pass the prefs straight through.
  virtual void FinalizeFilterOnLoad(
      const PostFilterOnLoadCallback& post_filter_on_load_callback,
      scoped_ptr<base::DictionaryValue> pref_store_contents,
      bool prefs_altered) OVERRIDE {
    post_filter_on_load_callback.Run(pref_store_contents.Pass(), prefs_altered);
  }
};
+
+// A test fixture designed to like this:
+// 1) Set up initial store prefs with PresetStoreValue().
+// 2) Hand both sets of prefs to the migrator via HandPrefsToMigrator().
+// 3) Migration completes synchronously when the second store hands its prefs
+// over.
+// 4) Verifications can be made via various methods of this fixture.
+// This fixture should not be re-used (i.e., only one migration should be
+// performed per test).
+class TrackedPreferencesMigrationTest : public testing::Test {
+ public:
+ enum MockPrefStoreID {
+ MOCK_UNPROTECTED_PREF_STORE,
+ MOCK_PROTECTED_PREF_STORE,
+ };
+
+ TrackedPreferencesMigrationTest()
+ : unprotected_prefs_(new base::DictionaryValue),
+ protected_prefs_(new base::DictionaryValue),
+ migration_modified_unprotected_store_(false),
+ migration_modified_protected_store_(false),
+ unprotected_store_migration_complete_(false),
+ protected_store_migration_complete_(false) {}
+
+ virtual void SetUp() OVERRIDE {
+ std::set<std::string> unprotected_pref_names;
+ std::set<std::string> protected_pref_names;
+ unprotected_pref_names.insert(kUnprotectedPref);
+ unprotected_pref_names.insert(kPreviouslyProtectedPref);
+ protected_pref_names.insert(kProtectedPref);
+ protected_pref_names.insert(kPreviouslyUnprotectedPref);
+
+ SetupTrackedPreferencesMigration(
+ unprotected_pref_names,
+ protected_pref_names,
+ base::Bind(&TrackedPreferencesMigrationTest::RemovePathFromStore,
+ base::Unretained(this), MOCK_UNPROTECTED_PREF_STORE),
+ base::Bind(&TrackedPreferencesMigrationTest::RemovePathFromStore,
+ base::Unretained(this), MOCK_PROTECTED_PREF_STORE),
+ base::Bind(
+ &TrackedPreferencesMigrationTest::RegisterSuccessfulWriteClosure,
+ base::Unretained(this), MOCK_UNPROTECTED_PREF_STORE),
+ base::Bind(
+ &TrackedPreferencesMigrationTest::RegisterSuccessfulWriteClosure,
+ base::Unretained(this), MOCK_PROTECTED_PREF_STORE),
+ &mock_unprotected_pref_filter_,
+ &mock_protected_pref_filter_);
+
+ // Verify initial expectations are met.
+ EXPECT_TRUE(HasPrefs(MOCK_UNPROTECTED_PREF_STORE));
+ EXPECT_TRUE(HasPrefs(MOCK_PROTECTED_PREF_STORE));
+ EXPECT_FALSE(
+ WasOnSuccessfulWriteCallbackRegistered(MOCK_UNPROTECTED_PREF_STORE));
+ EXPECT_FALSE(
+ WasOnSuccessfulWriteCallbackRegistered(MOCK_PROTECTED_PREF_STORE));
+
+ std::vector<std::pair<std::string, std::string> > no_prefs_stored;
+ VerifyValuesStored(MOCK_UNPROTECTED_PREF_STORE, no_prefs_stored);
+ VerifyValuesStored(MOCK_PROTECTED_PREF_STORE, no_prefs_stored);
+ }
+
+ protected:
+ // Sets |key| to |value| in the test store identified by |store_id| before
+ // migration begins.
+ void PresetStoreValue(MockPrefStoreID store_id,
+ const std::string& key,
+ const std::string value) {
+ base::DictionaryValue* store = NULL;
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ store = unprotected_prefs_.get();
+ break;
+ case MOCK_PROTECTED_PREF_STORE:
+ store = protected_prefs_.get();
+ break;
+ }
+ DCHECK(store);
+
+ store->SetString(key, value);
+ }
+
+ // Returns true if the store opposite to |store_id| is observed for its next
+ // successful write.
+ bool WasOnSuccessfulWriteCallbackRegistered(MockPrefStoreID store_id) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ return !protected_store_successful_write_callback_.is_null();
+ case MOCK_PROTECTED_PREF_STORE:
+ return !unprotected_store_successful_write_callback_.is_null();
+ }
+ NOTREACHED();
+ return false;
+ }
+
+ // Verifies that the (key, value) pairs in |expected_prefs_in_store| are found
+ // in the store identified by |store_id|.
+ void VerifyValuesStored(
+ MockPrefStoreID store_id,
+ const std::vector<std::pair<std::string, std::string> >&
+ expected_prefs_in_store) {
+ base::DictionaryValue* store = NULL;
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ store = unprotected_prefs_.get();
+ break;
+ case MOCK_PROTECTED_PREF_STORE:
+ store = protected_prefs_.get();
+ break;
+ }
+ DCHECK(store);
+
+ for (std::vector<std::pair<std::string, std::string> >::const_iterator it =
+ expected_prefs_in_store.begin();
+ it != expected_prefs_in_store.end(); ++it) {
+ std::string val;
+ EXPECT_TRUE(store->GetString(it->first, &val));
+ EXPECT_EQ(it->second, val);
+ }
+ }
+
+ // Both stores need to hand their prefs over in order for migration to kick
+ // in.
+ void HandPrefsToMigrator(MockPrefStoreID store_id) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ mock_unprotected_pref_filter_.FilterOnLoad(
+ base::Bind(&TrackedPreferencesMigrationTest::GetPrefsBack,
+ base::Unretained(this),
+ MOCK_UNPROTECTED_PREF_STORE),
+ unprotected_prefs_.Pass());
+ break;
+ case MOCK_PROTECTED_PREF_STORE:
+ mock_protected_pref_filter_.FilterOnLoad(
+ base::Bind(&TrackedPreferencesMigrationTest::GetPrefsBack,
+ base::Unretained(this),
+ MOCK_PROTECTED_PREF_STORE),
+ protected_prefs_.Pass());
+ break;
+ }
+ }
+
+ bool HasPrefs(MockPrefStoreID store_id) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ return unprotected_prefs_;
+ case MOCK_PROTECTED_PREF_STORE:
+ return protected_prefs_;
+ }
+ NOTREACHED();
+ return false;
+ }
+
+ bool StoreModifiedByMigration(MockPrefStoreID store_id) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ return migration_modified_unprotected_store_;
+ case MOCK_PROTECTED_PREF_STORE:
+ return migration_modified_protected_store_;
+ }
+ NOTREACHED();
+ return false;
+ }
+
+ bool MigrationCompleted() {
+ return unprotected_store_migration_complete_ &&
+ protected_store_migration_complete_;
+ }
+
+ void SimulateSuccessfulWrite(MockPrefStoreID store_id) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ EXPECT_FALSE(unprotected_store_successful_write_callback_.is_null());
+ unprotected_store_successful_write_callback_.Run();
+ unprotected_store_successful_write_callback_.Reset();
+ break;
+ case MOCK_PROTECTED_PREF_STORE:
+ EXPECT_FALSE(protected_store_successful_write_callback_.is_null());
+ protected_store_successful_write_callback_.Run();
+ protected_store_successful_write_callback_.Reset();
+ break;
+ }
+ }
+
+ private:
+ void RegisterSuccessfulWriteClosure(
+ MockPrefStoreID store_id,
+ const base::Closure& successful_write_closure) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ EXPECT_TRUE(unprotected_store_successful_write_callback_.is_null());
+ unprotected_store_successful_write_callback_ = successful_write_closure;
+ break;
+ case MOCK_PROTECTED_PREF_STORE:
+ EXPECT_TRUE(protected_store_successful_write_callback_.is_null());
+ protected_store_successful_write_callback_ = successful_write_closure;
+ break;
+ }
+ }
+
+ // Helper given as an InterceptablePrefFilter::FinalizeFilterOnLoadCallback
+ // to the migrator to be invoked when it's done.
+ void GetPrefsBack(MockPrefStoreID store_id,
+ scoped_ptr<base::DictionaryValue> prefs,
+ bool prefs_altered) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ EXPECT_FALSE(unprotected_prefs_);
+ unprotected_prefs_ = prefs.Pass();
+ migration_modified_unprotected_store_ = prefs_altered;
+ unprotected_store_migration_complete_ = true;
+ break;
+ case MOCK_PROTECTED_PREF_STORE:
+ EXPECT_FALSE(protected_prefs_);
+ protected_prefs_ = prefs.Pass();
+ migration_modified_protected_store_ = prefs_altered;
+ protected_store_migration_complete_ = true;
+ break;
+ }
+ }
+
+ // Helper given as a cleaning callback to the migrator.
+ void RemovePathFromStore(MockPrefStoreID store_id, const std::string& key) {
+ switch (store_id) {
+ case MOCK_UNPROTECTED_PREF_STORE:
+ ASSERT_TRUE(unprotected_prefs_);
+ unprotected_prefs_->RemovePath(key, NULL);
+ break;
+ case MOCK_PROTECTED_PREF_STORE:
+ ASSERT_TRUE(protected_prefs_);
+ protected_prefs_->RemovePath(key, NULL);
+ break;
+ }
+ }
+
+ scoped_ptr<base::DictionaryValue> unprotected_prefs_;
+ scoped_ptr<base::DictionaryValue> protected_prefs_;
+
+ SimpleInterceptablePrefFilter mock_unprotected_pref_filter_;
+ SimpleInterceptablePrefFilter mock_protected_pref_filter_;
+
+ base::Closure unprotected_store_successful_write_callback_;
+ base::Closure protected_store_successful_write_callback_;
+
+ bool migration_modified_unprotected_store_;
+ bool migration_modified_protected_store_;
+
+ bool unprotected_store_migration_complete_;
+ bool protected_store_migration_complete_;
+};
+
+} // namespace
+
// When every pref is already in its correct store, migration must be a no-op:
// no store altered, no cleanup scheduled, all values untouched.
TEST_F(TrackedPreferencesMigrationTest, NoMigrationRequired) {
  PresetStoreValue(MOCK_UNPROTECTED_PREF_STORE, kUnprotectedPref,
                   kUnprotectedPrefValue);
  PresetStoreValue(MOCK_PROTECTED_PREF_STORE, kProtectedPref,
                   kProtectedPrefValue);

  // Hand unprotected prefs to the migrator which should wait for the protected
  // prefs.
  HandPrefsToMigrator(MOCK_UNPROTECTED_PREF_STORE);
  EXPECT_FALSE(HasPrefs(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_TRUE(HasPrefs(MOCK_PROTECTED_PREF_STORE));
  EXPECT_FALSE(MigrationCompleted());

  // Hand protected prefs to the migrator which should proceed with the
  // migration synchronously.
  HandPrefsToMigrator(MOCK_PROTECTED_PREF_STORE);
  EXPECT_TRUE(MigrationCompleted());

  // Prefs should have been handed back over.
  EXPECT_TRUE(HasPrefs(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_TRUE(HasPrefs(MOCK_PROTECTED_PREF_STORE));
  EXPECT_FALSE(
      WasOnSuccessfulWriteCallbackRegistered(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_FALSE(
      WasOnSuccessfulWriteCallbackRegistered(MOCK_PROTECTED_PREF_STORE));
  EXPECT_FALSE(StoreModifiedByMigration(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_FALSE(StoreModifiedByMigration(MOCK_PROTECTED_PREF_STORE));

  std::vector<std::pair<std::string, std::string> > expected_unprotected_values;
  expected_unprotected_values.push_back(
      std::make_pair(kUnprotectedPref, kUnprotectedPrefValue));
  VerifyValuesStored(MOCK_UNPROTECTED_PREF_STORE, expected_unprotected_values);

  std::vector<std::pair<std::string, std::string> > expected_protected_values;
  expected_protected_values.push_back(
      std::make_pair(kProtectedPref, kProtectedPrefValue));
  VerifyValuesStored(MOCK_PROTECTED_PREF_STORE, expected_protected_values);
}
+
+TEST_F(TrackedPreferencesMigrationTest, FullMigration) {
+ PresetStoreValue(
+ MOCK_UNPROTECTED_PREF_STORE, kUnprotectedPref, kUnprotectedPrefValue);
+ PresetStoreValue(MOCK_UNPROTECTED_PREF_STORE,
+ kPreviouslyUnprotectedPref,
+ kPreviouslyUnprotectedPrefValue);
+ PresetStoreValue(
+ MOCK_PROTECTED_PREF_STORE, kProtectedPref, kProtectedPrefValue);
+ PresetStoreValue(MOCK_PROTECTED_PREF_STORE,
+ kPreviouslyProtectedPref,
+ kPreviouslyProtectedPrefValue);
+
+ HandPrefsToMigrator(MOCK_UNPROTECTED_PREF_STORE);
+ EXPECT_FALSE(HasPrefs(MOCK_UNPROTECTED_PREF_STORE));
+ EXPECT_TRUE(HasPrefs(MOCK_PROTECTED_PREF_STORE));
+ EXPECT_FALSE(MigrationCompleted());
+
+ HandPrefsToMigrator(MOCK_PROTECTED_PREF_STORE);
+ EXPECT_TRUE(MigrationCompleted());
+
+ EXPECT_TRUE(HasPrefs(MOCK_UNPROTECTED_PREF_STORE));
+ EXPECT_TRUE(HasPrefs(MOCK_PROTECTED_PREF_STORE));
+ EXPECT_TRUE(
+ WasOnSuccessfulWriteCallbackRegistered(MOCK_UNPROTECTED_PREF_STORE));
+ EXPECT_TRUE(
+ WasOnSuccessfulWriteCallbackRegistered(MOCK_PROTECTED_PREF_STORE));
+ EXPECT_TRUE(StoreModifiedByMigration(MOCK_UNPROTECTED_PREF_STORE));
+ EXPECT_TRUE(StoreModifiedByMigration(MOCK_PROTECTED_PREF_STORE));
+
+ // Values should have been migrated to their store, but migrated values should
+ // still remain in the source store until cleanup tasks are later invoked.
+ {
+ std::vector<std::pair<std::string, std::string> >
+ expected_unprotected_values;
+ expected_unprotected_values.push_back(std::make_pair(
+ kUnprotectedPref, kUnprotectedPrefValue));
+ expected_unprotected_values.push_back(std::make_pair(
+ kPreviouslyProtectedPref, kPreviouslyProtectedPrefValue));
+ expected_unprotected_values.push_back(std::make_pair(
+ kPreviouslyUnprotectedPref, kPreviouslyUnprotectedPrefValue));
+ VerifyValuesStored(MOCK_UNPROTECTED_PREF_STORE,
+ expected_unprotected_values);
+
+ std::vector<std::pair<std::string, std::string> > expected_protected_values;
+ expected_protected_values.push_back(std::make_pair(
+ kProtectedPref, kProtectedPrefValue));
+ expected_protected_values.push_back(std::make_pair(
+ kPreviouslyUnprotectedPref, kPreviouslyUnprotectedPrefValue));
+ expected_unprotected_values.push_back(std::make_pair(
+ kPreviouslyProtectedPref, kPreviouslyProtectedPrefValue));
+ VerifyValuesStored(MOCK_PROTECTED_PREF_STORE, expected_protected_values);
+ }
+
+ // A successful write of the protected pref store should result in a clean up
+ // of the unprotected store.
+ SimulateSuccessfulWrite(MOCK_PROTECTED_PREF_STORE);
+
+ {
+ std::vector<std::pair<std::string, std::string> >
+ expected_unprotected_values;
+ expected_unprotected_values.push_back(std::make_pair(
+ kUnprotectedPref, kUnprotectedPrefValue));
+ expected_unprotected_values.push_back(std::make_pair(
+ kPreviouslyProtectedPref, kPreviouslyProtectedPrefValue));
+ VerifyValuesStored(MOCK_UNPROTECTED_PREF_STORE,
+ expected_unprotected_values);
+
+ std::vector<std::pair<std::string, std::string> > expected_protected_values;
+ expected_protected_values.push_back(std::make_pair(
+ kProtectedPref, kProtectedPrefValue));
+ expected_protected_values.push_back(std::make_pair(
+ kPreviouslyUnprotectedPref, kPreviouslyUnprotectedPrefValue));
+ expected_unprotected_values.push_back(std::make_pair(
+ kPreviouslyProtectedPref, kPreviouslyProtectedPrefValue));
+ VerifyValuesStored(MOCK_PROTECTED_PREF_STORE, expected_protected_values);
+ }
+
+ SimulateSuccessfulWrite(MOCK_UNPROTECTED_PREF_STORE);
+
+ {
+ std::vector<std::pair<std::string, std::string> >
+ expected_unprotected_values;
+ expected_unprotected_values.push_back(std::make_pair(
+ kUnprotectedPref, kUnprotectedPrefValue));
+ expected_unprotected_values.push_back(std::make_pair(
+ kPreviouslyProtectedPref, kPreviouslyProtectedPrefValue));
+ VerifyValuesStored(MOCK_UNPROTECTED_PREF_STORE,
+ expected_unprotected_values);
+
+ std::vector<std::pair<std::string, std::string> > expected_protected_values;
+ expected_protected_values.push_back(std::make_pair(
+ kProtectedPref, kProtectedPrefValue));
+ expected_protected_values.push_back(std::make_pair(
+ kPreviouslyUnprotectedPref, kPreviouslyUnprotectedPrefValue));
+ VerifyValuesStored(MOCK_PROTECTED_PREF_STORE, expected_protected_values);
+ }
+}
+
// Simulates a previous run that migrated everything but crashed before
// cleanup: both stores already hold both migrated values, so the migrator must
// only clean up the leftovers — synchronously, since no store was altered.
TEST_F(TrackedPreferencesMigrationTest, CleanupOnly) {
  // Already migrated; only cleanup needed.
  PresetStoreValue(
      MOCK_UNPROTECTED_PREF_STORE, kUnprotectedPref, kUnprotectedPrefValue);
  PresetStoreValue(MOCK_UNPROTECTED_PREF_STORE,
                   kPreviouslyProtectedPref,
                   kPreviouslyProtectedPrefValue);
  PresetStoreValue(MOCK_UNPROTECTED_PREF_STORE,
                   kPreviouslyUnprotectedPref,
                   kPreviouslyUnprotectedPrefValue);
  PresetStoreValue(
      MOCK_PROTECTED_PREF_STORE, kProtectedPref, kProtectedPrefValue);
  PresetStoreValue(MOCK_PROTECTED_PREF_STORE,
                   kPreviouslyProtectedPref,
                   kPreviouslyProtectedPrefValue);
  PresetStoreValue(MOCK_PROTECTED_PREF_STORE,
                   kPreviouslyUnprotectedPref,
                   kPreviouslyUnprotectedPrefValue);

  HandPrefsToMigrator(MOCK_UNPROTECTED_PREF_STORE);
  EXPECT_FALSE(HasPrefs(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_TRUE(HasPrefs(MOCK_PROTECTED_PREF_STORE));
  EXPECT_FALSE(MigrationCompleted());

  HandPrefsToMigrator(MOCK_PROTECTED_PREF_STORE);
  EXPECT_TRUE(MigrationCompleted());

  EXPECT_TRUE(HasPrefs(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_TRUE(HasPrefs(MOCK_PROTECTED_PREF_STORE));
  // No write callbacks should be registered: cleanup happened immediately
  // because neither destination store was altered.
  EXPECT_FALSE(
      WasOnSuccessfulWriteCallbackRegistered(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_FALSE(
      WasOnSuccessfulWriteCallbackRegistered(MOCK_PROTECTED_PREF_STORE));
  EXPECT_FALSE(StoreModifiedByMigration(MOCK_UNPROTECTED_PREF_STORE));
  EXPECT_FALSE(StoreModifiedByMigration(MOCK_PROTECTED_PREF_STORE));

  // Cleanup should happen synchronously if the values were already present in
  // their destination stores.
  {
    std::vector<std::pair<std::string, std::string> >
        expected_unprotected_values;
    expected_unprotected_values.push_back(std::make_pair(
        kUnprotectedPref, kUnprotectedPrefValue));
    expected_unprotected_values.push_back(std::make_pair(
        kPreviouslyProtectedPref, kPreviouslyProtectedPrefValue));
    VerifyValuesStored(MOCK_UNPROTECTED_PREF_STORE,
                       expected_unprotected_values);

    std::vector<std::pair<std::string, std::string> > expected_protected_values;
    expected_protected_values.push_back(std::make_pair(
        kProtectedPref, kProtectedPrefValue));
    expected_protected_values.push_back(std::make_pair(
        kPreviouslyUnprotectedPref, kPreviouslyUnprotectedPrefValue));
    VerifyValuesStored(MOCK_PROTECTED_PREF_STORE, expected_protected_values);
  }
}
diff --git a/chrome/chrome_browser.gypi b/chrome/chrome_browser.gypi
index d04f617..f6c194d 100644
--- a/chrome/chrome_browser.gypi
+++ b/chrome/chrome_browser.gypi
@@ -1542,6 +1542,8 @@
'browser/prefs/command_line_pref_store.h',
'browser/prefs/incognito_mode_prefs.cc',
'browser/prefs/incognito_mode_prefs.h',
+ 'browser/prefs/interceptable_pref_filter.cc',
+ 'browser/prefs/interceptable_pref_filter.h',
'browser/prefs/pref_hash_calculator.cc',
'browser/prefs/pref_hash_calculator.h',
'browser/prefs/pref_hash_filter.cc',
@@ -1583,6 +1585,8 @@
'browser/prefs/tracked/tracked_preference.h',
'browser/prefs/tracked/tracked_preference_helper.cc',
'browser/prefs/tracked/tracked_preference_helper.h',
+ 'browser/prefs/tracked/tracked_preferences_migration.cc',
+ 'browser/prefs/tracked/tracked_preferences_migration.h',
'browser/prefs/tracked/tracked_split_preference.cc',
'browser/prefs/tracked/tracked_split_preference.h',
'browser/prerender/external_prerender_handler_android.cc',
diff --git a/chrome/chrome_tests_unit.gypi b/chrome/chrome_tests_unit.gypi
index 2193656..4616acd 100644
--- a/chrome/chrome_tests_unit.gypi
+++ b/chrome/chrome_tests_unit.gypi
@@ -1167,6 +1167,7 @@
'browser/prefs/tracked/pref_hash_calculator_helper_win_unittest.cc',
'browser/prefs/tracked/pref_service_hash_store_contents_unittest.cc',
'browser/prefs/tracked/segregated_pref_store_unittest.cc',
+ 'browser/prefs/tracked/tracked_preferences_migration_unittest.cc',
'browser/prerender/prerender_history_unittest.cc',
'browser/prerender/prerender_tracker_unittest.cc',
'browser/prerender/prerender_unittest.cc',