summaryrefslogtreecommitdiffstats
path: root/extensions/browser/computed_hashes.cc
diff options
context:
space:
mode:
authorasargent@chromium.org <asargent@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2014-05-16 05:22:56 +0000
committerasargent@chromium.org <asargent@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2014-05-16 05:22:56 +0000
commitabd4cb2a97c4a40bde722099b196178f439edf4d (patch)
treeb104fe77cf8a869ab1e0c4af16e3fa7f412f1609 /extensions/browser/computed_hashes.cc
parent3ec0c24c6058c6f417d9fe41b3ce1fd5afd52086 (diff)
downloadchromium_src-abd4cb2a97c4a40bde722099b196178f439edf4d.zip
chromium_src-abd4cb2a97c4a40bde722099b196178f439edf4d.tar.gz
chromium_src-abd4cb2a97c4a40bde722099b196178f439edf4d.tar.bz2
A bunch of remaining parts of extension content verification (Reland)
- The real guts of content_hash_fetcher.cc, which fetches the verified_contents.json file from the webstore if needed and also runs tasks to compute and cache the block-level hashes of all files in an extension.
- The real guts of content_hash_reader.cc, which uses the work done by the content_hash_fetcher during validation of extension file content as it's read off of disk at time of use.
- Code to avoid verifying transcoded files (images used in browser process, and message catalogs).
- Don't allow downgrade of mode via kForceFieldTrials command line switch.
- Various bits of plumbing to support all of the above.

This is a re-land with fixes; original review was: https://codereview.chromium.org/289533003

BUG=369895,373854
R=rockot@chromium.org

Review URL: https://codereview.chromium.org/288273004

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@270937 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'extensions/browser/computed_hashes.cc')
-rw-r--r--extensions/browser/computed_hashes.cc129
1 files changed, 129 insertions, 0 deletions
diff --git a/extensions/browser/computed_hashes.cc b/extensions/browser/computed_hashes.cc
new file mode 100644
index 0000000..4a8a852
--- /dev/null
+++ b/extensions/browser/computed_hashes.cc
@@ -0,0 +1,129 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "extensions/browser/computed_hashes.h"
+
+#include "base/base64.h"
+#include "base/file_util.h"
+#include "base/files/file_path.h"
+#include "base/json/json_reader.h"
+#include "base/json/json_writer.h"
+
+namespace {
+// JSON dictionary keys used in the computed_hashes.json file format that
+// InitFromFile() reads and AddHashes()/WriteToFile() produce.
+const char kPathKey[] = "path";
+const char kBlockSizeKey[] = "block_size";
+const char kBlockHashesKey[] = "block_hashes";
+}
+
+namespace extensions {
+
+// The Reader starts empty; |data_| is populated by InitFromFile().
+ComputedHashes::Reader::Reader() {
+}
+ComputedHashes::Reader::~Reader() {
+}
+
+// Parses |path| as a JSON list of per-file records, each a dictionary with
+// "path" (relative file path), "block_size" (int), and "block_hashes" (list
+// of base64-encoded hashes), and loads them into |data_|.
+// Returns false if the file cannot be read or any record is malformed; in
+// that case |data_| may contain a partial result and should not be used.
+bool ComputedHashes::Reader::InitFromFile(const base::FilePath& path) {
+  std::string contents;
+  if (!base::ReadFileToString(path, &contents))
+    return false;
+
+  base::ListValue* all_hashes = NULL;
+  scoped_ptr<base::Value> value(base::JSONReader::Read(contents));
+  if (!value.get() || !value->GetAsList(&all_hashes))
+    return false;
+
+  for (size_t i = 0; i < all_hashes->GetSize(); i++) {
+    base::DictionaryValue* dictionary = NULL;
+    if (!all_hashes->GetDictionary(i, &dictionary))
+      return false;
+
+    std::string relative_path_utf8;
+    if (!dictionary->GetString(kPathKey, &relative_path_utf8))
+      return false;
+
+    int block_size;
+    if (!dictionary->GetInteger(kBlockSizeKey, &block_size))
+      return false;
+    // Only positive multiples of 1024 are accepted as block sizes.
+    if (block_size <= 0 || ((block_size % 1024) != 0)) {
+      LOG(ERROR) << "Invalid block size: " << block_size;
+      return false;
+    }
+
+    base::ListValue* hashes_list = NULL;
+    if (!dictionary->GetList(kBlockHashesKey, &hashes_list))
+      return false;
+
+    base::FilePath relative_path =
+        base::FilePath::FromUTF8Unsafe(relative_path_utf8);
+
+    // Single map lookup; clear any hashes from an earlier duplicate entry so
+    // a later record for the same path fully replaces the earlier one.
+    HashInfo& info = data_[relative_path];
+    info.first = block_size;
+    info.second.clear();
+    std::vector<std::string>* hashes = &info.second;
+
+    for (size_t j = 0; j < hashes_list->GetSize(); j++) {
+      std::string encoded;
+      if (!hashes_list->GetString(j, &encoded))
+        return false;
+
+      // Decode in place into the vector's new back element.
+      hashes->push_back(std::string());
+      std::string* decoded = &hashes->back();
+      if (!base::Base64Decode(encoded, decoded)) {
+        hashes->clear();
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+// Looks up the previously loaded hash info for |relative_path|. On success
+// copies the block size and block hashes into the out-params and returns
+// true; returns false (leaving the out-params untouched) if the path was
+// not present in the parsed file.
+bool ComputedHashes::Reader::GetHashes(const base::FilePath& relative_path,
+                                       int* block_size,
+                                       std::vector<std::string>* hashes) {
+  std::map<base::FilePath, HashInfo>::iterator found =
+      data_.find(relative_path);
+  if (found == data_.end())
+    return false;
+  *block_size = found->second.first;
+  *hashes = found->second.second;
+  return true;
+}
+
+// The Writer accumulates records in |file_list_| via AddHashes() and
+// serializes them in WriteToFile().
+ComputedHashes::Writer::Writer() {
+}
+ComputedHashes::Writer::~Writer() {
+}
+
+// Queues one file's block hashes for serialization. Each hash is stored
+// base64-encoded under "block_hashes" alongside the file's relative path
+// and block size; nothing is written to disk until WriteToFile().
+void ComputedHashes::Writer::AddHashes(const base::FilePath& relative_path,
+                                       int block_size,
+                                       const std::vector<std::string>& hashes) {
+  base::ListValue* block_hashes = new base::ListValue();
+  base::DictionaryValue* dict = new base::DictionaryValue();
+  dict->SetString(kPathKey, relative_path.AsUTF8Unsafe());
+  dict->SetInteger(kBlockSizeKey, block_size);
+  dict->Set(kBlockHashesKey, block_hashes);
+  // |file_list_| takes ownership of |dict| (which owns |block_hashes|).
+  file_list_.Append(dict);
+
+  std::vector<std::string>::const_iterator hash = hashes.begin();
+  for (; hash != hashes.end(); ++hash) {
+    std::string base64;
+    base::Base64Encode(*hash, &base64);
+    block_hashes->AppendString(base64);
+  }
+}
+
+// Serializes every record added via AddHashes() as a JSON list and writes
+// it to |path|. Returns false on JSON serialization failure or on a short
+// or failed write.
+bool ComputedHashes::Writer::WriteToFile(const base::FilePath& path) {
+  std::string json;
+  if (!base::JSONWriter::Write(&file_list_, &json))
+    return false;
+  int written = base::WriteFile(path, json.data(), json.size());
+  // base::WriteFile returns -1 on failure; the unsigned cast makes -1 (and
+  // any partial write) compare unequal to json.size().
+  if (static_cast<unsigned>(written) != json.size()) {
+    LOG(ERROR) << "Error writing " << path.MaybeAsASCII()
+               << " ; write result:" << written << " expected:" << json.size();
+    return false;
+  }
+  return true;
+}
+
+} // namespace extensions