// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "extensions/browser/computed_hashes.h"

#include "base/base64.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/json/json_reader.h"
#include "base/json/json_writer.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/values.h"

namespace {

const char kPathKey[] = "path";
const char kBlockSizeKey[] = "block_size";
const char kBlockHashesKey[] = "block_hashes";

}  // namespace

namespace extensions {

ComputedHashes::Reader::Reader() {
}

ComputedHashes::Reader::~Reader() {
}

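// Parses the computed hashes file: a JSON list of per-file dictionaries keyed
// by "path", "block_size", and "block_hashes" (base64-encoded per-block
// hashes).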
bool ComputedHashes::Reader::InitFromFile(const base::FilePath& path) {
  std::string contents;
  if (!base::ReadFileToString(path, &contents))
    return false;

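  // The top-level JSON value must be a list with one dictionary per file.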
  base::ListValue* all_hashes = NULL;
  scoped_ptr<base::Value> value(base::JSONReader::Read(contents));
  if (!value.get() || !value->GetAsList(&all_hashes))
    return false;

  for (size_t i = 0; i < all_hashes->GetSize(); i++) {
    base::DictionaryValue* dictionary = NULL;
    if (!all_hashes->GetDictionary(i, &dictionary))
      return false;

    std::string relative_path_utf8;
    if (!dictionary->GetString(kPathKey, &relative_path_utf8))
      return false;

    int block_size;
    if (!dictionary->GetInteger(kBlockSizeKey, &block_size))
      return false;
    // The block size must be a positive multiple of 1024 bytes.
    if (block_size <= 0 || ((block_size % 1024) != 0)) {
      LOG(ERROR) << "Invalid block size: " << block_size;
      return false;
    }

    base::ListValue* hashes_list = NULL;
    if (!dictionary->GetList(kBlockHashesKey, &hashes_list))
      return false;

    base::FilePath relative_path =
        base::FilePath::FromUTF8Unsafe(relative_path_utf8);
    data_[relative_path] = HashInfo(block_size, std::vector<std::string>());
    std::vector<std::string>* hashes = &(data_[relative_path].second);

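    // Each list entry is the base64 encoding of one block's raw hash bytes.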
    for (size_t j = 0; j < hashes_list->GetSize(); j++) {
      std::string encoded;
      if (!hashes_list->GetString(j, &encoded))
        return false;

      hashes->push_back(std::string());
      std::string* decoded = &hashes->back();
      if (!base::Base64Decode(encoded, decoded)) {
        hashes->clear();
        return false;
      }
    }
  }
  return true;
}
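
// Looks up the block size and block hashes previously read for
// |relative_path| by InitFromFile. Returns false if there is no entry.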
bool ComputedHashes::Reader::GetHashes(const base::FilePath& relative_path,
                                       int* block_size,
                                       std::vector<std::string>* hashes) {
  std::map<base::FilePath, HashInfo>::iterator i = data_.find(relative_path);
  if (i == data_.end())
    return false;
  HashInfo& info = i->second;
  *block_size = info.first;
  *hashes = info.second;
  return true;
}

ComputedHashes::Writer::Writer() {
}

ComputedHashes::Writer::~Writer() {
}

void ComputedHashes::Writer::AddHashes(const base::FilePath& relative_path,
                                       int block_size,
                                       const std::vector<std::string>& hashes) {
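  // |file_list_| takes ownership of |dict| via Append(), and |dict| takes
  // ownership of |block_hashes| via Set().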
  base::DictionaryValue* dict = new base::DictionaryValue();
  base::ListValue* block_hashes = new base::ListValue();
  file_list_.Append(dict);

  dict->SetString(kPathKey, relative_path.AsUTF8Unsafe());
  dict->SetInteger(kBlockSizeKey, block_size);
  dict->Set(kBlockHashesKey, block_hashes);

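  // Store each block hash as base64 so the raw digest bytes survive JSON
  // string encoding.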
  for (std::vector<std::string>::const_iterator i = hashes.begin();
       i != hashes.end();
       ++i) {
    std::string encoded;
    base::Base64Encode(*i, &encoded);
    block_hashes->AppendString(encoded);
  }
}
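
// Serializes the accumulated per-file entries as JSON and writes them to
// |path|.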
bool ComputedHashes::Writer::WriteToFile(const base::FilePath& path) {
  std::string json;
  if (!base::JSONWriter::Write(&file_list_, &json))
    return false;

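  // base::WriteFile returns the number of bytes written, or -1 on failure.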
  int written = base::WriteFile(path, json.data(), json.size());
  if (static_cast<unsigned>(written) != json.size()) {
    LOG(ERROR) << "Error writing " << path.MaybeAsASCII()
               << " ; write result:" << written << " expected:" << json.size();
    return false;
  }
  return true;
}

} // namespace extensions