[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 1 | // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
| 4 | |
| 5 | #include "extensions/browser/computed_hashes.h" |
| 6 | |
dcheng | f1950200 | 2016-09-14 15:18:18 | [diff] [blame] | 7 | #include <memory> |
| 8 | #include <utility> |
| 9 | |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 10 | #include "base/base64.h" |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 11 | #include "base/files/file_path.h" |
thestig | 9471270 | 2014-09-10 07:46:59 | [diff] [blame] | 12 | #include "base/files/file_util.h" |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 13 | #include "base/json/json_reader.h" |
| 14 | #include "base/json/json_writer.h" |
jdoerrie | 6ff270ca | 2017-06-07 10:31:45 | [diff] [blame] | 15 | #include "base/memory/ptr_util.h" |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 16 | #include "base/stl_util.h" |
| 17 | #include "base/values.h" |
| 18 | #include "crypto/secure_hash.h" |
| 19 | #include "crypto/sha2.h" |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 20 | |
namespace {
// Keys used in the JSON document produced by ComputedHashes::Writer and
// consumed by ComputedHashes::Reader. The document has the shape:
//   { "version": 2,
//     "file_hashes": [ { "path": ..., "block_size": ...,
//                        "block_hashes": [ ... ] }, ... ] }
const char kBlockHashesKey[] = "block_hashes";
const char kBlockSizeKey[] = "block_size";
const char kFileHashesKey[] = "file_hashes";
const char kPathKey[] = "path";
const char kVersionKey[] = "version";
// Current format version; Reader rejects any other version (no forward or
// backward compatibility).
const int kVersion = 2;
}  // namespace
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 29 | |
| 30 | namespace extensions { |
| 31 | |
| 32 | ComputedHashes::Reader::Reader() { |
| 33 | } |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 34 | |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 35 | ComputedHashes::Reader::~Reader() { |
| 36 | } |
| 37 | |
| 38 | bool ComputedHashes::Reader::InitFromFile(const base::FilePath& path) { |
| 39 | std::string contents; |
| 40 | if (!base::ReadFileToString(path, &contents)) |
| 41 | return false; |
| 42 | |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 43 | base::DictionaryValue* top_dictionary = NULL; |
dcheng | f5d24108 | 2016-04-21 03:43:11 | [diff] [blame] | 44 | std::unique_ptr<base::Value> value(base::JSONReader::Read(contents)); |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 45 | if (!value.get() || !value->GetAsDictionary(&top_dictionary)) |
| 46 | return false; |
| 47 | |
| 48 | // For now we don't support forwards or backwards compatability in the |
| 49 | // format, so we return false on version mismatch. |
| 50 | int version = 0; |
| 51 | if (!top_dictionary->GetInteger(kVersionKey, &version) || version != kVersion) |
| 52 | return false; |
| 53 | |
| 54 | base::ListValue* all_hashes = NULL; |
| 55 | if (!top_dictionary->GetList(kFileHashesKey, &all_hashes)) |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 56 | return false; |
| 57 | |
| 58 | for (size_t i = 0; i < all_hashes->GetSize(); i++) { |
| 59 | base::DictionaryValue* dictionary = NULL; |
| 60 | if (!all_hashes->GetDictionary(i, &dictionary)) |
| 61 | return false; |
| 62 | |
| 63 | std::string relative_path_utf8; |
| 64 | if (!dictionary->GetString(kPathKey, &relative_path_utf8)) |
| 65 | return false; |
| 66 | |
| 67 | int block_size; |
| 68 | if (!dictionary->GetInteger(kBlockSizeKey, &block_size)) |
| 69 | return false; |
| 70 | if (block_size <= 0 || ((block_size % 1024) != 0)) { |
| 71 | LOG(ERROR) << "Invalid block size: " << block_size; |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 72 | return false; |
| 73 | } |
| 74 | |
| 75 | base::ListValue* hashes_list = NULL; |
| 76 | if (!dictionary->GetList(kBlockHashesKey, &hashes_list)) |
| 77 | return false; |
| 78 | |
| 79 | base::FilePath relative_path = |
| 80 | base::FilePath::FromUTF8Unsafe(relative_path_utf8); |
[email protected] | 4f9bdf6 | 2014-06-28 01:08:22 | [diff] [blame] | 81 | relative_path = relative_path.NormalizePathSeparatorsTo('/'); |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 82 | |
| 83 | data_[relative_path] = HashInfo(block_size, std::vector<std::string>()); |
| 84 | std::vector<std::string>* hashes = &(data_[relative_path].second); |
| 85 | |
| 86 | for (size_t j = 0; j < hashes_list->GetSize(); j++) { |
| 87 | std::string encoded; |
| 88 | if (!hashes_list->GetString(j, &encoded)) |
| 89 | return false; |
| 90 | |
| 91 | hashes->push_back(std::string()); |
| 92 | std::string* decoded = &hashes->back(); |
| 93 | if (!base::Base64Decode(encoded, decoded)) { |
| 94 | hashes->clear(); |
| 95 | return false; |
| 96 | } |
| 97 | } |
| 98 | } |
| 99 | return true; |
| 100 | } |
| 101 | |
| 102 | bool ComputedHashes::Reader::GetHashes(const base::FilePath& relative_path, |
| 103 | int* block_size, |
Istiaque Ahmed | 9bdd9d9 | 2017-12-16 04:53:27 | [diff] [blame^] | 104 | std::vector<std::string>* hashes) const { |
[email protected] | 4f9bdf6 | 2014-06-28 01:08:22 | [diff] [blame] | 105 | base::FilePath path = relative_path.NormalizePathSeparatorsTo('/'); |
Istiaque Ahmed | 9bdd9d9 | 2017-12-16 04:53:27 | [diff] [blame^] | 106 | std::map<base::FilePath, HashInfo>::const_iterator i = data_.find(path); |
asargent | 79369191 | 2014-10-04 01:12:21 | [diff] [blame] | 107 | if (i == data_.end()) { |
| 108 | // If we didn't find the entry using exact match, it's possible the |
| 109 | // developer is using a path with some letters in the incorrect case, which |
| 110 | // happens to work on windows/osx. So try doing a linear scan to look for a |
| 111 | // case-insensitive match. In practice most extensions don't have that big |
| 112 | // a list of files so the performance penalty is probably not too big |
| 113 | // here. Also for crbug.com/29941 we plan to start warning developers when |
| 114 | // they are making this mistake, since their extension will be broken on |
| 115 | // linux/chromeos. |
| 116 | for (i = data_.begin(); i != data_.end(); ++i) { |
| 117 | const base::FilePath& entry = i->first; |
| 118 | if (base::FilePath::CompareEqualIgnoreCase(entry.value(), path.value())) |
| 119 | break; |
| 120 | } |
| 121 | if (i == data_.end()) |
| 122 | return false; |
| 123 | } |
Istiaque Ahmed | 9bdd9d9 | 2017-12-16 04:53:27 | [diff] [blame^] | 124 | const HashInfo& info = i->second; |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 125 | *block_size = info.first; |
| 126 | *hashes = info.second; |
| 127 | return true; |
| 128 | } |
| 129 | |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 130 | ComputedHashes::Writer::Writer() : file_list_(new base::ListValue) { |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 131 | } |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 132 | |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 133 | ComputedHashes::Writer::~Writer() { |
| 134 | } |
| 135 | |
| 136 | void ComputedHashes::Writer::AddHashes(const base::FilePath& relative_path, |
| 137 | int block_size, |
| 138 | const std::vector<std::string>& hashes) { |
Jeremy Roman | 16529d0e | 2017-08-24 18:13:47 | [diff] [blame] | 139 | auto block_hashes = std::make_unique<base::ListValue>(); |
jdoerrie | 6ff270ca | 2017-06-07 10:31:45 | [diff] [blame] | 140 | block_hashes->GetList().reserve(hashes.size()); |
| 141 | for (const auto& hash : hashes) { |
| 142 | std::string encoded; |
| 143 | base::Base64Encode(hash, &encoded); |
| 144 | block_hashes->GetList().emplace_back(std::move(encoded)); |
| 145 | } |
| 146 | |
Jeremy Roman | 16529d0e | 2017-08-24 18:13:47 | [diff] [blame] | 147 | auto dict = std::make_unique<base::DictionaryValue>(); |
[email protected] | 4f9bdf6 | 2014-06-28 01:08:22 | [diff] [blame] | 148 | dict->SetString(kPathKey, |
| 149 | relative_path.NormalizePathSeparatorsTo('/').AsUTF8Unsafe()); |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 150 | dict->SetInteger(kBlockSizeKey, block_size); |
jdoerrie | 6ff270ca | 2017-06-07 10:31:45 | [diff] [blame] | 151 | dict->Set(kBlockHashesKey, std::move(block_hashes)); |
dcheng | f1950200 | 2016-09-14 15:18:18 | [diff] [blame] | 152 | file_list_->Append(std::move(dict)); |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 153 | } |
| 154 | |
// Serializes all accumulated entries as JSON and writes them to |path|.
// Returns false on serialization or write failure.
// NOTE(review): |file_list_| is moved into the dictionary below, so a Writer
// is effectively single-use — calling AddHashes()/WriteToFile() again after
// this would operate on a moved-from list. Confirm callers never reuse it.
bool ComputedHashes::Writer::WriteToFile(const base::FilePath& path) {
  std::string json;
  base::DictionaryValue top_dictionary;
  top_dictionary.SetInteger(kVersionKey, kVersion);
  top_dictionary.Set(kFileHashesKey, std::move(file_list_));

  if (!base::JSONWriter::Write(top_dictionary, &json))
    return false;
  // A negative (error) return from WriteFile also fails the comparison below,
  // because the unsigned cast turns it into a huge value != json.size().
  int written = base::WriteFile(path, json.data(), json.size());
  if (static_cast<unsigned>(written) != json.size()) {
    LOG(ERROR) << "Error writing " << path.AsUTF8Unsafe()
               << " ; write result:" << written << " expected:" << json.size();
    return false;
  }
  return true;
}
| 171 | |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 172 | void ComputedHashes::ComputeHashesForContent(const std::string& contents, |
| 173 | size_t block_size, |
| 174 | std::vector<std::string>* hashes) { |
| 175 | size_t offset = 0; |
| 176 | // Even when the contents is empty, we want to output at least one hash |
| 177 | // block (the hash of the empty string). |
| 178 | do { |
| 179 | const char* block_start = contents.data() + offset; |
| 180 | DCHECK(offset <= contents.size()); |
| 181 | size_t bytes_to_read = std::min(contents.size() - offset, block_size); |
dcheng | f5d24108 | 2016-04-21 03:43:11 | [diff] [blame] | 182 | std::unique_ptr<crypto::SecureHash> hash( |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 183 | crypto::SecureHash::Create(crypto::SecureHash::SHA256)); |
| 184 | hash->Update(block_start, bytes_to_read); |
| 185 | |
| 186 | hashes->push_back(std::string()); |
| 187 | std::string* buffer = &(hashes->back()); |
| 188 | buffer->resize(crypto::kSHA256Length); |
skyostil | 504e502 | 2016-08-12 13:03:59 | [diff] [blame] | 189 | hash->Finish(base::string_as_array(buffer), buffer->size()); |
[email protected] | de00aeb | 2014-08-06 09:13:39 | [diff] [blame] | 190 | |
| 191 | // If |contents| is empty, then we want to just exit here. |
| 192 | if (bytes_to_read == 0) |
| 193 | break; |
| 194 | |
| 195 | offset += bytes_to_read; |
| 196 | } while (offset < contents.size()); |
| 197 | } |
| 198 | |
[email protected] | abd4cb2 | 2014-05-16 05:22:56 | [diff] [blame] | 199 | } // namespace extensions |