1 /*
2  * Copyright (C) 2020 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 use libc::EIO;
18 use std::io;
19 
20 use super::common::{build_fsverity_digest, merkle_tree_height, FsverityError};
21 use super::sys::{FS_VERITY_HASH_ALG_SHA256, FS_VERITY_MAGIC};
22 use crate::auth::Authenticator;
23 use crate::common::{divide_roundup, CHUNK_SIZE};
24 use crate::crypto::{CryptoError, Sha256Hasher};
25 use crate::file::{ChunkBuffer, ReadByChunk};
26 
// A zero-filled chunk, used as the padding source when hashing a partial (final) chunk.
const ZEROS: [u8; CHUNK_SIZE as usize] = [0u8; CHUNK_SIZE as usize];

// The size of `struct fsverity_formatted_digest` in Linux with SHA-256.
// 12-byte header (8-byte magic + 2-byte algorithm id + 2-byte digest size) followed by the digest.
const SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256: usize = 12 + Sha256Hasher::HASH_SIZE;

// A buffer holding exactly one SHA-256 digest.
type HashBuffer = [u8; Sha256Hasher::HASH_SIZE];
33 
34 fn hash_with_padding(chunk: &[u8], pad_to: usize) -> Result<HashBuffer, CryptoError> {
35     let padding_size = pad_to - chunk.len();
36     Sha256Hasher::new()?.update(&chunk)?.update(&ZEROS[..padding_size])?.finalize()
37 }
38 
39 fn verity_check<T: ReadByChunk>(
40     chunk: &[u8],
41     chunk_index: u64,
42     file_size: u64,
43     merkle_tree: &T,
44 ) -> Result<HashBuffer, FsverityError> {
45     // The caller should not be able to produce a chunk at the first place if `file_size` is 0. The
46     // current implementation expects to crash when a `ReadByChunk` implementation reads
47     // beyond the file size, including empty file.
48     assert_ne!(file_size, 0);
49 
50     let chunk_hash = hash_with_padding(&chunk, CHUNK_SIZE as usize)?;
51 
52     fsverity_walk(chunk_index, file_size, merkle_tree)?.try_fold(
53         chunk_hash,
54         |actual_hash, result| {
55             let (merkle_chunk, hash_offset_in_chunk) = result?;
56             let expected_hash =
57                 &merkle_chunk[hash_offset_in_chunk..hash_offset_in_chunk + Sha256Hasher::HASH_SIZE];
58             if actual_hash != expected_hash {
59                 return Err(FsverityError::CannotVerify);
60             }
61             Ok(hash_with_padding(&merkle_chunk, CHUNK_SIZE as usize)?)
62         },
63     )
64 }
65 
/// Given a chunk index and the size of the file, returns an iterator that walks the Merkle tree
/// from the leaf to the root. The iterator carries the slice of the chunk/node as well as the
/// offset of the child node's hash. It is up to the iterator user to use the node and hash,
/// e.g. for the actual verification.
#[allow(clippy::needless_collect)]
fn fsverity_walk<T: ReadByChunk>(
    chunk_index: u64,
    file_size: u64,
    merkle_tree: &T,
) -> Result<impl Iterator<Item = Result<([u8; 4096], usize), FsverityError>> + '_, FsverityError> {
    // Each 4K tree node holds 4096 / 32 = 128 SHA-256 hashes.
    let hashes_per_node = CHUNK_SIZE / Sha256Hasher::HASH_SIZE as u64;
    debug_assert_eq!(hashes_per_node, 128u64);
    let max_level = merkle_tree_height(file_size).expect("file should not be empty") as u32;
    // First, for each level from root (height `max_level`) down to leaf (height 0), compute where
    // in the merkle tree file the hash relevant to `chunk_index` lives, as a
    // (tree chunk index, byte offset within that chunk) pair.
    let root_to_leaf_steps = (0..=max_level)
        .rev()
        .map(|x| {
            // `x` is the height of this level; higher levels cover more file data per hash.
            let leaves_per_hash = hashes_per_node.pow(x);
            let leaves_size_per_hash = CHUNK_SIZE * leaves_per_hash;
            let leaves_size_per_node = leaves_size_per_hash * hashes_per_node;
            let nodes_at_level = divide_roundup(file_size, leaves_size_per_node);
            let level_size = nodes_at_level * CHUNK_SIZE;
            // Byte offset of the relevant hash within this level.
            let offset_in_level = (chunk_index / leaves_per_hash) * Sha256Hasher::HASH_SIZE as u64;
            (level_size, offset_in_level)
        })
        // Accumulate the level sizes (root level first, matching the iteration order above) to
        // turn each per-level offset into a global byte offset in the merkle tree file.
        .scan(0, |level_offset, (level_size, offset_in_level)| {
            let this_level_offset = *level_offset;
            *level_offset += level_size;
            let global_hash_offset = this_level_offset + offset_in_level;
            Some(global_hash_offset)
        })
        // Convert the global byte offset into a (chunk index, offset in chunk) pair for reading.
        .map(|global_hash_offset| {
            let chunk_index = global_hash_offset / CHUNK_SIZE;
            let hash_offset_in_chunk = (global_hash_offset % CHUNK_SIZE) as usize;
            (chunk_index, hash_offset_in_chunk)
        })
        .collect::<Vec<_>>(); // Needs to collect first to be able to reverse below.

    // Reverse to walk leaf -> root, lazily reading each tree node only when the iterator is
    // actually consumed.
    Ok(root_to_leaf_steps.into_iter().rev().map(move |(chunk_index, hash_offset_in_chunk)| {
        let mut merkle_chunk = [0u8; 4096];
        // read_chunk is supposed to return a full chunk, or an incomplete one at the end of the
        // file. In the incomplete case, the hash is calculated with 0-padding to the chunk size.
        // Therefore, we don't need to check the returned size here.
        let _ = merkle_tree.read_chunk(chunk_index, &mut merkle_chunk)?;
        Ok((merkle_chunk, hash_offset_in_chunk))
    }))
}
112 
113 fn build_fsverity_formatted_digest(
114     root_hash: &HashBuffer,
115     file_size: u64,
116 ) -> Result<[u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256], CryptoError> {
117     let digest = build_fsverity_digest(root_hash, file_size)?;
118     // Little-endian byte representation of fsverity_formatted_digest from linux/fsverity.h
119     // Not FFI-ed as it seems easier to deal with the raw bytes manually.
120     let mut formatted_digest = [0u8; SIZE_OF_FSVERITY_FORMATTED_DIGEST_SHA256];
121     formatted_digest[0..8].copy_from_slice(FS_VERITY_MAGIC);
122     formatted_digest[8..10].copy_from_slice(&(FS_VERITY_HASH_ALG_SHA256 as u16).to_le_bytes());
123     formatted_digest[10..12].copy_from_slice(&(Sha256Hasher::HASH_SIZE as u16).to_le_bytes());
124     formatted_digest[12..].copy_from_slice(&digest);
125     Ok(formatted_digest)
126 }
127 
/// A reader that verifies each chunk it reads against an fs-verity Merkle tree before returning
/// the data. The root hash is authenticated against a signature at construction time.
pub struct VerifiedFileReader<F: ReadByChunk, M: ReadByChunk> {
    chunked_file: F, // The underlying content file being verified.
    file_size: u64, // Size of the content file in bytes.
    merkle_tree: M, // The fs-verity Merkle tree for `chunked_file`.
    root_hash: HashBuffer, // Expected Merkle tree root hash, checked on every chunk read.
}
134 
135 impl<F: ReadByChunk, M: ReadByChunk> VerifiedFileReader<F, M> {
136     pub fn new<A: Authenticator>(
137         authenticator: &A,
138         chunked_file: F,
139         file_size: u64,
140         sig: Vec<u8>,
141         merkle_tree: M,
142     ) -> Result<VerifiedFileReader<F, M>, FsverityError> {
143         let mut buf = [0u8; CHUNK_SIZE as usize];
144         let size = merkle_tree.read_chunk(0, &mut buf)?;
145         if buf.len() != size {
146             return Err(FsverityError::InsufficientData(size));
147         }
148         let root_hash = Sha256Hasher::new()?.update(&buf[..])?.finalize()?;
149         let formatted_digest = build_fsverity_formatted_digest(&root_hash, file_size)?;
150         let valid = authenticator.verify(&sig, &formatted_digest)?;
151         if valid {
152             Ok(VerifiedFileReader { chunked_file, file_size, merkle_tree, root_hash })
153         } else {
154             Err(FsverityError::BadSignature)
155         }
156     }
157 }
158 
159 impl<F: ReadByChunk, M: ReadByChunk> ReadByChunk for VerifiedFileReader<F, M> {
160     fn read_chunk(&self, chunk_index: u64, buf: &mut ChunkBuffer) -> io::Result<usize> {
161         let size = self.chunked_file.read_chunk(chunk_index, buf)?;
162         let root_hash = verity_check(&buf[..size], chunk_index, self.file_size, &self.merkle_tree)
163             .map_err(|_| io::Error::from_raw_os_error(EIO))?;
164         if root_hash != self.root_hash {
165             Err(io::Error::from_raw_os_error(EIO))
166         } else {
167             Ok(size)
168         }
169     }
170 }
171 
#[cfg(test)]
mod tests {
    use super::*;
    use crate::auth::FakeAuthenticator;
    use crate::file::{LocalFileReader, ReadByChunk};
    use anyhow::Result;
    use std::fs::{self, File};
    use std::io::Read;

    type LocalVerifiedFileReader = VerifiedFileReader<LocalFileReader, LocalFileReader>;

    // Number of 4K chunks needed to hold `file_size` bytes, rounding up.
    fn total_chunk_number(file_size: u64) -> u64 {
        (file_size + 4095) / 4096
    }

    // Returns a reader with fs-verity verification and the file size.
    fn new_reader_with_fsverity(
        content_path: &str,
        merkle_tree_path: &str,
        signature_path: &str,
    ) -> Result<(LocalVerifiedFileReader, u64)> {
        let file_reader = LocalFileReader::new(File::open(content_path)?)?;
        let file_size = file_reader.len();
        let merkle_tree = LocalFileReader::new(File::open(merkle_tree_path)?)?;
        let mut sig = Vec::new();
        let _ = File::open(signature_path)?.read_to_end(&mut sig)?;
        let authenticator = FakeAuthenticator::always_succeed();
        Ok((
            VerifiedFileReader::new(&authenticator, file_reader, file_size, sig, merkle_tree)?,
            file_size,
        ))
    }

    // Opens the named testdata file (with its merkle dump and signature), then reads and
    // verifies every chunk, asserting each read succeeds.
    fn read_and_verify_all_chunks(basename: &str) -> Result<()> {
        let (file_reader, file_size) = new_reader_with_fsverity(
            &format!("testdata/{}", basename),
            &format!("testdata/{}.merkle_dump", basename),
            &format!("testdata/{}.fsv_sig", basename),
        )?;

        for i in 0..total_chunk_number(file_size) {
            let mut buf = [0u8; 4096];
            assert!(file_reader.read_chunk(i, &mut buf).is_ok());
        }
        Ok(())
    }

    #[test]
    fn fsverity_verify_full_read_4k() -> Result<()> {
        read_and_verify_all_chunks("input.4k")
    }

    #[test]
    fn fsverity_verify_full_read_4k1() -> Result<()> {
        read_and_verify_all_chunks("input.4k1")
    }

    #[test]
    fn fsverity_verify_full_read_4m() -> Result<()> {
        read_and_verify_all_chunks("input.4m")
    }

    #[test]
    fn fsverity_verify_bad_merkle_tree() -> Result<()> {
        let (file_reader, _) = new_reader_with_fsverity(
            "testdata/input.4m",
            "testdata/input.4m.merkle_dump.bad", // First leaf node is corrupted.
            "testdata/input.4m.fsv_sig",
        )?;

        // A corrupted node at the lowest level (a 4K chunk holding 128 SHA-256 hashes) makes
        // reads fail for every file chunk it covers, but not for chunks covered by other nodes.
        let mut buf = [0u8; 4096];
        let num_hashes = 4096 / 32;
        let last_index = num_hashes;
        for i in 0..last_index {
            assert!(file_reader.read_chunk(i, &mut buf).is_err());
        }
        assert!(file_reader.read_chunk(last_index, &mut buf).is_ok());
        Ok(())
    }

    #[test]
    fn invalid_signature() -> Result<()> {
        let authenticator = FakeAuthenticator::always_fail();
        let file_reader = LocalFileReader::new(File::open("testdata/input.4m")?)?;
        let file_size = file_reader.len();
        let merkle_tree = LocalFileReader::new(File::open("testdata/input.4m.merkle_dump")?)?;
        let sig = fs::read("testdata/input.4m.fsv_sig")?;
        assert!(VerifiedFileReader::new(&authenticator, file_reader, file_size, sig, merkle_tree)
            .is_err());
        Ok(())
    }
}
282