
Check BlobHeader and Blob content size

Files with BlobHeaders or Blob contents over a certain size are
considered corrupt -> Stop parsing those files and return an error.
Johannes Hofmann · 8 years ago
Commit fcec5746d3
3 changed files with 49 additions and 10 deletions:
  1. src/blob.rs       +33  -9
  2. src/errors.rs     +10  -0
  3. src/mmap_blob.rs   +6  -1
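Both readers apply the same guard: read the 4-byte big-endian length prefix, compare it against the new limit, and return an error before allocating or parsing anything. A std-only sketch of that pattern follows (the helper name `read_header_size` is made up for illustration, and `u32::from_be_bytes` stands in for the `byteorder` calls used in the diffs below; the 64 KiB value mirrors the new `MAX_BLOB_HEADER_SIZE`):

use std::io::{Cursor, Read};

// Mirrors the limit introduced in this commit (src/blob.rs).
const MAX_BLOB_HEADER_SIZE: u64 = 64 * 1024;

// Read the 4-byte big-endian length prefix that precedes each BlobHeader.
fn read_header_size<R: Read>(reader: &mut R) -> std::io::Result<u64> {
    let mut buf = [0u8; 4];
    reader.read_exact(&mut buf)?;
    Ok(u64::from(u32::from_be_bytes(buf)))
}

fn main() -> std::io::Result<()> {
    // A corrupt file might claim a 1 GiB header; reject it before parsing.
    let prefix = 0x4000_0000u32.to_be_bytes();
    let size = read_header_size(&mut Cursor::new(prefix))?;
    if size >= MAX_BLOB_HEADER_SIZE {
        eprintln!("blob header is too big: {} bytes", size);
    }
    Ok(())
}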

src/blob.rs  (+33, -9)

@@ -8,7 +8,7 @@ use byteorder::ReadBytesExt;
 use errors::*;
 use proto::fileformat;
 use std::fs::File;
-use std::io::{BufReader, ErrorKind, Read};
+use std::io::{BufReader, Read};
 use std::path::Path;
 use util::{parse_message_from_bytes, parse_message_from_reader};
 
@@ -19,6 +19,13 @@ use flate2::read::ZlibDecoder;
 use inflate::DeflateDecoder;
 
 
+/// Maximum allowed `BlobHeader` size in bytes.
+pub static MAX_BLOB_HEADER_SIZE: u64 = 64 * 1024;
+
+/// Maximum allowed uncompressed `Blob` content size in bytes.
+pub static MAX_BLOB_MESSAGE_SIZE: u64 = 32 * 1024 * 1024;
+
+
 /// The content type of a blob.
 #[derive(Debug, Eq, PartialEq)]
 pub enum BlobType<'a> {
@@ -166,22 +173,27 @@ impl<R: Read> Iterator for BlobReader<R> {
             return None;
         }
 
-        let size: u64 = match self.reader.read_u32::<byteorder::BigEndian>() {
+        let header_size: u64 = match self.reader.read_u32::<byteorder::BigEndian>() {
             Ok(n) => u64::from(n),
             Err(e) => {
                 match e.kind() {
-                    ErrorKind::UnexpectedEof => {
+                    ::std::io::ErrorKind::UnexpectedEof => {
                         return None
                     },
                     _ => {
                         self.last_blob_ok = false;
-                        return Some(Err(Error::with_chain(e, "Could not decode blob size")));
+                        return Some(Err(Error::with_chain(e, "Could not decode blob header size")));
                     },
                 }
             },
         };
 
-        let header: fileformat::BlobHeader = match parse_message_from_reader(&mut self.reader.by_ref().take(size)) {
+        if header_size >= MAX_BLOB_HEADER_SIZE {
+            self.last_blob_ok = false;
+            return Some(Err(ErrorKind::BlobHeaderTooBig(header_size).into()));
+        }
+
+        let header: fileformat::BlobHeader = match parse_message_from_reader(&mut self.reader.by_ref().take(header_size)) {
             Ok(header) => header,
             Err(e) => {
                 self.last_blob_ok = false;
@@ -205,9 +217,15 @@ impl<R: Read> Iterator for BlobReader<R> {
 pub(crate) fn decode_blob<T>(blob: &fileformat::Blob) -> Result<T>
     where T: protobuf::Message + protobuf::MessageStatic {
     if blob.has_raw() {
-        parse_message_from_bytes(blob.get_raw()).chain_err(|| "Could not parse raw data")
+        let size = blob.get_raw().len() as u64;
+        if size < MAX_BLOB_MESSAGE_SIZE {
+            parse_message_from_bytes(blob.get_raw()).chain_err(|| "Could not parse raw data")
+        } else {
+            Err(ErrorKind::BlobMessageTooBig(size).into())
+        }
     } else if blob.has_zlib_data() {
-        let mut decoder = ZlibDecoder::new(blob.get_zlib_data());
+        let mut decoder = ZlibDecoder::new(blob.get_zlib_data())
+            .take(MAX_BLOB_MESSAGE_SIZE);
         parse_message_from_reader(&mut decoder).chain_err(|| "Could not parse zlib data")
     } else {
         bail!("Blob is missing fields 'raw' and 'zlib_data")
@@ -218,9 +236,15 @@ pub(crate) fn decode_blob<T>(blob: &fileformat::Blob) -> Result<T>
 pub(crate) fn decode_blob<T>(blob: &fileformat::Blob) -> Result<T>
     where T: protobuf::Message + protobuf::MessageStatic {
     if blob.has_raw() {
-        parse_message_from_bytes(blob.get_raw()).chain_err(|| "Could not parse raw data")
+        let size = blob.get_raw().len() as u64;
+        if size < MAX_BLOB_MESSAGE_SIZE {
+            parse_message_from_bytes(blob.get_raw()).chain_err(|| "Could not parse raw data")
+        } else {
+            Err(ErrorKind::BlobMessageTooBig(size).into())
+        }
     } else if blob.has_zlib_data() {
-        let mut decoder = DeflateDecoder::from_zlib(blob.get_zlib_data());
+        let mut decoder = DeflateDecoder::from_zlib(blob.get_zlib_data())
+            .take(MAX_BLOB_MESSAGE_SIZE);
         parse_message_from_reader(&mut decoder).chain_err(|| "Could not parse zlib data")
     } else {
         bail!("Blob is missing fields 'raw' and 'zlib_data")

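Note how the two branches of decode_blob enforce the limit differently: the raw branch can check blob.get_raw().len() up front and return BlobMessageTooBig, while the zlib/deflate branch wraps the decompressing reader in Read::take(MAX_BLOB_MESSAGE_SIZE), so the cap applies to the uncompressed output and an over-limit compressed blob would surface as a truncated-message parse error rather than its own ErrorKind. A std-only sketch of the take mechanism (a Cursor stands in for the zlib/deflate decoder; the 64 KiB limit here is arbitrary):

use std::io::{Cursor, Read};

fn main() -> std::io::Result<()> {
    // Stand-in for the decompressor's output on a corrupt or hostile blob.
    let huge = vec![0u8; 1024 * 1024];
    let limit = 64 * 1024u64; // stand-in for MAX_BLOB_MESSAGE_SIZE

    // Read::take caps how many bytes the wrapped reader will ever yield,
    // so downstream parsing cannot be made to consume unbounded output.
    let mut capped = Cursor::new(huge).take(limit);
    let mut out = Vec::new();
    capped.read_to_end(&mut out)?;
    assert_eq!(out.len() as u64, limit);
    Ok(())
}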
src/errors.rs  (+10, -0)

@@ -10,6 +10,16 @@ error_chain!{
             display("stringtable index out of bounds: {}", index)
         }
 
+        BlobHeaderTooBig(size: u64) {
+            description("blob header is too big")
+            display("blob header is too big: {} bytes", size)
+        }
+
+        BlobMessageTooBig(size: u64) {
+            description("blob message is too big")
+            display("blob message is too big: {} bytes", size)
+        }
+
         //TODO add UnexpectedPrimitiveBlock
     }
 }

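The two new variants go through error_chain's error_chain! macro, so callers get typed kinds to match on instead of string-only errors. A rough sketch of how a caller might distinguish them; this assumes the error_chain crate as this project uses it, and the match style follows its generated Error::kind() accessor:

#![recursion_limit = "1024"] // commonly recommended when using error_chain

#[macro_use]
extern crate error_chain;

error_chain! {
    errors {
        BlobHeaderTooBig(size: u64) {
            description("blob header is too big")
            display("blob header is too big: {} bytes", size)
        }
        BlobMessageTooBig(size: u64) {
            description("blob message is too big")
            display("blob message is too big: {} bytes", size)
        }
    }
}

fn main() {
    let err: Error = ErrorKind::BlobHeaderTooBig(70_000).into();
    // A caller can branch on the kind, e.g. to skip a corrupt file.
    match *err.kind() {
        ErrorKind::BlobHeaderTooBig(size) => eprintln!("header too big: {} bytes", size),
        ErrorKind::BlobMessageTooBig(size) => eprintln!("message too big: {} bytes", size),
        _ => eprintln!("other error: {}", err),
    }
}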
src/mmap_blob.rs  (+6, -1)

@@ -153,7 +153,7 @@ impl<'a> Iterator for MmapBlobReader<'a> {
             1 ... 3 => {
                 self.last_blob_ok = false;
                 let io_error = ::std::io::Error::new(
-                    ::std::io::ErrorKind::UnexpectedEof, "failed to parse blob length"
+                    ::std::io::ErrorKind::UnexpectedEof, "failed to parse blob header length"
                 );
                 return Some(Err(Error::from_kind(ErrorKind::Io(io_error))));
             },
@@ -162,6 +162,11 @@ impl<'a> Iterator for MmapBlobReader<'a> {
 
         let header_size = byteorder::BigEndian::read_u32(slice) as usize;
 
+        if header_size as u64 >= ::blob::MAX_BLOB_HEADER_SIZE {
+            self.last_blob_ok = false;
+            return Some(Err(ErrorKind::BlobHeaderTooBig(header_size as u64).into()));
+        }
+
         if slice.len() < 4 + header_size {
             self.last_blob_ok = false;
             let io_error = ::std::io::Error::new(
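In the memory-mapped reader the same limit is checked right after the length prefix is decoded and before the existing slice.len() < 4 + header_size bounds check, so an absurd declared size is reported as corruption (BlobHeaderTooBig) rather than as an unexpected EOF. A simplified, std-only sketch of that ordering on a byte slice; the function name and string errors are made up for illustration, and the real code goes on to parse the header bytes as a protobuf message:

// Mirrors ::blob::MAX_BLOB_HEADER_SIZE from this commit.
const MAX_BLOB_HEADER_SIZE: u64 = 64 * 1024;

// Split one length-prefixed header off the front of a mapped byte slice.
fn split_header(slice: &[u8]) -> Result<(&[u8], &[u8]), String> {
    if slice.len() < 4 {
        return Err("failed to parse blob header length".to_owned());
    }
    let header_size = u32::from_be_bytes([slice[0], slice[1], slice[2], slice[3]]) as usize;

    // New check: reject oversized headers before looking at the rest of the slice.
    if header_size as u64 >= MAX_BLOB_HEADER_SIZE {
        return Err(format!("blob header is too big: {} bytes", header_size));
    }

    // Existing check: the declared header must actually fit in the mapping.
    if slice.len() < 4 + header_size {
        return Err("unexpected end of file".to_owned());
    }
    let (header, rest) = slice[4..].split_at(header_size);
    Ok((header, rest))
}

fn main() {
    // A prefix claiming a 10 MiB header is rejected without touching more data.
    let bad = (10u32 * 1024 * 1024).to_be_bytes();
    assert!(split_header(&bad).is_err());
}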