Lines matching refs: s

168 static bool fill_temp(struct xz_dec *s, struct xz_buf *b)  in fill_temp()  argument
171 b->in_size - b->in_pos, s->temp.size - s->temp.pos); in fill_temp()
173 memcpy(s->temp.buf + s->temp.pos, b->in + b->in_pos, copy_size); in fill_temp()
175 s->temp.pos += copy_size; in fill_temp()
177 if (s->temp.pos == s->temp.size) { in fill_temp()
178 s->temp.pos = 0; in fill_temp()
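
These references come from the xz_dec stream decoder (xz_dec_stream.c in the kernel's lib/xz and in XZ Embedded). The fill_temp() lines above show headers being buffered into s->temp: input is copied until temp.size bytes have accumulated, at which point a complete header can be parsed. A standalone sketch of that pattern follows; the struct shapes are assumptions inferred from the fields referenced in the listing, not the exact definitions.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Assumed shapes, inferred from the fields used in the listing. */
struct in_buf { const uint8_t *in; size_t in_pos; size_t in_size; };
struct temp_buf { uint8_t buf[1024]; size_t size; size_t pos; };

/*
 * Copy as much input as possible into the temporary buffer. Returns true
 * once temp->size bytes have been gathered (rewinding temp->pos so the
 * buffer can be parsed from the start); returns false to ask for more input.
 */
static bool fill_temp(struct temp_buf *temp, struct in_buf *b)
{
	size_t copy_size = b->in_size - b->in_pos;

	if (copy_size > temp->size - temp->pos)
		copy_size = temp->size - temp->pos;

	memcpy(temp->buf + temp->pos, b->in + b->in_pos, copy_size);
	b->in_pos += copy_size;
	temp->pos += copy_size;

	if (temp->pos == temp->size) {
		temp->pos = 0;
		return true;
	}
	return false;
}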
186 static enum xz_ret dec_vli(struct xz_dec *s, const uint8_t *in, in dec_vli() argument
191 if (s->pos == 0) in dec_vli()
192 s->vli = 0; in dec_vli()
198 s->vli |= (vli_type)(byte & 0x7F) << s->pos; in dec_vli()
202 if (byte == 0 && s->pos != 0) in dec_vli()
205 s->pos = 0; in dec_vli()
209 s->pos += 7; in dec_vli()
210 if (s->pos == 7 * VLI_BYTES_MAX) in dec_vli()
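
dec_vli() decodes the variable-length integers used throughout the .xz format: seven payload bits per byte, least significant group first, with the 0x80 bit set on every byte except the last. The fragments show the resumable kernel version accumulating into s->vli with s->pos tracking the shift across calls; the sketch below is a simplified single-buffer variant of the same encoding (taking VLI_BYTES_MAX as 9 is an assumption matching a 64-bit value type).

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define VLI_BYTES_MAX 9		/* enough bytes for a 63-bit value */

/*
 * Decode one .xz variable-length integer from in[*in_pos .. in_size).
 * Advances *in_pos past the encoded bytes and stores the value in *out.
 * Returns false on truncated input, an over-long encoding, or a
 * non-minimal encoding (a trailing 0x00 byte).
 */
static bool vli_decode(const uint8_t *in, size_t *in_pos, size_t in_size,
		       uint64_t *out)
{
	uint64_t value = 0;
	unsigned int shift = 0;

	while (*in_pos < in_size) {
		uint8_t byte = in[(*in_pos)++];

		value |= (uint64_t)(byte & 0x7F) << shift;

		if ((byte & 0x80) == 0) {
			if (byte == 0 && shift != 0)
				return false;	/* non-minimal encoding */
			*out = value;
			return true;
		}

		shift += 7;
		if (shift == 7 * VLI_BYTES_MAX)
			return false;		/* value too long */
	}
	return false;				/* ran out of input */
}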
229 static enum xz_ret dec_block(struct xz_dec *s, struct xz_buf *b) in dec_block() argument
233 s->in_start = b->in_pos; in dec_block()
234 s->out_start = b->out_pos; in dec_block()
237 if (s->bcj_active) in dec_block()
238 ret = xz_dec_bcj_run(s->bcj, s->lzma2, b); in dec_block()
241 ret = xz_dec_lzma2_run(s->lzma2, b); in dec_block()
243 s->block.compressed += b->in_pos - s->in_start; in dec_block()
244 s->block.uncompressed += b->out_pos - s->out_start; in dec_block()
250 if (s->block.compressed > s->block_header.compressed in dec_block()
251 || s->block.uncompressed in dec_block()
252 > s->block_header.uncompressed) in dec_block()
255 if (s->check_type == XZ_CHECK_CRC32) in dec_block()
256 s->crc = xz_crc32(b->out + s->out_start, in dec_block()
257 b->out_pos - s->out_start, s->crc); in dec_block()
259 else if (s->check_type == XZ_CHECK_CRC64) in dec_block()
260 s->crc = xz_crc64(b->out + s->out_start, in dec_block()
261 b->out_pos - s->out_start, s->crc); in dec_block()
265 if (s->block_header.compressed != VLI_UNKNOWN in dec_block()
266 && s->block_header.compressed in dec_block()
267 != s->block.compressed) in dec_block()
270 if (s->block_header.uncompressed != VLI_UNKNOWN in dec_block()
271 && s->block_header.uncompressed in dec_block()
272 != s->block.uncompressed) in dec_block()
275 s->block.hash.unpadded += s->block_header.size in dec_block()
276 + s->block.compressed; in dec_block()
279 s->block.hash.unpadded += check_sizes[s->check_type]; in dec_block()
281 if (s->check_type == XZ_CHECK_CRC32) in dec_block()
282 s->block.hash.unpadded += 4; in dec_block()
283 else if (IS_CRC64(s->check_type)) in dec_block()
284 s->block.hash.unpadded += 8; in dec_block()
287 s->block.hash.uncompressed += s->block.uncompressed; in dec_block()
288 s->block.hash.crc32 = xz_crc32( in dec_block()
289 (const uint8_t *)&s->block.hash, in dec_block()
290 sizeof(s->block.hash), s->block.hash.crc32); in dec_block()
292 ++s->block.count; in dec_block()
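
dec_block() feeds the buffers to the LZMA2 decoder (through the BCJ filter when one is active), keeps running totals of compressed and uncompressed bytes, and fails if either total exceeds the size promised in the block header. When the block ends it finishes the output CRC and folds the block's Unpadded Size, uncompressed size, and a rolling CRC32 into s->block.hash, which dec_main() later compares against the same hash built from the index. A sketch of just that hash bookkeeping (the record layout is an assumption; xz_crc32() is the helper named in the listing, declared here only as a prototype):

#include <stddef.h>
#include <stdint.h>

uint32_t xz_crc32(const uint8_t *buf, size_t size, uint32_t crc);	/* assumed helper */

/* Assumed shape of the per-stream block/index hash record. */
struct dec_hash {
	uint64_t unpadded;	/* sum of Unpadded Size fields */
	uint64_t uncompressed;	/* sum of uncompressed sizes */
	uint32_t crc32;		/* rolling CRC32 over the record itself */
};

/*
 * Account for one finished block: Unpadded Size is the block header plus
 * the compressed data plus the check field, and the whole record is folded
 * into a rolling CRC32 so the list of blocks can later be compared, order
 * and all, against the records decoded from the index.
 */
static void block_account(struct dec_hash *hash, uint64_t header_size,
			  uint64_t compressed, uint64_t uncompressed,
			  uint32_t check_size)
{
	hash->unpadded += header_size + compressed + check_size;
	hash->uncompressed += uncompressed;
	hash->crc32 = xz_crc32((const uint8_t *)hash, sizeof(*hash),
			       hash->crc32);
}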
299 static void index_update(struct xz_dec *s, const struct xz_buf *b) in index_update() argument
301 size_t in_used = b->in_pos - s->in_start; in index_update()
302 s->index.size += in_used; in index_update()
303 s->crc = xz_crc32(b->in + s->in_start, in_used, s->crc); in index_update()
314 static enum xz_ret dec_index(struct xz_dec *s, struct xz_buf *b) in dec_index() argument
319 ret = dec_vli(s, b->in, &b->in_pos, b->in_size); in dec_index()
321 index_update(s, b); in dec_index()
325 switch (s->index.sequence) { in dec_index()
327 s->index.count = s->vli; in dec_index()
334 if (s->index.count != s->block.count) in dec_index()
337 s->index.sequence = SEQ_INDEX_UNPADDED; in dec_index()
341 s->index.hash.unpadded += s->vli; in dec_index()
342 s->index.sequence = SEQ_INDEX_UNCOMPRESSED; in dec_index()
346 s->index.hash.uncompressed += s->vli; in dec_index()
347 s->index.hash.crc32 = xz_crc32( in dec_index()
348 (const uint8_t *)&s->index.hash, in dec_index()
349 sizeof(s->index.hash), in dec_index()
350 s->index.hash.crc32); in dec_index()
351 --s->index.count; in dec_index()
352 s->index.sequence = SEQ_INDEX_UNPADDED; in dec_index()
355 } while (s->index.count > 0); in dec_index()
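
index_update() and dec_index() read the index: a record count (which must equal s->block.count) followed by one Unpadded Size and one uncompressed-size VLI per block, while index_update() keeps the CRC32 of the raw index bytes and their total size for the footer check. Each record is folded into s->index.hash exactly as dec_block() did for s->block.hash, so a simple memory comparison of the two records later proves that the index describes the blocks that were actually decoded. A sketch of the per-record state machine (the SEQ_* names follow the fragments; the hash record reuses the assumed struct dec_hash sketched above):

#include <stdbool.h>
#include <stdint.h>

/* Sub-states of index decoding, as seen in the listing. */
enum index_seq { SEQ_INDEX_COUNT, SEQ_INDEX_UNPADDED, SEQ_INDEX_UNCOMPRESSED };

struct index_state {
	enum index_seq sequence;
	uint64_t count;			/* records still expected */
	struct dec_hash hash;		/* built like the block hash above */
};

/* Consume one decoded VLI; returns false if the record count is wrong. */
static bool index_take_vli(struct index_state *idx, uint64_t vli,
			   uint64_t blocks_decoded)
{
	switch (idx->sequence) {
	case SEQ_INDEX_COUNT:
		idx->count = vli;
		if (idx->count != blocks_decoded)
			return false;
		idx->sequence = SEQ_INDEX_UNPADDED;
		break;
	case SEQ_INDEX_UNPADDED:
		idx->hash.unpadded += vli;
		idx->sequence = SEQ_INDEX_UNCOMPRESSED;
		break;
	case SEQ_INDEX_UNCOMPRESSED:
		idx->hash.uncompressed += vli;
		idx->hash.crc32 = xz_crc32((const uint8_t *)&idx->hash,
					   sizeof(idx->hash), idx->hash.crc32);
		--idx->count;
		idx->sequence = SEQ_INDEX_UNPADDED;
		break;
	}
	return true;
}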
365 static enum xz_ret crc_validate(struct xz_dec *s, struct xz_buf *b, in crc_validate() argument
372 if (((s->crc >> s->pos) & 0xFF) != b->in[b->in_pos++]) in crc_validate()
375 s->pos += 8; in crc_validate()
377 } while (s->pos < bits); in crc_validate()
379 s->crc = 0; in crc_validate()
380 s->pos = 0; in crc_validate()
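
crc_validate() compares the computed check value (CRC32 or CRC64) against the bytes stored after the compressed data, least significant byte first as .xz stores them, one byte per iteration so the comparison can pause at a buffer boundary and resume later; s->pos counts the bits already verified. A standalone sketch of that resumable comparison:

#include <stddef.h>
#include <stdint.h>

enum crc_result { CRC_NEED_INPUT, CRC_OK, CRC_MISMATCH };

/*
 * Compare `bits` bits of `crc` (stored little-endian in the stream) against
 * in[*in_pos...]. *pos remembers how many bits have been checked so far, so
 * the call can be repeated with fresh input until it returns CRC_OK.
 */
static enum crc_result crc_check(uint64_t crc, uint32_t bits, uint32_t *pos,
				 const uint8_t *in, size_t *in_pos,
				 size_t in_size)
{
	do {
		if (*in_pos == in_size)
			return CRC_NEED_INPUT;
		if (((crc >> *pos) & 0xFF) != in[(*in_pos)++])
			return CRC_MISMATCH;
		*pos += 8;
	} while (*pos < bits);

	*pos = 0;	/* reset so the state can be reused */
	return CRC_OK;
}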
390 static bool check_skip(struct xz_dec *s, struct xz_buf *b) in check_skip() argument
392 while (s->pos < check_sizes[s->check_type]) { in check_skip()
397 ++s->pos; in check_skip()
400 s->pos = 0; in check_skip()
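
check_skip() covers check types the build cannot verify but whose size is known from check_sizes[]: the bytes are simply consumed, again using s->pos to resume if the input runs out mid-field. A minimal sketch:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Skip `size` unverifiable check bytes; *pos lets the skip resume. */
static bool skip_check(uint32_t size, uint32_t *pos,
		       size_t *in_pos, size_t in_size)
{
	while (*pos < size) {
		if (*in_pos == in_size)
			return false;		/* need more input */
		++*in_pos;
		++*pos;
	}
	*pos = 0;
	return true;
}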
407 static enum xz_ret dec_stream_header(struct xz_dec *s) in dec_stream_header() argument
409 if (!memeq(s->temp.buf, HEADER_MAGIC, HEADER_MAGIC_SIZE)) in dec_stream_header()
412 if (xz_crc32(s->temp.buf + HEADER_MAGIC_SIZE, 2, 0) in dec_stream_header()
413 != get_le32(s->temp.buf + HEADER_MAGIC_SIZE + 2)) in dec_stream_header()
416 if (s->temp.buf[HEADER_MAGIC_SIZE] != 0) in dec_stream_header()
426 s->check_type = s->temp.buf[HEADER_MAGIC_SIZE + 1]; in dec_stream_header()
429 if (s->check_type > XZ_CHECK_MAX) in dec_stream_header()
432 if (s->check_type > XZ_CHECK_CRC32 && !IS_CRC64(s->check_type)) in dec_stream_header()
435 if (s->check_type > XZ_CHECK_CRC32 && !IS_CRC64(s->check_type)) in dec_stream_header()
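
dec_stream_header() validates the buffered 12-byte stream header: the 6-byte magic, two stream-flags bytes (the first reserved and required to be zero, the second naming the check type), and a CRC32 over those two flag bytes. The duplicated check on lines 432 and 435 reflects the two conditional-compilation branches, one that can skip unknown check types and one that rejects them. A layout sketch, with the magic spelled out and the helpers assumed:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define HEADER_MAGIC		"\3757zXZ"	/* 0xFD '7' 'z' 'X' 'Z' 0x00 */
#define HEADER_MAGIC_SIZE	6

uint32_t xz_crc32(const uint8_t *buf, size_t size, uint32_t crc);	/* assumed helper */

static uint32_t get_le32(const uint8_t *p)
{
	return (uint32_t)p[0] | ((uint32_t)p[1] << 8)
			| ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

/* Check a buffered 12-byte stream header and report the check type byte. */
static bool stream_header_ok(const uint8_t buf[12], uint8_t *check_type)
{
	if (memcmp(buf, HEADER_MAGIC, HEADER_MAGIC_SIZE) != 0)
		return false;

	/* The CRC32 of the two stream-flags bytes follows them directly. */
	if (xz_crc32(buf + HEADER_MAGIC_SIZE, 2, 0)
			!= get_le32(buf + HEADER_MAGIC_SIZE + 2))
		return false;

	if (buf[HEADER_MAGIC_SIZE] != 0)	/* reserved flags byte */
		return false;

	*check_type = buf[HEADER_MAGIC_SIZE + 1];
	return true;
}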
443 static enum xz_ret dec_stream_footer(struct xz_dec *s) in dec_stream_footer() argument
445 if (!memeq(s->temp.buf + 10, FOOTER_MAGIC, FOOTER_MAGIC_SIZE)) in dec_stream_footer()
448 if (xz_crc32(s->temp.buf + 4, 6, 0) != get_le32(s->temp.buf)) in dec_stream_footer()
456 if ((s->index.size >> 2) != get_le32(s->temp.buf + 4)) in dec_stream_footer()
459 if (s->temp.buf[8] != 0 || s->temp.buf[9] != s->check_type) in dec_stream_footer()
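
dec_stream_footer() checks the 12-byte stream footer: a CRC32 of the following six bytes, the 4-byte Backward Size (the index size in 4-byte units), the stream-flags bytes repeated from the header, and the 2-byte "YZ" magic at offset 10. The s->index.size >> 2 comparison in the fragment works because the decoder keeps the running index size without the index's own CRC32 field, which makes dividing by four line up with the stored value. A layout sketch, reusing the get_le32() and xz_crc32() assumptions from the previous sketch:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define FOOTER_MAGIC		"YZ"
#define FOOTER_MAGIC_SIZE	2

/*
 * Check a buffered 12-byte stream footer against the running index size
 * (kept without the index's own CRC32) and the header's check type.
 */
static bool stream_footer_ok(const uint8_t buf[12], uint64_t index_size,
			     uint8_t check_type)
{
	if (memcmp(buf + 10, FOOTER_MAGIC, FOOTER_MAGIC_SIZE) != 0)
		return false;

	/* The leading CRC32 covers Backward Size and the stream flags. */
	if (xz_crc32(buf + 4, 6, 0) != get_le32(buf))
		return false;

	/* Backward Size records the index size in 4-byte units. */
	if ((uint32_t)(index_size >> 2) != get_le32(buf + 4))
		return false;

	/* The stream flags must repeat what the header declared. */
	return buf[8] == 0 && buf[9] == check_type;
}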
470 static enum xz_ret dec_block_header(struct xz_dec *s) in dec_block_header() argument
478 s->temp.size -= 4; in dec_block_header()
479 if (xz_crc32(s->temp.buf, s->temp.size, 0) in dec_block_header()
480 != get_le32(s->temp.buf + s->temp.size)) in dec_block_header()
483 s->temp.pos = 2; in dec_block_header()
490 if (s->temp.buf[1] & 0x3E) in dec_block_header()
492 if (s->temp.buf[1] & 0x3F) in dec_block_header()
497 if (s->temp.buf[1] & 0x40) { in dec_block_header()
498 if (dec_vli(s, s->temp.buf, &s->temp.pos, s->temp.size) in dec_block_header()
502 s->block_header.compressed = s->vli; in dec_block_header()
504 s->block_header.compressed = VLI_UNKNOWN; in dec_block_header()
508 if (s->temp.buf[1] & 0x80) { in dec_block_header()
509 if (dec_vli(s, s->temp.buf, &s->temp.pos, s->temp.size) in dec_block_header()
513 s->block_header.uncompressed = s->vli; in dec_block_header()
515 s->block_header.uncompressed = VLI_UNKNOWN; in dec_block_header()
520 s->bcj_active = s->temp.buf[1] & 0x01; in dec_block_header()
521 if (s->bcj_active) { in dec_block_header()
522 if (s->temp.size - s->temp.pos < 2) in dec_block_header()
525 ret = xz_dec_bcj_reset(s->bcj, s->temp.buf[s->temp.pos++]); in dec_block_header()
533 if (s->temp.buf[s->temp.pos++] != 0x00) in dec_block_header()
539 if (s->temp.size - s->temp.pos < 2) in dec_block_header()
543 if (s->temp.buf[s->temp.pos++] != 0x21) in dec_block_header()
547 if (s->temp.buf[s->temp.pos++] != 0x01) in dec_block_header()
551 if (s->temp.size - s->temp.pos < 1) in dec_block_header()
554 ret = xz_dec_lzma2_reset(s->lzma2, s->temp.buf[s->temp.pos++]); in dec_block_header()
559 while (s->temp.pos < s->temp.size) in dec_block_header()
560 if (s->temp.buf[s->temp.pos++] != 0x00) in dec_block_header()
563 s->temp.pos = 0; in dec_block_header()
564 s->block.compressed = 0; in dec_block_header()
565 s->block.uncompressed = 0; in dec_block_header()
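
dec_block_header() parses a buffered block header. The trailing CRC32 (the last four bytes of temp) is verified first, then the flags byte at temp.buf[1]: the low two bits give the filter count minus one (the duplicated 0x3E/0x3F test is again two build variants, with and without BCJ support), bit 0x40 announces a Compressed Size VLI, bit 0x80 an Uncompressed Size VLI, and a set low bit means a two-filter chain, which the decoder takes to be a BCJ filter in front of LZMA2. The LZMA2 filter flags are the id byte 0x21, a properties size of 0x01, and one dictionary-size byte; everything after that must be zero padding. A sketch of the flags parsing for the single-filter case (vli_decode() is the buffer-based decoder sketched earlier; VLI_UNKNOWN marks an absent size field):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define VLI_UNKNOWN UINT64_MAX		/* "size field not present" */

/*
 * Parse the body of a block header for the single-filter (LZMA2 only)
 * case. buf/size cover the header with its CRC32 already verified and
 * excluded; *pos starts at 2, just past the size and flags bytes.
 */
static bool parse_block_header(const uint8_t *buf, size_t size, size_t *pos,
			       uint64_t *compressed, uint64_t *uncompressed,
			       uint8_t *dict_byte)
{
	uint8_t flags = buf[1];

	if (flags & 0x3F)	/* exactly one filter, reserved bits clear */
		return false;

	*compressed = VLI_UNKNOWN;
	*uncompressed = VLI_UNKNOWN;

	if ((flags & 0x40) &&	/* optional Compressed Size */
			!vli_decode(buf, pos, size, compressed))
		return false;

	if ((flags & 0x80) &&	/* optional Uncompressed Size */
			!vli_decode(buf, pos, size, uncompressed))
		return false;

	/* LZMA2 filter flags: id 0x21, properties size 0x01, dict byte. */
	if (size - *pos < 3 || buf[(*pos)++] != 0x21 || buf[(*pos)++] != 0x01)
		return false;
	*dict_byte = buf[(*pos)++];

	/* Whatever remains is header padding and must be zero. */
	while (*pos < size)
		if (buf[(*pos)++] != 0x00)
			return false;

	return true;
}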
570 static enum xz_ret dec_main(struct xz_dec *s, struct xz_buf *b) in dec_main() argument
578 s->in_start = b->in_pos; in dec_main()
581 switch (s->sequence) { in dec_main()
591 if (!fill_temp(s, b)) in dec_main()
601 s->sequence = SEQ_BLOCK_START; in dec_main()
603 ret = dec_stream_header(s); in dec_main()
614 s->in_start = b->in_pos++; in dec_main()
615 s->sequence = SEQ_INDEX; in dec_main()
623 s->block_header.size in dec_main()
626 s->temp.size = s->block_header.size; in dec_main()
627 s->temp.pos = 0; in dec_main()
628 s->sequence = SEQ_BLOCK_HEADER; in dec_main()
631 if (!fill_temp(s, b)) in dec_main()
634 ret = dec_block_header(s); in dec_main()
638 s->sequence = SEQ_BLOCK_UNCOMPRESS; in dec_main()
641 ret = dec_block(s, b); in dec_main()
645 s->sequence = SEQ_BLOCK_PADDING; in dec_main()
655 while (s->block.compressed & 3) { in dec_main()
662 ++s->block.compressed; in dec_main()
665 s->sequence = SEQ_BLOCK_CHECK; in dec_main()
668 if (s->check_type == XZ_CHECK_CRC32) { in dec_main()
669 ret = crc_validate(s, b, 32); in dec_main()
673 else if (IS_CRC64(s->check_type)) { in dec_main()
674 ret = crc_validate(s, b, 64); in dec_main()
679 else if (!check_skip(s, b)) { in dec_main()
684 s->sequence = SEQ_BLOCK_START; in dec_main()
688 ret = dec_index(s, b); in dec_main()
692 s->sequence = SEQ_INDEX_PADDING; in dec_main()
695 while ((s->index.size + (b->in_pos - s->in_start)) in dec_main()
698 index_update(s, b); in dec_main()
707 index_update(s, b); in dec_main()
710 if (!memeq(&s->block.hash, &s->index.hash, in dec_main()
711 sizeof(s->block.hash))) in dec_main()
714 s->sequence = SEQ_INDEX_CRC32; in dec_main()
717 ret = crc_validate(s, b, 32); in dec_main()
721 s->temp.size = STREAM_HEADER_SIZE; in dec_main()
722 s->sequence = SEQ_STREAM_FOOTER; in dec_main()
725 if (!fill_temp(s, b)) in dec_main()
728 return dec_stream_footer(s); in dec_main()
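
dec_main() is the decoder proper: a loop around a switch on s->sequence, where each SEQ_* state consumes what it can from b and either advances s->sequence or returns XZ_OK the moment a buffer runs dry. Because every state keeps its progress inside struct xz_dec (temp position, VLI shift, CRC bits compared, and so on), the next call re-enters the switch at exactly the same point with fresh buffers. The skeleton below shows the shape of that state machine with only the two temp-buffered states written out; struct xz_dec, struct xz_buf, enum xz_ret, and the helpers are assumed to exist as in the fragments:

/* Skeleton only; the types and helper functions are assumed. */
enum xz_seq {
	SEQ_STREAM_HEADER, SEQ_BLOCK_START, SEQ_BLOCK_HEADER,
	SEQ_BLOCK_UNCOMPRESS, SEQ_BLOCK_PADDING, SEQ_BLOCK_CHECK,
	SEQ_INDEX, SEQ_INDEX_PADDING, SEQ_INDEX_CRC32, SEQ_STREAM_FOOTER
};

static enum xz_ret dec_main(struct xz_dec *s, struct xz_buf *b)
{
	enum xz_ret ret;

	/* Remember where this call's input began, for index accounting. */
	s->in_start = b->in_pos;

	while (true) {
		switch (s->sequence) {
		case SEQ_STREAM_HEADER:
			if (!fill_temp(s, b))
				return XZ_OK;	/* header not complete yet */
			/* Advance s->sequence before the call so decoding can
			 * continue if the caller ignores an unsupported-check
			 * warning from dec_stream_header(). */
			s->sequence = SEQ_BLOCK_START;
			ret = dec_stream_header(s);
			if (ret != XZ_OK)
				return ret;
			break;

		/*
		 * SEQ_BLOCK_START .. SEQ_INDEX_CRC32 follow the same pattern:
		 * decode what the buffers allow, return XZ_OK when either
		 * buffer is exhausted, bump s->sequence when a step is done.
		 */

		case SEQ_STREAM_FOOTER:
			if (!fill_temp(s, b))
				return XZ_OK;
			return dec_stream_footer(s); /* XZ_STREAM_END on success */

		default:
			return XZ_OK;	/* other states elided in this sketch */
		}
	}
}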
760 XZ_EXTERN enum xz_ret xz_dec_run(struct xz_dec *s, struct xz_buf *b) in xz_dec_run() argument
766 if (DEC_IS_SINGLE(s->mode)) in xz_dec_run()
767 xz_dec_reset(s); in xz_dec_run()
771 ret = dec_main(s, b); in xz_dec_run()
773 if (DEC_IS_SINGLE(s->mode)) { in xz_dec_run()
785 if (s->allow_buf_error) in xz_dec_run()
788 s->allow_buf_error = true; in xz_dec_run()
790 s->allow_buf_error = false; in xz_dec_run()
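
xz_dec_run() wraps dec_main(). In single-call mode (DEC_IS_SINGLE) it resets the decoder first and afterwards converts a would-be-resumable XZ_OK into a hard error, since no second call is coming. In multi-call mode the fragments show the allow_buf_error convention: a call that consumes no input and produces no output returns XZ_OK once and arms the flag, and only a second consecutive no-progress call reports XZ_BUF_ERROR, so a caller whose buffers are momentarily empty is not punished while a genuinely stuck loop still terminates. A sketch of that rule (filter_buf_error() is a hypothetical helper; in_start and out_start are the positions captured before dec_main() ran):

/* Hypothetical helper: the two-strikes XZ_BUF_ERROR rule for multi-call mode. */
static enum xz_ret filter_buf_error(struct xz_dec *s, const struct xz_buf *b,
				    size_t in_start, size_t out_start,
				    enum xz_ret ret)
{
	if (ret == XZ_OK && in_start == b->in_pos && out_start == b->out_pos) {
		if (s->allow_buf_error)
			ret = XZ_BUF_ERROR;	/* second call with no progress */
		s->allow_buf_error = true;	/* arm for the next call */
	} else {
		s->allow_buf_error = false;	/* progress was made */
	}
	return ret;
}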
798 struct xz_dec *s = kmalloc(sizeof(*s), GFP_KERNEL); in xz_dec_init() local
799 if (s == NULL) in xz_dec_init()
802 s->mode = mode; in xz_dec_init()
805 s->bcj = xz_dec_bcj_create(DEC_IS_SINGLE(mode)); in xz_dec_init()
806 if (s->bcj == NULL) in xz_dec_init()
810 s->lzma2 = xz_dec_lzma2_create(mode, dict_max); in xz_dec_init()
811 if (s->lzma2 == NULL) in xz_dec_init()
814 xz_dec_reset(s); in xz_dec_init()
815 return s; in xz_dec_init()
819 xz_dec_bcj_end(s->bcj); in xz_dec_init()
822 kfree(s); in xz_dec_init()
826 XZ_EXTERN void xz_dec_reset(struct xz_dec *s) in xz_dec_reset() argument
828 s->sequence = SEQ_STREAM_HEADER; in xz_dec_reset()
829 s->allow_buf_error = false; in xz_dec_reset()
830 s->pos = 0; in xz_dec_reset()
831 s->crc = 0; in xz_dec_reset()
832 memzero(&s->block, sizeof(s->block)); in xz_dec_reset()
833 memzero(&s->index, sizeof(s->index)); in xz_dec_reset()
834 s->temp.pos = 0; in xz_dec_reset()
835 s->temp.size = STREAM_HEADER_SIZE; in xz_dec_reset()
838 XZ_EXTERN void xz_dec_end(struct xz_dec *s) in xz_dec_end() argument
840 if (s != NULL) { in xz_dec_end()
841 xz_dec_lzma2_end(s->lzma2); in xz_dec_end()
843 xz_dec_bcj_end(s->bcj); in xz_dec_end()
845 kfree(s); in xz_dec_end()
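
The remaining fragments are the lifecycle: xz_dec_init() allocates the decoder plus its BCJ and LZMA2 sub-decoders (unwinding on failure), xz_dec_reset() puts it back into SEQ_STREAM_HEADER expecting a STREAM_HEADER_SIZE temp buffer, and xz_dec_end() frees everything. A hedged sketch of how a multi-call user typically drives this API; the include path, the xz_crc32_init() call, and the 1 MiB dictionary limit are assumptions about the surrounding build, not taken from the listing:

#include <linux/xz.h>	/* userspace XZ Embedded builds use "xz.h" instead */

/* Decompress one complete .xz stream held in memory; returns 0 on success. */
static int decompress_buffer(const uint8_t *src, size_t src_len,
			     uint8_t *dst, size_t dst_len)
{
	struct xz_buf b = {
		.in = src,  .in_pos = 0,  .in_size = src_len,
		.out = dst, .out_pos = 0, .out_size = dst_len,
	};
	struct xz_dec *s;
	enum xz_ret ret;

	xz_crc32_init();		/* the CRC32 table must exist first */

	/* Multi-call mode; allocate the dictionary on demand, up to 1 MiB. */
	s = xz_dec_init(XZ_DYNALLOC, 1 << 20);
	if (s == NULL)
		return -1;

	/* With complete buffers one call usually suffices; looping on XZ_OK
	 * is how a caller with streaming buffers would refill and retry. */
	do {
		ret = xz_dec_run(s, &b);
	} while (ret == XZ_OK && b.in_pos < b.in_size);

	xz_dec_end(s);
	return ret == XZ_STREAM_END ? 0 : -1;
}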