/* vi: set sw=4 ts=4: */
/*
 * gunzip implementation for busybox
 *
 * Based on GNU gzip v1.2.4 Copyright (C) 1992-1993 Jean-loup Gailly.
 *
 * Originally adjusted for busybox by Sven Rudolph <sr1@inf.tu-dresden.de>
 * based on gzip sources
 *
 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support
 * files as well as stdin/stdout, and to generally behave itself wrt
 * command line handling.
 *
 * General cleanup to better adhere to the style guide and make use of standard
 * busybox functions by Glenn McGrath
 *
 * read_gz interface + associated hacking by Laurence Anderson
 *
 * Fixed huft_build() so decoding end-of-block code does not grab more bits
 * than necessary (this is required by unzip applet), added inflate_cleanup()
 * to free leaked bytebuffer memory (used in unzip.c), and some minor style
 * guide cleanups by Ed Clark
 *
 * gzip (GNU zip) -- compress files with zip algorithm and 'compress' interface
 * Copyright (C) 1992-1993 Jean-loup Gailly
 * The unzip code was written and put in the public domain by Mark Adler.
 * Portions of the lzw code are derived from the public domain 'compress'
 * written by Spencer Thomas, Joe Orost, James Woods, Jim McKie, Steve Davies,
 * Ken Turkowski, Dave Mack and Peter Jannesen.
 *
 * See the file algorithm.doc for the compression algorithms and file formats.
 *
 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
 */

#include <setjmp.h>
#include "libbb.h"
#include "bb_archive.h"

typedef struct huft_t {
	unsigned char e; /* number of extra bits or operation */
	unsigned char b; /* number of bits in this code or subcode */
	union {
		unsigned short n; /* literal, length base, or distance base */
		struct huft_t *t; /* pointer to next level of table */
	} v;
} huft_t;

enum {
	/* gunzip_window size--must be a power of two, and
	 * at least 32K for zip's deflate method */
	GUNZIP_WSIZE = 0x8000,
	/* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
	BMAX = 16,   /* maximum bit length of any code (16 for explode) */
	N_MAX = 288, /* maximum number of codes in any set */
};


/* This is a somewhat complex-looking arrangement, but it allows
 * the decompressor state to be placed either in bss or in
 * malloc'ed space simply by changing the #defines below.
 * Sizes on i386:
 * text    data     bss     dec     hex
 * 5256       0     108    5364    14f4 - bss
 * 4915       0       0    4915    1333 - malloc
 */
#define STATE_IN_BSS 0
#define STATE_IN_MALLOC 1


typedef struct state_t {
	off_t gunzip_bytes_out; /* number of output bytes */
	uint32_t gunzip_crc;

	int gunzip_src_fd;
	unsigned gunzip_outbuf_count; /* bytes in output buffer */

	unsigned char *gunzip_window;

	uint32_t *gunzip_crc_table;

	/* bitbuffer */
	unsigned gunzip_bb; /* bit buffer */
	unsigned char gunzip_bk; /* bits in bit buffer */

	/* input (compressed) data */
	unsigned char *bytebuffer; /* buffer itself */
	off_t to_read; /* compressed bytes to read (unzip only, -1 for gunzip) */
//	unsigned bytebuffer_max; /* buffer size */
	unsigned bytebuffer_offset; /* buffer position */
	unsigned bytebuffer_size; /* how much data is there (size <= max) */

	/* private data of inflate_codes() */
	unsigned inflate_codes_ml; /* masks for bl and bd bits */
	unsigned inflate_codes_md; /* masks for bl and bd bits */
	unsigned inflate_codes_bb; /* bit buffer */
	unsigned inflate_codes_k; /* number of bits in bit buffer */
	unsigned inflate_codes_w; /* current gunzip_window position */
	huft_t *inflate_codes_tl;
	huft_t *inflate_codes_td;
	unsigned inflate_codes_bl;
	unsigned inflate_codes_bd;
	unsigned inflate_codes_nn; /* length and index for copy */
	unsigned inflate_codes_dd;

	smallint resume_copy;

	/* private data of inflate_get_next_window() */
	smallint method; /* method == -1 for stored, -2 for codes */
	smallint need_another_block;
	smallint end_reached;

	/* private data of inflate_stored() */
	unsigned inflate_stored_n;
	unsigned inflate_stored_b;
	unsigned inflate_stored_k;
	unsigned inflate_stored_w;

	const char *error_msg;
	jmp_buf error_jmp;
} state_t;
#define gunzip_bytes_out    (S()gunzip_bytes_out )
#define gunzip_crc          (S()gunzip_crc       )
#define gunzip_src_fd       (S()gunzip_src_fd    )
#define gunzip_outbuf_count (S()gunzip_outbuf_count)
#define gunzip_window       (S()gunzip_window    )
#define gunzip_crc_table    (S()gunzip_crc_table )
#define gunzip_bb           (S()gunzip_bb        )
#define gunzip_bk           (S()gunzip_bk        )
#define to_read             (S()to_read          )
// #define bytebuffer_max   (S()bytebuffer_max   )
// Both gunzip and unzip can use constant buffer size now (16k):
#define bytebuffer_max 0x4000
#define bytebuffer          (S()bytebuffer       )
#define bytebuffer_offset   (S()bytebuffer_offset)
#define bytebuffer_size     (S()bytebuffer_size  )
#define inflate_codes_ml    (S()inflate_codes_ml )
#define inflate_codes_md    (S()inflate_codes_md )
#define inflate_codes_bb    (S()inflate_codes_bb )
#define inflate_codes_k     (S()inflate_codes_k  )
#define inflate_codes_w     (S()inflate_codes_w  )
#define inflate_codes_tl    (S()inflate_codes_tl )
#define inflate_codes_td    (S()inflate_codes_td )
#define inflate_codes_bl    (S()inflate_codes_bl )
#define inflate_codes_bd    (S()inflate_codes_bd )
#define inflate_codes_nn    (S()inflate_codes_nn )
#define inflate_codes_dd    (S()inflate_codes_dd )
#define resume_copy         (S()resume_copy      )
#define method              (S()method           )
#define need_another_block  (S()need_another_block)
#define end_reached         (S()end_reached      )
#define inflate_stored_n    (S()inflate_stored_n )
#define inflate_stored_b    (S()inflate_stored_b )
#define inflate_stored_k    (S()inflate_stored_k )
#define inflate_stored_w    (S()inflate_stored_w )
#define error_msg           (S()error_msg        )
#define error_jmp           (S()error_jmp        )

/* This is a generic part */
#if STATE_IN_BSS /* Use global data segment */
#define DECLARE_STATE /*nothing*/
#define ALLOC_STATE /*nothing*/
#define DEALLOC_STATE ((void)0)
#define S() state.
#define PASS_STATE /*nothing*/
#define PASS_STATE_ONLY /*nothing*/
#define STATE_PARAM /*nothing*/
#define STATE_PARAM_ONLY void
static state_t state;
#endif

#if STATE_IN_MALLOC /* Use malloc space */
#define DECLARE_STATE state_t *state
#define ALLOC_STATE (state = xzalloc(sizeof(*state)))
#define DEALLOC_STATE free(state)
#define S() state->
#define PASS_STATE state,
#define PASS_STATE_ONLY state
#define STATE_PARAM state_t *state,
#define STATE_PARAM_ONLY state_t *state
#endif
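
/*
 * Illustrative sketch (not part of the original source, kept disabled):
 * how a helper is expected to use the macros above. With STATE_IN_MALLOC,
 * STATE_PARAM adds a "state_t *state," parameter and S() expands to
 * "state->"; with STATE_IN_BSS both collapse to references to the static
 * 'state' object. The names example_helper/example_caller are hypothetical
 * and exist only for this sketch.
 */
#if 0
static unsigned example_helper(STATE_PARAM unsigned n)
{
	/* field macros like gunzip_bk expand to S()gunzip_bk,
	 * i.e. state->gunzip_bk or state.gunzip_bk */
	return gunzip_bk + n;
}

static void example_caller(STATE_PARAM_ONLY)
{
	unsigned bits = example_helper(PASS_STATE 3);
	(void)bits;
}
#endif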


static const uint16_t mask_bits[] ALIGN2 = {
	0x0000, 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
	0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};

/* Copy lengths for literal codes 257..285 */
static const uint16_t cplens[] ALIGN2 = {
	3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59,
	67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0
};

/* note: see note #13 above about the 258 in this list. */
/* Extra bits for literal codes 257..285 */
static const uint8_t cplext[] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5,
	5, 5, 5, 0, 99, 99
}; /* 99 == invalid */

/* Copy offsets for distance codes 0..29 */
static const uint16_t cpdist[] ALIGN2 = {
	1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513,
	769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
};

/* Extra bits for distance codes */
static const uint8_t cpdext[] ALIGN1 = {
	0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10,
	11, 11, 12, 12, 13, 13
};

/* Tables for deflate from PKZIP's appnote.txt. */
/* Order of the bit length code lengths */
static const uint8_t border[] ALIGN1 = {
	16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
};


/*
 * Free the malloc'ed tables built by huft_build(), which makes a linked
 * list of the tables it made, with the links in a dummy first entry of
 * each table.
 * t: table to free
 */
static void huft_free(huft_t *p)
{
	huft_t *q;

	/* Go through linked list, freeing from the malloced (t[-1]) address. */
	while (p) {
		q = (--p)->v.t;
		free(p);
		p = q;
	}
}

static void huft_free_all(STATE_PARAM_ONLY)
{
	huft_free(inflate_codes_tl);
	huft_free(inflate_codes_td);
	inflate_codes_tl = NULL;
	inflate_codes_td = NULL;
}

static void abort_unzip(STATE_PARAM_ONLY) NORETURN;
static void abort_unzip(STATE_PARAM_ONLY)
{
	huft_free_all(PASS_STATE_ONLY);
	longjmp(error_jmp, 1);
}

static unsigned fill_bitbuffer(STATE_PARAM unsigned bitbuffer, unsigned *current, const unsigned required)
{
	while (*current < required) {
		if (bytebuffer_offset >= bytebuffer_size) {
			unsigned sz = bytebuffer_max - 4;
			if (to_read >= 0 && (unsigned) to_read < sz) /* unzip only */
				sz = to_read;
			/* Leave the first 4 bytes empty so we can always unwind the bitbuffer
			 * to the front of the bytebuffer */
			bytebuffer_size = safe_read(gunzip_src_fd, &bytebuffer[4], sz);
			if ((int)bytebuffer_size < 1) {
				error_msg = "unexpected end of file";
				abort_unzip(PASS_STATE_ONLY);
			}
			if (to_read >= 0) /* unzip only */
				to_read -= bytebuffer_size;
			bytebuffer_size += 4;
			bytebuffer_offset = 4;
		}
		bitbuffer |= ((unsigned) bytebuffer[bytebuffer_offset]) << *current;
		bytebuffer_offset++;
		*current += 8;
	}
	return bitbuffer;
}
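
/*
 * Illustrative sketch (not part of the original source, kept disabled):
 * the pattern every caller of fill_bitbuffer() follows to consume n bits.
 * fill_bitbuffer() only guarantees that at least 'required' bits are
 * present in the bit buffer; the caller masks out the low bits and then
 * drops them from its local copy, as inflate_block() does below.
 * example_get_bits is a hypothetical helper used only for this sketch.
 */
#if 0
static unsigned example_get_bits(STATE_PARAM unsigned n)
{
	unsigned b = gunzip_bb; /* local copy of the global bit buffer */
	unsigned k = gunzip_bk; /* number of valid bits in b */
	unsigned val;

	b = fill_bitbuffer(PASS_STATE b, &k, n);
	val = b & mask_bits[n]; /* low n bits are the value */
	b >>= n;                /* discard them */
	k -= n;
	gunzip_bb = b;          /* write the locals back */
	gunzip_bk = k;
	return val;
}
#endif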


/* Given a list of code lengths and a maximum table size, make a set of
 * tables to decode that set of codes.  Return zero on success, one if
 * the given code set is incomplete (the tables are still built in this
 * case), two if the input is invalid (all zero length codes or an
 * oversubscribed set of lengths) - in this case stores NULL in *t.
 *
 * b: code lengths in bits (all assumed <= BMAX)
 * n: number of codes (assumed <= N_MAX)
 * s: number of simple-valued codes (0..s-1)
 * d: list of base values for non-simple codes
 * e: list of extra bits for non-simple codes
 * t: result: starting table
 * m: maximum lookup bits, returns actual
 */
static int huft_build(const unsigned *b, const unsigned n,
		const unsigned s, const unsigned short *d,
		const unsigned char *e, huft_t **t, unsigned *m)
{
	unsigned a;             /* counter for codes of length k */
	unsigned c[BMAX + 1];   /* bit length count table */
	unsigned eob_len;       /* length of end-of-block code (value 256) */
	unsigned f;             /* i repeats in table every f entries */
	int g;                  /* maximum code length */
	int htl;                /* table level */
	unsigned i;             /* counter, current code */
	unsigned j;             /* counter */
	int k;                  /* number of bits in current code */
	const unsigned *p;      /* pointer into c[], b[], or v[] */
	huft_t *q;              /* points to current table */
	huft_t r;               /* table entry for structure assignment */
	huft_t *u[BMAX];        /* table stack */
	unsigned v[N_MAX + 1];  /* values in order of bit length. last v[] is never used */
	int ws[BMAX + 1];       /* bits decoded stack */
	int w;                  /* bits decoded */
	unsigned x[BMAX + 1];   /* bit offsets, then code stack */
	unsigned *xp;           /* pointer into x */
	int y;                  /* number of dummy codes added */
	unsigned z;             /* number of entries in current table */

	/* Length of EOB code, if any */
	eob_len = n > 256 ? b[256] : BMAX;

	*t = NULL;

	/* Generate counts for each bit length */
	memset(c, 0, sizeof(c));
	p = b;
	i = n;
	do {
		c[*p]++; /* assume all entries <= BMAX */
		p++;     /* can't combine with above line (Solaris bug) */
	} while (--i);
	if (c[0] == n) { /* null input - all zero length codes */
		*m = 0;
		return 2;
	}

	/* Find minimum and maximum length, bound *m by those */
	for (j = 1; (j <= BMAX) && (c[j] == 0); j++)
		continue;
	k = j; /* minimum code length */
	for (i = BMAX; (c[i] == 0) && i; i--)
		continue;
	g = i; /* maximum code length */
	*m = (*m < j) ? j : ((*m > i) ? i : *m);

	/* Adjust last length count to fill out codes, if needed */
	for (y = 1 << j; j < i; j++, y <<= 1) {
		y -= c[j];
		if (y < 0)
			return 2; /* bad input: more codes than bits */
	}
	y -= c[i];
	if (y < 0)
		return 2;
	c[i] += y;

	/* Generate starting offsets into the value table for each length */
	x[1] = j = 0;
	p = c + 1;
	xp = x + 2;
	while (--i) { /* note that i == g from above */
		j += *p++;
		*xp++ = j;
	}

	/* Make a table of values in order of bit lengths.
	 * To detect bad input, unused v[i]'s are set to invalid value UINT_MAX.
	 * In particular, last v[i] is never filled and must not be accessed.
	 */
	memset(v, 0xff, sizeof(v));
	p = b;
	i = 0;
	do {
		j = *p++;
		if (j != 0) {
			v[x[j]++] = i;
		}
	} while (++i < n);

	/* Generate the Huffman codes and for each, make the table entries */
	x[0] = i = 0;  /* first Huffman code is zero */
	p = v;         /* grab values in bit order */
	htl = -1;      /* no tables yet--level -1 */
	w = ws[0] = 0; /* bits decoded */
	u[0] = NULL;   /* just to keep compilers happy */
	q = NULL;      /* ditto */
	z = 0;         /* ditto */

	/* go through the bit lengths (k already is bits in shortest code) */
	for (; k <= g; k++) {
		a = c[k];
		while (a--) {
			/* here i is the Huffman code of length k bits for value *p */
			/* make tables up to required level */
			while (k > ws[htl + 1]) {
				w = ws[++htl];

				/* compute minimum size table less than or equal to *m bits */
				z = g - w;
				z = z > *m ? *m : z; /* upper limit on table size */
				j = k - w;
				f = 1 << j;
				if (f > a + 1) { /* try a k-w bit table */
					/* too few codes for k-w bit table */
					f -= a + 1; /* deduct codes from patterns left */
					xp = c + k;
					while (++j < z) { /* try smaller tables up to z bits */
						f <<= 1;
						if (f <= *++xp) {
							break; /* enough codes to use up j bits */
						}
						f -= *xp; /* else deduct codes from patterns */
					}
				}
				j = ((unsigned) (w + j) > eob_len && w >= 0 && (unsigned) w < eob_len) ? eob_len - w : j; /* make EOB code end at table */
				z = 1 << j; /* table entries for j-bit table */
				ws[htl+1] = w + j; /* set bits decoded in stack */

				/* allocate and link in new table */
				q = xzalloc((z + 1) * sizeof(huft_t));
				*t = q + 1; /* link to list for huft_free() */
				t = &(q->v.t);
				u[htl] = ++q; /* table starts after link */

				/* connect to last table, if there is one */
				if (htl) {
					x[htl] = i; /* save pattern for backing up */
					r.b = (unsigned char) (w - ws[htl - 1]); /* bits to dump before this table */
					r.e = (unsigned char) (16 + j); /* bits in this table */
					r.v.t = q; /* pointer to this table */
					j = (i & ((1 << w) - 1)) >> ws[htl - 1];
					u[htl - 1][j] = r; /* connect to last table */
				}
			}

			/* set up table entry in r */
			r.b = (unsigned char) (k - w);
			if (/*p >= v + n || -- redundant, caught by the second check: */
			    *p == UINT_MAX /* do we access uninited v[i]? (see memset(v))*/
			) {
				r.e = 99; /* out of values--invalid code */
			} else if (*p < s) {
				r.e = (unsigned char) (*p < 256 ? 16 : 15); /* 256 is EOB code */
				r.v.n = (unsigned short) (*p++); /* simple code is just the value */
			} else {
				r.e = (unsigned char) e[*p - s]; /* non-simple--look up in lists */
				r.v.n = d[*p++ - s];
			}

			/* fill code-like entries with r */
			f = 1 << (k - w);
			for (j = i >> w; j < z; j += f) {
				q[j] = r;
			}

			/* backwards increment the k-bit code i */
			for (j = 1 << (k - 1); i & j; j >>= 1) {
				i ^= j;
			}
			i ^= j;

			/* backup over finished tables */
			while ((i & ((1 << w) - 1)) != x[htl]) {
				w = ws[--htl];
			}
		}
	}

	/* return actual size of base table */
	*m = ws[1];

	/* Return 1 if we were given an incomplete table */
	return y != 0 && g != 1;
}
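
/*
 * Illustrative sketch (not part of the original source, kept disabled):
 * how huft_build() is meant to be called. This mirrors the fixed-Huffman
 * setup in inflate_block() below: fill a code-length array, pick a starting
 * lookup width in bits, and check the return code (0 = ok, 1 = incomplete
 * but usable, 2 = invalid input). The function name is hypothetical.
 */
#if 0
static int example_build_fixed_literal_table(STATE_PARAM_ONLY)
{
	unsigned ll[288];
	unsigned bl = 7; /* on input: max lookup bits; on output: actual */
	int i;

	for (i = 0; i < 144; i++) ll[i] = 8;
	for (; i < 256; i++) ll[i] = 9;
	for (; i < 280; i++) ll[i] = 7;
	for (; i < 288; i++) ll[i] = 8;
	/* the resulting table chain is later freed via huft_free_all() */
	return huft_build(ll, 288, 257, cplens, cplext, &inflate_codes_tl, &bl);
}
#endif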


/*
 * inflate (decompress) the codes in a deflated (compressed) block.
 * Return an error code or zero if it all goes ok.
 *
 * tl, td: literal/length and distance decoder tables
 * bl, bd: number of bits decoded by tl[] and td[]
 */
/* called once from inflate_block */

/* map formerly local static variables to globals */
#define ml inflate_codes_ml
#define md inflate_codes_md
#define bb inflate_codes_bb
#define k inflate_codes_k
#define w inflate_codes_w
#define tl inflate_codes_tl
#define td inflate_codes_td
#define bl inflate_codes_bl
#define bd inflate_codes_bd
#define nn inflate_codes_nn
#define dd inflate_codes_dd
static void inflate_codes_setup(STATE_PARAM unsigned my_bl, unsigned my_bd)
{
	bl = my_bl;
	bd = my_bd;
	/* make local copies of globals */
	bb = gunzip_bb; /* initialize bit buffer */
	k = gunzip_bk;
	w = gunzip_outbuf_count; /* initialize gunzip_window position */
	/* inflate the coded data */
	ml = mask_bits[bl]; /* precompute masks for speed */
	md = mask_bits[bd];
}
/* called once from inflate_get_next_window */
static NOINLINE int inflate_codes(STATE_PARAM_ONLY)
{
	unsigned e; /* table entry flag/number of extra bits */
	huft_t *t;  /* pointer to table entry */

	if (resume_copy)
		goto do_copy;

	while (1) { /* do until end of block */
		bb = fill_bitbuffer(PASS_STATE bb, &k, bl);
		t = tl + ((unsigned) bb & ml);
		e = t->e;
		if (e > 16)
			do {
				if (e == 99) {
					abort_unzip(PASS_STATE_ONLY);
				}
				bb >>= t->b;
				k -= t->b;
				e -= 16;
				bb = fill_bitbuffer(PASS_STATE bb, &k, e);
				t = t->v.t + ((unsigned) bb & mask_bits[e]);
				e = t->e;
			} while (e > 16);
		bb >>= t->b;
		k -= t->b;
		if (e == 16) { /* then it's a literal */
			gunzip_window[w++] = (unsigned char) t->v.n;
			if (w == GUNZIP_WSIZE) {
				gunzip_outbuf_count = w;
				//flush_gunzip_window();
				w = 0;
				return 1; // We have a block to read
			}
		} else { /* it's an EOB or a length */
			/* exit if end of block */
			if (e == 15) {
				break;
			}

			/* get length of block to copy */
			bb = fill_bitbuffer(PASS_STATE bb, &k, e);
			nn = t->v.n + ((unsigned) bb & mask_bits[e]);
			bb >>= e;
			k -= e;

			/* decode distance of block to copy */
			bb = fill_bitbuffer(PASS_STATE bb, &k, bd);
			t = td + ((unsigned) bb & md);
			e = t->e;
			if (e > 16)
				do {
					if (e == 99) {
						abort_unzip(PASS_STATE_ONLY);
					}
					bb >>= t->b;
					k -= t->b;
					e -= 16;
					bb = fill_bitbuffer(PASS_STATE bb, &k, e);
					t = t->v.t + ((unsigned) bb & mask_bits[e]);
					e = t->e;
				} while (e > 16);
			bb >>= t->b;
			k -= t->b;
			bb = fill_bitbuffer(PASS_STATE bb, &k, e);
			dd = w - t->v.n - ((unsigned) bb & mask_bits[e]);
			bb >>= e;
			k -= e;

			/* do the copy */
 do_copy:
			do {
				/* Was: nn -= (e = (e = GUNZIP_WSIZE - ((dd &= GUNZIP_WSIZE - 1) > w ? dd : w)) > nn ? nn : e); */
				/* Who wrote THAT?? rewritten as: */
				unsigned delta;

				dd &= GUNZIP_WSIZE - 1;
				e = GUNZIP_WSIZE - (dd > w ? dd : w);
				delta = w > dd ? w - dd : dd - w;
				if (e > nn) e = nn;
				nn -= e;

				/* copy to new buffer to prevent possible overwrite */
				if (delta >= e) {
					memcpy(gunzip_window + w, gunzip_window + dd, e);
					w += e;
					dd += e;
				} else {
					/* do it slow to avoid memcpy() overlap */
					/* !NOMEMCPY */
					do {
						gunzip_window[w++] = gunzip_window[dd++];
					} while (--e);
				}
				if (w == GUNZIP_WSIZE) {
					gunzip_outbuf_count = w;
					resume_copy = (nn != 0);
					//flush_gunzip_window();
					w = 0;
					return 1;
				}
			} while (nn);
			resume_copy = 0;
		}
	}

	/* restore the globals from the locals */
	gunzip_outbuf_count = w; /* restore global gunzip_window pointer */
	gunzip_bb = bb; /* restore global bit buffer */
	gunzip_bk = k;

	/* normally just after call to inflate_codes, but save code by putting it here */
	/* free the decoding tables (tl and td), return */
	huft_free_all(PASS_STATE_ONLY);

	/* done */
	return 0;
}
#undef ml
#undef md
#undef bb
#undef k
#undef w
#undef tl
#undef td
#undef bl
#undef bd
#undef nn
#undef dd

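/*
 * Illustrative sketch (not part of the original source, kept disabled):
 * the same sliding-window copy that the do_copy loop above performs, as a
 * standalone hypothetical helper. A match is copied from 'dist' bytes back
 * in the circular 32K window; each chunk stops at the window end, and
 * memcpy() is used only when source and destination cannot overlap. Unlike
 * the real code, this sketch only wraps the window instead of flushing it
 * to the output when it fills.
 */
#if 0
static void example_window_copy(unsigned char *window, unsigned *wpos,
		unsigned dist, unsigned len)
{
	unsigned w = *wpos;
	unsigned d = (w - dist) & (GUNZIP_WSIZE - 1); /* source index, wrapped */

	while (len != 0) {
		unsigned chunk = GUNZIP_WSIZE - (d > w ? d : w); /* room until window end */
		if (chunk > len)
			chunk = len;
		len -= chunk;
		if ((w > d ? w - d : d - w) >= chunk) {
			/* regions cannot overlap: bulk copy */
			memcpy(window + w, window + d, chunk);
			w += chunk;
			d += chunk;
		} else {
			/* overlapping match (dist < chunk): copy byte by byte */
			do {
				window[w++] = window[d++];
			} while (--chunk);
		}
		w &= GUNZIP_WSIZE - 1;
		d &= GUNZIP_WSIZE - 1;
	}
	*wpos = w;
}
#endif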

/* called once from inflate_block */
static void inflate_stored_setup(STATE_PARAM int my_n, int my_b, int my_k)
{
	inflate_stored_n = my_n;
	inflate_stored_b = my_b;
	inflate_stored_k = my_k;
	/* initialize gunzip_window position */
	inflate_stored_w = gunzip_outbuf_count;
}
/* called once from inflate_get_next_window */
static int inflate_stored(STATE_PARAM_ONLY)
{
	/* read and output the compressed data */
	while (inflate_stored_n--) {
		inflate_stored_b = fill_bitbuffer(PASS_STATE inflate_stored_b, &inflate_stored_k, 8);
		gunzip_window[inflate_stored_w++] = (unsigned char) inflate_stored_b;
		if (inflate_stored_w == GUNZIP_WSIZE) {
			gunzip_outbuf_count = inflate_stored_w;
			//flush_gunzip_window();
			inflate_stored_w = 0;
			inflate_stored_b >>= 8;
			inflate_stored_k -= 8;
			return 1; /* We have a block */
		}
		inflate_stored_b >>= 8;
		inflate_stored_k -= 8;
	}

	/* restore the globals from the locals */
	gunzip_outbuf_count = inflate_stored_w; /* restore global gunzip_window pointer */
	gunzip_bb = inflate_stored_b; /* restore global bit buffer */
	gunzip_bk = inflate_stored_k;
	return 0; /* Finished */
}


/*
 * decompress an inflated block
 * e: last block flag
 *
 * GLOBAL VARIABLES: bb, kk,
 */
/* Return values: -1 = inflate_stored, -2 = inflate_codes */
/* One callsite in inflate_get_next_window */
static int inflate_block(STATE_PARAM smallint *e)
{
	unsigned ll[286 + 30]; /* literal/length and distance code lengths */
	unsigned t; /* block type */
	unsigned b; /* bit buffer */
	unsigned k; /* number of bits in bit buffer */

	/* make local bit buffer */

	b = gunzip_bb;
	k = gunzip_bk;

	/* read in last block bit */
	b = fill_bitbuffer(PASS_STATE b, &k, 1);
	*e = b & 1;
	b >>= 1;
	k -= 1;

	/* read in block type */
	b = fill_bitbuffer(PASS_STATE b, &k, 2);
	t = (unsigned) b & 3;
	b >>= 2;
	k -= 2;

	/* restore the global bit buffer */
	gunzip_bb = b;
	gunzip_bk = k;

	/* Do we see block type 1 often? Yes!
	 * TODO: fix performance problem (see below) */
	//bb_error_msg("blktype %d", t);

	/* inflate that block type */
	switch (t) {
	case 0: /* Inflate stored */
	{
		unsigned n;        /* number of bytes in block */
		unsigned b_stored; /* bit buffer */
		unsigned k_stored; /* number of bits in bit buffer */

		/* make local copies of globals */
		b_stored = gunzip_bb; /* initialize bit buffer */
		k_stored = gunzip_bk;

		/* go to byte boundary */
		n = k_stored & 7;
		b_stored >>= n;
		k_stored -= n;

		/* get the length and its complement */
		b_stored = fill_bitbuffer(PASS_STATE b_stored, &k_stored, 16);
		n = ((unsigned) b_stored & 0xffff);
		b_stored >>= 16;
		k_stored -= 16;

		b_stored = fill_bitbuffer(PASS_STATE b_stored, &k_stored, 16);
		if (n != (unsigned) ((~b_stored) & 0xffff)) {
			abort_unzip(PASS_STATE_ONLY); /* error in compressed data */
		}
		b_stored >>= 16;
		k_stored -= 16;

		inflate_stored_setup(PASS_STATE n, b_stored, k_stored);

		return -1;
	}
	case 1:
	/* Inflate fixed
	 * decompress an inflated type 1 (fixed Huffman codes) block. We should
	 * either replace this with a custom decoder, or at least precompute the
	 * Huffman tables. TODO */
	{
		int i;       /* temporary variable */
		unsigned bl; /* lookup bits for tl */
		unsigned bd; /* lookup bits for td */
		/* gcc 4.2.1 is too dumb to reuse stackspace. Moved up... */
		//unsigned ll[288]; /* length list for huft_build */

		/* set up literal table */
		for (i = 0; i < 144; i++)
			ll[i] = 8;
		for (; i < 256; i++)
			ll[i] = 9;
		for (; i < 280; i++)
			ll[i] = 7;
		for (; i < 288; i++) /* make a complete, but wrong code set */
			ll[i] = 8;
		bl = 7;
		huft_build(ll, 288, 257, cplens, cplext, &inflate_codes_tl, &bl);
		/* huft_build() never returns nonzero - we use known data */

		/* set up distance table */
		for (i = 0; i < 30; i++) /* make an incomplete code set */
			ll[i] = 5;
		bd = 5;
		huft_build(ll, 30, 0, cpdist, cpdext, &inflate_codes_td, &bd);

		/* set up data for inflate_codes() */
		inflate_codes_setup(PASS_STATE bl, bd);

		/* huft_free code moved into inflate_codes */

		return -2;
	}
	case 2: /* Inflate dynamic */
	{
		enum { dbits = 6 }; /* bits in base distance lookup table */
		enum { lbits = 9 }; /* bits in base literal/length lookup table */

		huft_t *td; /* distance code table */
		unsigned i; /* temporary variables */
		unsigned j;
		unsigned l;  /* last length */
		unsigned m;  /* mask for bit lengths table */
		unsigned n;  /* number of lengths to get */
		unsigned bl; /* lookup bits for tl */
		unsigned bd; /* lookup bits for td */
		unsigned nb; /* number of bit length codes */
		unsigned nl; /* number of literal/length codes */
		unsigned nd; /* number of distance codes */

		//unsigned ll[286 + 30];/* literal/length and distance code lengths */
		unsigned b_dynamic; /* bit buffer */
		unsigned k_dynamic; /* number of bits in bit buffer */

		/* make local bit buffer */
		b_dynamic = gunzip_bb;
		k_dynamic = gunzip_bk;

		/* read in table lengths */
		b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, 5);
		nl = 257 + ((unsigned) b_dynamic & 0x1f); /* number of literal/length codes */

		b_dynamic >>= 5;
		k_dynamic -= 5;
		b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, 5);
		nd = 1 + ((unsigned) b_dynamic & 0x1f); /* number of distance codes */

		b_dynamic >>= 5;
		k_dynamic -= 5;
		b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, 4);
		nb = 4 + ((unsigned) b_dynamic & 0xf); /* number of bit length codes */

		b_dynamic >>= 4;
		k_dynamic -= 4;
		if (nl > 286 || nd > 30) {
			abort_unzip(PASS_STATE_ONLY); /* bad lengths */
		}

		/* read in bit-length-code lengths */
		for (j = 0; j < nb; j++) {
			b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, 3);
			ll[border[j]] = (unsigned) b_dynamic & 7;
			b_dynamic >>= 3;
			k_dynamic -= 3;
		}
		for (; j < 19; j++)
			ll[border[j]] = 0;

		/* build decoding table for trees - single level, 7 bit lookup */
		bl = 7;
		i = huft_build(ll, 19, 19, NULL, NULL, &inflate_codes_tl, &bl);
		if (i != 0) {
			abort_unzip(PASS_STATE_ONLY); //return i; /* incomplete code set */
		}

		/* read in literal and distance code lengths */
		n = nl + nd;
		m = mask_bits[bl];
		i = l = 0;
		while ((unsigned) i < n) {
			b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, (unsigned)bl);
			td = inflate_codes_tl + ((unsigned) b_dynamic & m);
			j = td->b;
			b_dynamic >>= j;
			k_dynamic -= j;
			j = td->v.n;
			if (j < 16) { /* length of code in bits (0..15) */
				ll[i++] = l = j; /* save last length in l */
			} else if (j == 16) { /* repeat last length 3 to 6 times */
				b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, 2);
				j = 3 + ((unsigned) b_dynamic & 3);
				b_dynamic >>= 2;
				k_dynamic -= 2;
				if ((unsigned) i + j > n) {
					abort_unzip(PASS_STATE_ONLY); //return 1;
				}
				while (j--) {
					ll[i++] = l;
				}
			} else if (j == 17) { /* 3 to 10 zero length codes */
				b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, 3);
				j = 3 + ((unsigned) b_dynamic & 7);
				b_dynamic >>= 3;
				k_dynamic -= 3;
				if ((unsigned) i + j > n) {
					abort_unzip(PASS_STATE_ONLY); //return 1;
				}
				while (j--) {
					ll[i++] = 0;
				}
				l = 0;
			} else { /* j == 18: 11 to 138 zero length codes */
				b_dynamic = fill_bitbuffer(PASS_STATE b_dynamic, &k_dynamic, 7);
				j = 11 + ((unsigned) b_dynamic & 0x7f);
				b_dynamic >>= 7;
				k_dynamic -= 7;
				if ((unsigned) i + j > n) {
					abort_unzip(PASS_STATE_ONLY); //return 1;
				}
				while (j--) {
					ll[i++] = 0;
				}
				l = 0;
			}
		}

		/* free decoding table for trees */
		huft_free(inflate_codes_tl);

		/* restore the global bit buffer */
		gunzip_bb = b_dynamic;
		gunzip_bk = k_dynamic;

		/* build the decoding tables for literal/length and distance codes */
		bl = lbits;

		i = huft_build(ll, nl, 257, cplens, cplext, &inflate_codes_tl, &bl);
		if (i != 0) {
			abort_unzip(PASS_STATE_ONLY);
		}
		bd = dbits;
		i = huft_build(ll + nl, nd, 0, cpdist, cpdext, &inflate_codes_td, &bd);
		if (i != 0) {
			abort_unzip(PASS_STATE_ONLY);
		}

		/* set up data for inflate_codes() */
		inflate_codes_setup(PASS_STATE bl, bd);

		/* huft_free code moved into inflate_codes */

		return -2;
	}
	default:
		abort_unzip(PASS_STATE_ONLY);
	}
}

/* Two callsites, both in inflate_get_next_window */
static void calculate_gunzip_crc(STATE_PARAM_ONLY)
{
	gunzip_crc = crc32_block_endian0(gunzip_crc, gunzip_window, gunzip_outbuf_count, gunzip_crc_table);
	gunzip_bytes_out += gunzip_outbuf_count;
}

/* One callsite in inflate_unzip_internal */
static int inflate_get_next_window(STATE_PARAM_ONLY)
{
	gunzip_outbuf_count = 0;

	while (1) {
		int ret;

		if (need_another_block) {
			if (end_reached) {
				calculate_gunzip_crc(PASS_STATE_ONLY);
				end_reached = 0;
				/* NB: need_another_block is still set */
				return 0; /* Last block */
			}
			method = inflate_block(PASS_STATE &end_reached);
			need_another_block = 0;
		}

		switch (method) {
		case -1:
			ret = inflate_stored(PASS_STATE_ONLY);
			break;
		case -2:
			ret = inflate_codes(PASS_STATE_ONLY);
			break;
		default: /* cannot happen */
			abort_unzip(PASS_STATE_ONLY);
		}

		if (ret == 1) {
			calculate_gunzip_crc(PASS_STATE_ONLY);
			return 1; /* more data left */
		}
		need_another_block = 1; /* end of that block */
	}
	/* Doesn't get here */
}


/* Called from unpack_gz_stream() and inflate_unzip() */
static IF_DESKTOP(long long) int
inflate_unzip_internal(STATE_PARAM transformer_state_t *xstate)
{
	IF_DESKTOP(long long) int n = 0;
	ssize_t nwrote;

	/* Allocate all global buffers (for DYN_ALLOC option) */
	gunzip_window = xmalloc(GUNZIP_WSIZE);
	gunzip_outbuf_count = 0;
	gunzip_bytes_out = 0;
	gunzip_src_fd = xstate->src_fd;

	/* (re) initialize state */
	method = -1;
	need_another_block = 1;
	resume_copy = 0;
	gunzip_bk = 0;
	gunzip_bb = 0;

	/* Create the crc table */
	gunzip_crc_table = crc32_filltable(NULL, 0);
	gunzip_crc = ~0;

	error_msg = "corrupted data";
	if (setjmp(error_jmp)) {
		/* Error from deep inside zip machinery */
		n = -1;
		goto ret;
	}

	while (1) {
		int r = inflate_get_next_window(PASS_STATE_ONLY);
		nwrote = transformer_write(xstate, gunzip_window, gunzip_outbuf_count);
		if (nwrote == (ssize_t)-1) {
			n = -1;
			goto ret;
		}
		IF_DESKTOP(n += nwrote;)
		if (r == 0) break;
	}

	/* Store unused bytes in a global buffer so calling applets can access it */
	if (gunzip_bk >= 8) {
		/* Undo too much lookahead. The next read will be byte aligned
		 * so we can discard unused bits in the last meaningful byte. */
		bytebuffer_offset--;
		bytebuffer[bytebuffer_offset] = gunzip_bb & 0xff;
		gunzip_bb >>= 8;
		gunzip_bk -= 8;
	}
 ret:
	/* Cleanup */
	free(gunzip_window);
	free(gunzip_crc_table);
	return n;
}


/* External entry points */

/* For unzip */

IF_DESKTOP(long long) int FAST_FUNC
inflate_unzip(transformer_state_t *xstate)
{
	IF_DESKTOP(long long) int n;
	DECLARE_STATE;

	ALLOC_STATE;

	to_read = xstate->bytes_in;
//	bytebuffer_max = 0x8000;
	bytebuffer_offset = 4;
	bytebuffer = xmalloc(bytebuffer_max);
	n = inflate_unzip_internal(PASS_STATE xstate);
	free(bytebuffer);

	xstate->crc32 = gunzip_crc;
	xstate->bytes_out = gunzip_bytes_out;
	DEALLOC_STATE;
	return n;
}


/* For gunzip */

/* helpers first */

/* Top up the input buffer with at least n bytes. */
static int top_up(STATE_PARAM unsigned n)
{
	int count = bytebuffer_size - bytebuffer_offset;

	if (count < (int)n) {
		memmove(bytebuffer, &bytebuffer[bytebuffer_offset], count);
		bytebuffer_offset = 0;
		bytebuffer_size = full_read(gunzip_src_fd, &bytebuffer[count], bytebuffer_max - count);
		if ((int)bytebuffer_size < 0) {
			bb_error_msg(bb_msg_read_error);
			return 0;
		}
		bytebuffer_size += count;
		if (bytebuffer_size < n)
			return 0;
	}
	return 1;
}

static uint16_t buffer_read_le_u16(STATE_PARAM_ONLY)
{
	uint16_t res;
#if BB_LITTLE_ENDIAN
	move_from_unaligned16(res, &bytebuffer[bytebuffer_offset]);
#else
	res = bytebuffer[bytebuffer_offset];
	res |= bytebuffer[bytebuffer_offset + 1] << 8;
#endif
	bytebuffer_offset += 2;
	return res;
}

static uint32_t buffer_read_le_u32(STATE_PARAM_ONLY)
{
	uint32_t res;
#if BB_LITTLE_ENDIAN
	move_from_unaligned32(res, &bytebuffer[bytebuffer_offset]);
#else
	res = bytebuffer[bytebuffer_offset];
	res |= bytebuffer[bytebuffer_offset + 1] << 8;
	res |= bytebuffer[bytebuffer_offset + 2] << 16;
	res |= bytebuffer[bytebuffer_offset + 3] << 24;
#endif
	bytebuffer_offset += 4;
	return res;
}
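
/*
 * Illustrative sketch (not part of the original source, kept disabled):
 * how the two little-endian helpers above are used on the 8-byte gzip
 * member trailer (CRC-32 of the uncompressed data followed by its size
 * modulo 2^32), mirroring the checks at the end of unpack_gz_stream()
 * below. The helper name is hypothetical and error reporting is left out.
 */
#if 0
static int example_check_gzip_trailer(STATE_PARAM_ONLY)
{
	uint32_t stored_crc, stored_isize;

	if (!top_up(PASS_STATE 8))
		return 0;
	stored_crc = buffer_read_le_u32(PASS_STATE_ONLY);
	stored_isize = buffer_read_le_u32(PASS_STATE_ONLY);
	/* gunzip_crc accumulates the inverted CRC; compare its complement */
	return (~gunzip_crc) == stored_crc
		&& (uint32_t)gunzip_bytes_out == stored_isize;
}
#endif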

static int check_header_gzip(STATE_PARAM transformer_state_t *xstate)
{
	union {
		unsigned char raw[8];
		struct {
			uint8_t gz_method;
			uint8_t flags;
			uint32_t mtime;
			uint8_t xtra_flags_UNUSED;
			uint8_t os_flags_UNUSED;
		} PACKED formatted;
	} header;

	BUILD_BUG_ON(sizeof(header) != 8);

	/*
	 * Rewind bytebuffer. We use the beginning because the header has 8
	 * bytes, leaving enough for unwinding afterwards.
	 */
	bytebuffer_size -= bytebuffer_offset;
	memmove(bytebuffer, &bytebuffer[bytebuffer_offset], bytebuffer_size);
	bytebuffer_offset = 0;

	if (!top_up(PASS_STATE 8))
		return 0;
	memcpy(header.raw, &bytebuffer[bytebuffer_offset], 8);
	bytebuffer_offset += 8;

	/* Check the compression method */
	if (header.formatted.gz_method != 8) {
		return 0;
	}

	if (header.formatted.flags & 0x04) {
		/* bit 2 set: extra field present */
		unsigned extra_short;

		if (!top_up(PASS_STATE 2))
			return 0;
		extra_short = buffer_read_le_u16(PASS_STATE_ONLY);
		if (!top_up(PASS_STATE extra_short))
			return 0;
		/* Ignore extra field */
		bytebuffer_offset += extra_short;
	}

	/* Discard original name and file comment if any */
	/* bit 3 set: original file name present */
	/* bit 4 set: file comment present */
	if (header.formatted.flags & 0x18) {
		while (1) {
			do {
				if (!top_up(PASS_STATE 1))
					return 0;
			} while (bytebuffer[bytebuffer_offset++] != 0);
			if ((header.formatted.flags & 0x18) != 0x18)
				break;
			header.formatted.flags &= ~0x18;
		}
	}

	xstate->mtime = SWAP_LE32(header.formatted.mtime);

	/* Read the header checksum */
	if (header.formatted.flags & 0x02) {
		if (!top_up(PASS_STATE 2))
			return 0;
		bytebuffer_offset += 2;
	}
	return 1;
}
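
/*
 * Illustrative sketch (not part of the original source, kept disabled):
 * a worked example of the byte layout check_header_gzip() parses. The two
 * ID bytes (1f 8b) are consumed by the caller before this function sees the
 * stream; the union above then maps method, flags and mtime onto the next
 * 8 bytes. The array name below is hypothetical and exists only for this
 * sketch.
 */
#if 0
static const unsigned char example_gzip_member[] = {
	0x1f, 0x8b,             /* ID1, ID2 (checked in unpack_gz_stream) */
	0x08,                   /* CM: 8 = deflate (header.formatted.gz_method) */
	0x00,                   /* FLG: no FEXTRA/FNAME/FCOMMENT/FHCRC bits set */
	0x00, 0x00, 0x00, 0x00, /* MTIME, little-endian (header.formatted.mtime) */
	0x00,                   /* XFL (xtra_flags_UNUSED) */
	0x03,                   /* OS: 3 = Unix (os_flags_UNUSED) */
	/* ...deflate data, then the 8-byte CRC32 + ISIZE trailer... */
};
#endif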

IF_DESKTOP(long long) int FAST_FUNC
unpack_gz_stream(transformer_state_t *xstate)
{
	uint32_t v32;
	IF_DESKTOP(long long) int total, n;
	DECLARE_STATE;

#if !ENABLE_FEATURE_SEAMLESS_Z
	if (check_signature16(xstate, GZIP_MAGIC))
		return -1;
#else
	if (!xstate->signature_skipped) {
		uint16_t magic2;

		if (full_read(xstate->src_fd, &magic2, 2) != 2) {
 bad_magic:
			bb_error_msg("invalid magic");
			return -1;
		}
		if (magic2 == COMPRESS_MAGIC) {
			xstate->signature_skipped = 2;
			return unpack_Z_stream(xstate);
		}
		if (magic2 != GZIP_MAGIC)
			goto bad_magic;
	}
#endif

	total = 0;

	ALLOC_STATE;
	to_read = -1;
//	bytebuffer_max = 0x8000;
	bytebuffer = xmalloc(bytebuffer_max);
	gunzip_src_fd = xstate->src_fd;

 again:
	if (!check_header_gzip(PASS_STATE xstate)) {
		bb_error_msg("corrupted data");
		total = -1;
		goto ret;
	}

	n = inflate_unzip_internal(PASS_STATE xstate);
	if (n < 0) {
		total = -1;
		goto ret;
	}
	total += n;

	if (!top_up(PASS_STATE 8)) {
		bb_error_msg("corrupted data");
		total = -1;
		goto ret;
	}

	/* Validate decompression - crc */
	v32 = buffer_read_le_u32(PASS_STATE_ONLY);
	if ((~gunzip_crc) != v32) {
		bb_error_msg("crc error");
		total = -1;
		goto ret;
	}

	/* Validate decompression - size */
	v32 = buffer_read_le_u32(PASS_STATE_ONLY);
	if ((uint32_t)gunzip_bytes_out != v32) {
		bb_error_msg("incorrect length");
		total = -1;
	}

	if (!top_up(PASS_STATE 2))
		goto ret; /* EOF */

	if (bytebuffer[bytebuffer_offset] == 0x1f
	 && bytebuffer[bytebuffer_offset + 1] == 0x8b
	) {
		bytebuffer_offset += 2;
		goto again;
	}
	/* GNU gzip says: */
	/*bb_error_msg("decompression OK, trailing garbage ignored");*/

 ret:
	free(bytebuffer);
	DEALLOC_STATE;
	return total;
}