blob: 1f0b70fbf2917df502f8ea9e4a58116bbcb78bda
1 | /* vi: set sw=4 ts=4: */ |
2 | /* |
3 | * Gzip implementation for busybox |
4 | * |
5 | * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly. |
6 | * |
7 | * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com> |
8 | * "this is a stripped down version of gzip I put into busybox, it does |
9 | * only standard in to standard out with -9 compression. It also requires |
10 | * the zcat module for some important functions." |
11 | * |
12 | * Adjusted further by Erik Andersen <andersen@codepoet.org> to support |
13 | * files as well as stdin/stdout, and to generally behave itself wrt |
14 | * command line handling. |
15 | * |
16 | * Licensed under GPLv2 or later, see file LICENSE in this source tree. |
17 | */ |
18 | /* big objects in bss: |
19 | * 00000020 b bl_count |
20 | * 00000074 b base_length |
21 | * 00000078 b base_dist |
22 | * 00000078 b static_dtree |
23 | * 0000009c b bl_tree |
24 | * 000000f4 b dyn_dtree |
25 | * 00000100 b length_code |
26 | * 00000200 b dist_code |
27 | * 0000023d b depth |
28 | * 00000400 b flag_buf |
29 | * 0000047a b heap |
30 | * 00000480 b static_ltree |
31 | * 000008f4 b dyn_ltree |
32 | */ |
33 | /* TODO: full support for -v for DESKTOP |
34 | * "/usr/bin/gzip -v a bogus aa" should say: |
35 | a: 85.1% -- replaced with a.gz |
36 | gzip: bogus: No such file or directory |
37 | aa: 85.1% -- replaced with aa.gz |
38 | */ |
39 | |
40 | //config:config GZIP |
41 | //config: bool "gzip" |
42 | //config: default y |
43 | //config: help |
44 | //config: gzip is used to compress files. |
45 | //config: It's probably the most widely used UNIX compression program. |
46 | //config: |
47 | //config:config FEATURE_GZIP_LONG_OPTIONS |
48 | //config: bool "Enable long options" |
49 | //config: default y |
50 | //config: depends on GZIP && LONG_OPTS |
51 | //config: help |
52 | //config: Enable use of long options, increases size by about 106 Bytes |
53 | //config: |
54 | //config:config GZIP_FAST |
55 | //config: int "Trade memory for gzip speed (0:small,slow - 2:fast,big)" |
56 | //config: default 0 |
57 | //config: range 0 2 |
58 | //config: depends on GZIP |
59 | //config: help |
60 | //config: Enable big memory options for gzip. |
61 | //config: 0: small buffers, small hash-tables |
62 | //config: 1: larger buffers, larger hash-tables |
63 | //config: 2: larger buffers, largest hash-tables |
64 | //config: Larger models may give slightly better compression |
65 | //config: |
66 | //config:config FEATURE_GZIP_LEVELS |
67 | //config: bool "Enable compression levels" |
68 | //config: default n |
69 | //config: depends on GZIP |
70 | //config: help |
71 | //config: Enable support for compression levels 4-9. The default level |
72 | //config: is 6. If levels 1-3 are specified, 4 is used. |
73 | //config: If this option is not selected, -N options are ignored and -9 |
74 | //config: is used. |
75 | |
76 | //applet:IF_GZIP(APPLET(gzip, BB_DIR_BIN, BB_SUID_DROP)) |
77 | //kbuild:lib-$(CONFIG_GZIP) += gzip.o |
78 | |
79 | //usage:#define gzip_trivial_usage |
80 | //usage: "[-cf" IF_GUNZIP("d") IF_FEATURE_GZIP_LEVELS("123456789") "] [FILE]..." |
81 | //usage:#define gzip_full_usage "\n\n" |
82 | //usage: "Compress FILEs (or stdin)\n" |
83 | //usage: IF_FEATURE_GZIP_LEVELS( |
84 | //usage: "\n -1..9 Compression level" |
85 | //usage: ) |
86 | //usage: IF_GUNZIP( |
87 | //usage: "\n -d Decompress" |
88 | //usage: ) |
89 | //usage: "\n -c Write to stdout" |
90 | //usage: "\n -f Force" |
91 | //usage: |
92 | //usage:#define gzip_example_usage |
93 | //usage: "$ ls -la /tmp/busybox*\n" |
94 | //usage: "-rw-rw-r-- 1 andersen andersen 1761280 Apr 14 17:47 /tmp/busybox.tar\n" |
95 | //usage: "$ gzip /tmp/busybox.tar\n" |
96 | //usage: "$ ls -la /tmp/busybox*\n" |
97 | //usage: "-rw-rw-r-- 1 andersen andersen 554058 Apr 14 17:49 /tmp/busybox.tar.gz\n" |
98 | |
99 | #include "libbb.h" |
100 | #include "bb_archive.h" |
101 | #include <strings.h> |
102 | |
103 | |
104 | /* =========================================================================== |
105 | */ |
106 | //#define DEBUG 1 |
107 | /* Diagnostic functions */ |
108 | #ifdef DEBUG |
109 | # define Assert(cond,msg) { if (!(cond)) bb_error_msg(msg); } |
110 | # define Trace(x) fprintf x |
111 | # define Tracev(x) {if (verbose) fprintf x; } |
112 | # define Tracevv(x) {if (verbose > 1) fprintf x; } |
113 | # define Tracec(c,x) {if (verbose && (c)) fprintf x; } |
114 | # define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; } |
115 | #else |
116 | # define Assert(cond,msg) |
117 | # define Trace(x) |
118 | # define Tracev(x) |
119 | # define Tracevv(x) |
120 | # define Tracec(c,x) |
121 | # define Tracecv(c,x) |
122 | #endif |
123 | |
124 | |
125 | /* =========================================================================== |
126 | */ |
127 | #if CONFIG_GZIP_FAST == 0 |
128 | # define SMALL_MEM |
129 | #elif CONFIG_GZIP_FAST == 1 |
130 | # define MEDIUM_MEM |
131 | #elif CONFIG_GZIP_FAST == 2 |
132 | # define BIG_MEM |
133 | #else |
134 | # error "Invalid CONFIG_GZIP_FAST value" |
135 | #endif |
136 | |
137 | #ifndef INBUFSIZ |
138 | # ifdef SMALL_MEM |
139 | # define INBUFSIZ 0x2000 /* input buffer size */ |
140 | # else |
141 | # define INBUFSIZ 0x8000 /* input buffer size */ |
142 | # endif |
143 | #endif |
144 | |
145 | #ifndef OUTBUFSIZ |
146 | # ifdef SMALL_MEM |
147 | # define OUTBUFSIZ 8192 /* output buffer size */ |
148 | # else |
149 | # define OUTBUFSIZ 16384 /* output buffer size */ |
150 | # endif |
151 | #endif |
152 | |
153 | #ifndef DIST_BUFSIZE |
154 | # ifdef SMALL_MEM |
155 | # define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */ |
156 | # else |
157 | # define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */ |
158 | # endif |
159 | #endif |
160 | |
161 | /* gzip flag byte */ |
162 | #define ASCII_FLAG 0x01 /* bit 0 set: file probably ascii text */ |
163 | #define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */ |
164 | #define EXTRA_FIELD 0x04 /* bit 2 set: extra field present */ |
165 | #define ORIG_NAME 0x08 /* bit 3 set: original file name present */ |
166 | #define COMMENT 0x10 /* bit 4 set: file comment present */ |
167 | #define RESERVED 0xC0 /* bit 6,7: reserved */ |
168 | |
169 | /* internal file attribute */ |
170 | #define UNKNOWN 0xffff |
171 | #define BINARY 0 |
172 | #define ASCII 1 |
173 | |
174 | #ifndef WSIZE |
175 | # define WSIZE 0x8000 /* window size--must be a power of two, and */ |
176 | #endif /* at least 32K for zip's deflate method */ |
177 | |
178 | #define MIN_MATCH 3 |
179 | #define MAX_MATCH 258 |
180 | /* The minimum and maximum match lengths */ |
181 | |
182 | #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1) |
183 | /* Minimum amount of lookahead, except at the end of the input file. |
184 | * See deflate.c for comments about the MIN_MATCH+1. |
185 | */ |
186 | |
187 | #define MAX_DIST (WSIZE-MIN_LOOKAHEAD) |
188 | /* In order to simplify the code, particularly on 16 bit machines, match |
189 | * distances are limited to MAX_DIST instead of WSIZE. |
190 | */ |
191 | |
192 | #ifndef MAX_PATH_LEN |
193 | # define MAX_PATH_LEN 1024 /* max pathname length */ |
194 | #endif |
195 | |
196 | #define seekable() 0 /* force sequential output */ |
197 | #define translate_eol 0 /* no option -a yet */ |
198 | |
199 | #ifndef BITS |
200 | # define BITS 16 |
201 | #endif |
202 | #define INIT_BITS 9 /* Initial number of bits per code */ |
203 | |
204 | #define BIT_MASK 0x1f /* Mask for 'number of compression bits' */ |
205 | /* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free. |
206 | * It's a pity that old uncompress does not check bit 0x20. That makes |
207 | * extension of the format actually undesirable because old compress |
208 | * would just crash on the new format instead of giving a meaningful |
209 | * error message. It does check the number of bits, but it's more |
210 | * helpful to say "unsupported format, get a new version" than |
211 | * "can only handle 16 bits". |
212 | */ |
213 | |
214 | #ifdef MAX_EXT_CHARS |
215 | # define MAX_SUFFIX MAX_EXT_CHARS |
216 | #else |
217 | # define MAX_SUFFIX 30 |
218 | #endif |
219 | |
220 | |
221 | /* =========================================================================== |
222 | * Compile with MEDIUM_MEM to reduce the memory requirements or |
223 | * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the |
224 | * entire input file can be held in memory (not possible on 16 bit systems). |
225 | * Warning: defining these symbols affects HASH_BITS (see below) and thus |
226 | * affects the compression ratio. The compressed output |
227 | * is still correct, and might even be smaller in some cases. |
228 | */ |
229 | |
230 | #ifdef SMALL_MEM |
231 | # define HASH_BITS 13 /* Number of bits used to hash strings */ |
232 | #endif |
233 | #ifdef MEDIUM_MEM |
234 | # define HASH_BITS 14 |
235 | #endif |
236 | #ifndef HASH_BITS |
237 | # define HASH_BITS 15 |
238 | /* For portability to 16 bit machines, do not use values above 15. */ |
239 | #endif |
240 | |
241 | #define HASH_SIZE (unsigned)(1<<HASH_BITS) |
242 | #define HASH_MASK (HASH_SIZE-1) |
243 | #define WMASK (WSIZE-1) |
244 | /* HASH_SIZE and WSIZE must be powers of two */ |
245 | #ifndef TOO_FAR |
246 | # define TOO_FAR 4096 |
247 | #endif |
248 | /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */ |
249 | |
250 | |
251 | /* =========================================================================== |
252 | * These types are not really 'char', 'short' and 'long' |
253 | */ |
254 | typedef uint8_t uch; |
255 | typedef uint16_t ush; |
256 | typedef uint32_t ulg; |
257 | typedef int32_t lng; |
258 | |
259 | typedef ush Pos; |
260 | typedef unsigned IPos; |
261 | /* A Pos is an index in the character window. We use short instead of int to |
262 | * save space in the various tables. IPos is used only for parameter passing. |
263 | */ |
264 | |
265 | enum { |
266 | WINDOW_SIZE = 2 * WSIZE, |
267 | /* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the |
268 | * input file length plus MIN_LOOKAHEAD. |
269 | */ |
270 | |
271 | #ifndef ENABLE_FEATURE_GZIP_LEVELS |
272 | |
273 | max_chain_length = 4096, |
274 | /* To speed up deflation, hash chains are never searched beyond this length. |
275 | * A higher limit improves compression ratio but degrades the speed. |
276 | */ |
277 | |
278 | max_lazy_match = 258, |
279 | /* Attempt to find a better match only when the current match is strictly |
280 | * smaller than this value. This mechanism is used only for compression |
281 | * levels >= 4. |
282 | */ |
283 | |
284 | max_insert_length = max_lazy_match, |
285 | /* Insert new strings in the hash table only if the match length |
286 | * is not greater than this length. This saves time but degrades compression. |
287 | * max_insert_length is used only for compression levels <= 3. |
288 | */ |
289 | |
290 | good_match = 32, |
291 | /* Use a faster search when the previous match is longer than this */ |
292 | |
293 | /* Values for max_lazy_match, good_match and max_chain_length, depending on |
294 | * the desired pack level (0..9). The values given below have been tuned to |
295 | * exclude worst case performance for pathological files. Better values may be |
296 | * found for specific files. |
297 | */ |
298 | |
299 | nice_match = 258, /* Stop searching when current match exceeds this */ |
300 | /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4 |
301 | * For deflate_fast() (levels <= 3) good is ignored and lazy has a different |
302 | * meaning. |
303 | */ |
304 | #endif /* ENABLE_FEATURE_GZIP_LEVELS */ |
305 | }; |
306 | |
307 | |
308 | struct globals { |
309 | |
310 | #ifdef ENABLE_FEATURE_GZIP_LEVELS |
311 | unsigned max_chain_length; |
312 | unsigned max_lazy_match; |
313 | unsigned good_match; |
314 | unsigned nice_match; |
315 | #define max_chain_length (G1.max_chain_length) |
316 | #define max_lazy_match (G1.max_lazy_match) |
317 | #define good_match (G1.good_match) |
318 | #define nice_match (G1.nice_match) |
319 | #endif |
320 | |
321 | lng block_start; |
322 | |
323 | /* window position at the beginning of the current output block. Gets |
324 | * negative when the window is moved backwards. |
325 | */ |
326 | unsigned ins_h; /* hash index of string to be inserted */ |
327 | |
328 | #define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH) |
329 | /* Number of bits by which ins_h and del_h must be shifted at each |
330 | * input step. It must be such that after MIN_MATCH steps, the oldest |
331 | * byte no longer takes part in the hash key, that is: |
332 | * H_SHIFT * MIN_MATCH >= HASH_BITS |
333 | */ |
334 | |
335 | unsigned prev_length; |
336 | |
337 | /* Length of the best match at previous step. Matches not greater than this |
338 | * are discarded. This is used in the lazy match evaluation. |
339 | */ |
340 | |
341 | unsigned strstart; /* start of string to insert */ |
342 | unsigned match_start; /* start of matching string */ |
343 | unsigned lookahead; /* number of valid bytes ahead in window */ |
344 | |
345 | /* =========================================================================== |
346 | */ |
347 | #define DECLARE(type, array, size) \ |
348 | type * array |
349 | #define ALLOC(type, array, size) \ |
350 | array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type)) |
351 | #define FREE(array) \ |
352 | do { free(array); array = NULL; } while (0) |
353 | |
354 | /* global buffers */ |
355 | |
356 | /* buffer for literals or lengths */ |
357 | /* DECLARE(uch, l_buf, LIT_BUFSIZE); */ |
358 | DECLARE(uch, l_buf, INBUFSIZ); |
359 | |
360 | DECLARE(ush, d_buf, DIST_BUFSIZE); |
361 | DECLARE(uch, outbuf, OUTBUFSIZ); |
362 | |
363 | /* Sliding window. Input bytes are read into the second half of the window, |
364 | * and move to the first half later to keep a dictionary of at least WSIZE |
365 | * bytes. With this organization, matches are limited to a distance of |
366 | * WSIZE-MAX_MATCH bytes, but this ensures that IO is always |
367 | * performed with a length multiple of the block size. Also, it limits |
368 | * the window size to 64K, which is quite useful on MSDOS. |
369 | * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would |
370 | * be less efficient). |
371 | */ |
372 | DECLARE(uch, window, 2L * WSIZE); |
373 | |
374 | /* Link to older string with same hash index. To limit the size of this |
375 | * array to 64K, this link is maintained only for the last 32K strings. |
376 | * An index in this array is thus a window index modulo 32K. |
377 | */ |
378 | /* DECLARE(Pos, prev, WSIZE); */ |
379 | DECLARE(ush, prev, 1L << BITS); |
380 | |
381 | /* Heads of the hash chains or 0. */ |
382 | /* DECLARE(Pos, head, 1<<HASH_BITS); */ |
383 | #define head (G1.prev + WSIZE) /* hash head (see deflate.c) */ |
384 | |
385 | /* number of input bytes */ |
386 | ulg isize; /* only 32 bits stored in .gz file */ |
387 | |
388 | /* bbox always use stdin/stdout */ |
389 | #define ifd STDIN_FILENO /* input file descriptor */ |
390 | #define ofd STDOUT_FILENO /* output file descriptor */ |
391 | |
392 | #ifdef DEBUG |
393 | unsigned insize; /* valid bytes in l_buf */ |
394 | #endif |
395 | unsigned outcnt; /* bytes in output buffer */ |
396 | |
397 | smallint eofile; /* flag set at end of input file */ |
398 | |
399 | /* =========================================================================== |
400 | * Local data used by the "bit string" routines. |
401 | */ |
402 | |
403 | unsigned short bi_buf; |
404 | |
405 | /* Output buffer. bits are inserted starting at the bottom (least significant |
406 | * bits). |
407 | */ |
408 | |
409 | #undef BUF_SIZE |
410 | #define BUF_SIZE (8 * sizeof(G1.bi_buf)) |
411 | /* Number of bits used within bi_buf. (bi_buf might be implemented on |
412 | * more than 16 bits on some systems.) |
413 | */ |
414 | |
415 | int bi_valid; |
416 | |
417 | /* Current input function. Set to mem_read for in-memory compression */ |
418 | |
419 | #ifdef DEBUG |
420 | ulg bits_sent; /* bit length of the compressed data */ |
421 | #endif |
422 | |
423 | /*uint32_t *crc_32_tab;*/ |
424 | uint32_t crc; /* shift register contents */ |
425 | }; |
426 | |
427 | #define G1 (*(ptr_to_globals - 1)) |
428 | |
429 | |
430 | /* =========================================================================== |
431 | * Write the output buffer outbuf[0..outcnt-1] and update bytes_out. |
432 | * (used for the compressed data only) |
433 | */ |
434 | static void flush_outbuf(void) |
435 | { |
436 | if (G1.outcnt == 0) |
437 | return; |
438 | |
439 | xwrite(ofd, (char *) G1.outbuf, G1.outcnt); |
440 | G1.outcnt = 0; |
441 | } |
442 | |
443 | |
444 | /* =========================================================================== |
445 | */ |
446 | /* put_8bit is used for the compressed output */ |
447 | #define put_8bit(c) \ |
448 | do { \ |
449 | G1.outbuf[G1.outcnt++] = (c); \ |
450 | if (G1.outcnt == OUTBUFSIZ) \ |
451 | flush_outbuf(); \ |
452 | } while (0) |
453 | |
454 | /* Output a 16 bit value, lsb first */ |
static void put_16bit(ush w)
{
	/* GCC 4.2.1 won't optimize out redundant loads of G1.outcnt
	 * (probably because of fear of aliasing with G1.outbuf[]
	 * stores), do it explicitly:
	 */
	unsigned outcnt = G1.outcnt;
	uch *dst = &G1.outbuf[outcnt];

#if BB_UNALIGNED_MEMACCESS_OK && BB_LITTLE_ENDIAN
	if (outcnt < OUTBUFSIZ-2) {
		/* Common case. Strictly '<' keeps the invariant that the
		 * buffer is flushed before outcnt can reach OUTBUFSIZ. */
		ush *dst16 = (void*) dst;
		*dst16 = w; /* unaligned LSB 16-bit store */
		G1.outcnt = outcnt + 2;
		return;
	}
	/* Near buffer end: fall through to byte-at-a-time path below */
	*dst = (uch)w;
	w >>= 8;
#else
	*dst = (uch)w;
	w >>= 8;
	if (outcnt < OUTBUFSIZ-2) {
		/* Common case */
		dst[1] = w;
		G1.outcnt = outcnt + 2;
		return;
	}
#endif

	/* Slowpath: we will need to do flush_outbuf() */
	G1.outcnt = ++outcnt;
	if (outcnt == OUTBUFSIZ)
		flush_outbuf();
	put_8bit(w); /* high byte goes out after the (possible) flush */
}
491 | |
492 | static void put_32bit(ulg n) |
493 | { |
494 | put_16bit(n); |
495 | put_16bit(n >> 16); |
496 | } |
497 | |
498 | /* =========================================================================== |
499 | * Run a set of bytes through the crc shift register. If s is a NULL |
500 | * pointer, then initialize the crc shift register contents instead. |
501 | * Return the current crc in either case. |
502 | */ |
static void updcrc(uch * s, unsigned n)
{
	/* Fold n bytes at s into the running CRC using libbb's shared table */
	G1.crc = crc32_block_endian0(G1.crc, s, n, global_crc32_table /*G1.crc_32_tab*/);
}
507 | |
508 | |
509 | /* =========================================================================== |
510 | * Read a new buffer from the current input file, perform end-of-line |
511 | * translation, and update the crc and input file size. |
512 | * IN assertion: size >= 2 (for end-of-line translation) |
513 | */ |
514 | static unsigned file_read(void *buf, unsigned size) |
515 | { |
516 | unsigned len; |
517 | |
518 | Assert(G1.insize == 0, "l_buf not empty"); |
519 | |
520 | len = safe_read(ifd, buf, size); |
521 | if (len == (unsigned)(-1) || len == 0) |
522 | return len; |
523 | |
524 | updcrc(buf, len); |
525 | G1.isize += len; |
526 | return len; |
527 | } |
528 | |
529 | |
530 | /* =========================================================================== |
531 | * Send a value on a given number of bits. |
532 | * IN assertion: length <= 16 and value fits in length bits. |
533 | */ |
static void send_bits(int value, int length)
{
#ifdef DEBUG
	Tracev((stderr, " l %2d v %4x ", length, value));
	Assert(length > 0 && length <= 15, "invalid length");
	G1.bits_sent += length;
#endif
	/* If not enough room in bi_buf, use (valid) bits from bi_buf and
	 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
	 * unused bits in value.
	 */
	if (G1.bi_valid > (int) BUF_SIZE - length) {
		/* Overflow: emit the filled 16-bit buffer, then keep the
		 * leftover high bits of value as the start of a new buffer */
		G1.bi_buf |= (value << G1.bi_valid);
		put_16bit(G1.bi_buf);
		G1.bi_buf = (ush) value >> (BUF_SIZE - G1.bi_valid);
		G1.bi_valid += length - BUF_SIZE;
	} else {
		/* All bits fit: append value above the currently valid bits */
		G1.bi_buf |= value << G1.bi_valid;
		G1.bi_valid += length;
	}
}
555 | |
556 | |
557 | /* =========================================================================== |
558 | * Reverse the first len bits of a code, using straightforward code (a faster |
559 | * method would use a table) |
560 | * IN assertion: 1 <= len <= 15 |
561 | */ |
static unsigned bi_reverse(unsigned code, int len)
{
	/* Shift bits of 'code' into 'res' LSB-first, which reverses their
	 * order over the first len positions (straightforward, table-free).
	 * IN assertion: 1 <= len <= 15
	 */
	unsigned res = 0;

	do {
		res = (res << 1) | (code & 1);
		code >>= 1;
	} while (--len > 0);

	return res;
}
573 | |
574 | |
575 | /* =========================================================================== |
576 | * Write out any remaining bits in an incomplete byte. |
577 | */ |
578 | static void bi_windup(void) |
579 | { |
580 | if (G1.bi_valid > 8) { |
581 | put_16bit(G1.bi_buf); |
582 | } else if (G1.bi_valid > 0) { |
583 | put_8bit(G1.bi_buf); |
584 | } |
585 | G1.bi_buf = 0; |
586 | G1.bi_valid = 0; |
587 | #ifdef DEBUG |
588 | G1.bits_sent = (G1.bits_sent + 7) & ~7; |
589 | #endif |
590 | } |
591 | |
592 | |
593 | /* =========================================================================== |
594 | * Copy a stored block to the zip file, storing first the length and its |
595 | * one's complement if requested. |
596 | */ |
static void copy_block(char *buf, unsigned len, int header)
{
	/* Start the stored block on a byte boundary */
	bi_windup();

	if (header) {
		/* Stored-block header: LEN and its one's complement, LSB first */
		put_16bit(len);
		put_16bit(~len);
#ifdef DEBUG
		G1.bits_sent += 2 * 16;
#endif
	}
#ifdef DEBUG
	G1.bits_sent += (ulg) len << 3;
#endif
	/* Copy the payload verbatim through the output buffer */
	for (; len != 0; len--) {
		put_8bit(*buf);
		buf++;
	}
}
615 | |
616 | |
617 | /* =========================================================================== |
618 | * Fill the window when the lookahead becomes insufficient. |
619 | * Updates strstart and lookahead, and sets eofile if end of input file. |
620 | * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0 |
621 | * OUT assertions: at least one byte has been read, or eofile is set; |
622 | * file reads are performed for at least two bytes (required for the |
623 | * translate_eol option). |
624 | */ |
static void fill_window(void)
{
	unsigned n, m;
	unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
	/* Amount of free space at the end of the window. */

	/* If the window is almost full and there is insufficient lookahead,
	 * move the upper half to the lower one to make room in the upper half.
	 */
	if (more == (unsigned) -1) {
		/* Very unlikely, but possible on 16 bit machine if strstart == 0
		 * and lookahead == 1 (input done one byte at time)
		 */
		more--;
	} else if (G1.strstart >= WSIZE + MAX_DIST) {
		/* By the IN assertion, the window is not empty so we can't confuse
		 * more == 0 with more == 64K on a 16 bit machine.
		 */
		Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");

		/* Slide the upper half of the window down over the lower half */
		memcpy(G1.window, G1.window + WSIZE, WSIZE);
		G1.match_start -= WSIZE;
		G1.strstart -= WSIZE;	/* we now have strstart >= MAX_DIST: */

		G1.block_start -= WSIZE;

		/* Rebase all hash chain heads by WSIZE; entries that now fall
		 * before the window become 0 (the chain terminator) */
		for (n = 0; n < HASH_SIZE; n++) {
			m = head[n];
			head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
		}
		/* Same rebasing for the prev[] chain links */
		for (n = 0; n < WSIZE; n++) {
			m = G1.prev[n];
			G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
			/* If n is not on any hash chain, prev[n] is garbage but
			 * its value will never be used.
			 */
		}
		more += WSIZE;
	}
	/* At this point, more >= 2 */
	if (!G1.eofile) {
		/* Append fresh input right after the current lookahead bytes */
		n = file_read(G1.window + G1.strstart + G1.lookahead, more);
		if (n == 0 || n == (unsigned) -1) {
			G1.eofile = 1;
		} else {
			G1.lookahead += n;
		}
	}
}
674 | |
675 | |
676 | /* =========================================================================== |
677 | * Set match_start to the longest match starting at the given string and |
678 | * return its length. Matches shorter or equal to prev_length are discarded, |
679 | * in which case the result is equal to prev_length and match_start is |
680 | * garbage. |
681 | * IN assertions: cur_match is the head of the hash chain for the current |
682 | * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 |
683 | */ |
684 | |
685 | /* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or |
686 | * match.s. The code is functionally equivalent, so you can use the C version |
687 | * if desired. |
688 | */ |
static int longest_match(IPos cur_match)
{
	unsigned chain_length = max_chain_length;	/* max hash chain length */
	uch *scan = G1.window + G1.strstart;	/* current string */
	uch *match;	/* matched string */
	int len;	/* length of current match */
	int best_len = G1.prev_length;	/* best match length so far */
	IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
	/* Stop when cur_match becomes <= limit. To simplify the code,
	 * we prevent matches with the string of window index 0.
	 */

	/* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
	 * It is easy to get rid of this optimization if necessary.
	 */
#if HASH_BITS < 8 || MAX_MATCH != 258
# error Code too clever
#endif
	uch *strend = G1.window + G1.strstart + MAX_MATCH;
	/* Cache the two bytes a candidate must match at positions
	 * best_len-1/best_len to possibly beat the current best match */
	uch scan_end1 = scan[best_len - 1];
	uch scan_end = scan[best_len];

	/* Do not waste too much time if we already have a good match: */
	if (G1.prev_length >= good_match) {
		chain_length >>= 2;
	}
	Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");

	do {
		Assert(cur_match < G1.strstart, "no future");
		match = G1.window + cur_match;

		/* Skip to next match if the match length cannot increase
		 * or if the match length is less than 2:
		 */
		if (match[best_len] != scan_end
		 || match[best_len - 1] != scan_end1
		 || *match != *scan || *++match != scan[1]
		) {
			continue;
		}

		/* The check at best_len-1 can be removed because it will be made
		 * again later. (This heuristic is not always a win.)
		 * It is not necessary to compare scan[2] and match[2] since they
		 * are always equal when the other bytes match, given that
		 * the hash keys are equal and that HASH_BITS >= 8.
		 */
		scan += 2, match++;

		/* We check for insufficient lookahead only every 8th comparison;
		 * the 256th check will be made at strstart+258.
		 */
		do {
		} while (*++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match && scan < strend);

		/* scan now points one past the last matching byte */
		len = MAX_MATCH - (int) (strend - scan);
		scan = strend - MAX_MATCH;	/* rewind scan to strstart */

		if (len > best_len) {
			G1.match_start = cur_match;
			best_len = len;
			if (len >= nice_match)
				break;	/* good enough, stop searching */
			scan_end1 = scan[best_len - 1];
			scan_end = scan[best_len];
		}
	} while ((cur_match = G1.prev[cur_match & WMASK]) > limit
		  && --chain_length != 0);

	return best_len;
}
764 | |
765 | |
766 | #ifdef DEBUG |
767 | /* =========================================================================== |
768 | * Check that the match at match_start is indeed a match. |
769 | */ |
/* DEBUG-only sanity check: verify that window[match..] really equals
 * window[start..] for 'length' bytes; complain on stderr if not. */
static void check_match(IPos start, IPos match, int length)
{
	/* check that the match is indeed a match */
	if (memcmp(G1.window + match, G1.window + start, length) != 0) {
		bb_error_msg(" start %d, match %d, length %d", start, match, length);
		bb_error_msg("invalid match");
	}
	if (verbose > 1) {
		/* dump the matched text (gzip -vv style) */
		bb_error_msg("\\[%d,%d]", start - match, length);
		do {
			bb_putchar_stderr(G1.window[start++]);
		} while (--length != 0);
	}
}
784 | #else |
785 | # define check_match(start, match, length) ((void)0) |
786 | #endif |
787 | |
788 | |
789 | /* trees.c -- output deflated data using Huffman coding |
790 | * Copyright (C) 1992-1993 Jean-loup Gailly |
791 | * This is free software; you can redistribute it and/or modify it under the |
792 | * terms of the GNU General Public License, see the file COPYING. |
793 | */ |
794 | |
795 | /* PURPOSE |
796 | * Encode various sets of source values using variable-length |
797 | * binary code trees. |
798 | * |
799 | * DISCUSSION |
800 | * The PKZIP "deflation" process uses several Huffman trees. The more |
801 | * common source values are represented by shorter bit sequences. |
802 | * |
803 | * Each code tree is stored in the ZIP file in a compressed form |
804 | * which is itself a Huffman encoding of the lengths of |
805 | * all the code strings (in ascending order by source values). |
806 | * The actual code strings are reconstructed from the lengths in |
807 | * the UNZIP process, as described in the "application note" |
808 | * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program. |
809 | * |
810 | * REFERENCES |
811 | * Lynch, Thomas J. |
812 | * Data Compression: Techniques and Applications, pp. 53-55. |
813 | * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7. |
814 | * |
815 | * Storer, James A. |
816 | * Data Compression: Methods and Theory, pp. 49-50. |
817 | * Computer Science Press, 1988. ISBN 0-7167-8156-5. |
818 | * |
819 | * Sedgewick, R. |
820 | * Algorithms, p290. |
821 | * Addison-Wesley, 1983. ISBN 0-201-06672-6. |
822 | * |
823 | * INTERFACE |
824 | * void ct_init() |
825 | * Allocate the match buffer, initialize the various tables [and save |
826 | * the location of the internal file attribute (ascii/binary) and |
827 | * method (DEFLATE/STORE) -- deleted in bbox] |
828 | * |
829 | * void ct_tally(int dist, int lc); |
830 | * Save the match info and tally the frequency counts. |
831 | * |
832 | * ulg flush_block(char *buf, ulg stored_len, int eof) |
833 | * Determine the best encoding for the current block: dynamic trees, |
834 | * static trees or store, and output the encoded block to the zip |
835 | * file. Returns the total compressed length for the file so far. |
836 | */ |
837 | |
838 | #define MAX_BITS 15 |
839 | /* All codes must not exceed MAX_BITS bits */ |
840 | |
841 | #define MAX_BL_BITS 7 |
842 | /* Bit length codes must not exceed MAX_BL_BITS bits */ |
843 | |
844 | #define LENGTH_CODES 29 |
845 | /* number of length codes, not counting the special END_BLOCK code */ |
846 | |
847 | #define LITERALS 256 |
848 | /* number of literal bytes 0..255 */ |
849 | |
850 | #define END_BLOCK 256 |
851 | /* end of block literal code */ |
852 | |
853 | #define L_CODES (LITERALS+1+LENGTH_CODES) |
854 | /* number of Literal or Length codes, including the END_BLOCK code */ |
855 | |
856 | #define D_CODES 30 |
857 | /* number of distance codes */ |
858 | |
859 | #define BL_CODES 19 |
860 | /* number of codes used to transfer the bit lengths */ |
861 | |
/* extra bits for each length code (deflate length codes 257..285) */
static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
	4, 4, 5, 5, 5, 5, 0
};
867 | |
868 | /* extra bits for each distance code */ |
869 | static const uint8_t extra_dbits[D_CODES] ALIGN1 = { |
870 | 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, |
871 | 10, 10, 11, 11, 12, 12, 13, 13 |
872 | }; |
873 | |
874 | /* extra bits for each bit length code */ |
875 | static const uint8_t extra_blbits[BL_CODES] ALIGN1 = { |
876 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 }; |
877 | |
878 | /* number of codes at each bit length for an optimal tree */ |
879 | static const uint8_t bl_order[BL_CODES] ALIGN1 = { |
880 | 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 }; |
881 | |
#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES 2
/* The three kinds of block type */

/* Match-buffer sizes, selectable at build time to trade memory for
 * compression ratio (smaller buffers force blocks to be flushed sooner).
 */
#ifndef LIT_BUFSIZE
# ifdef SMALL_MEM
#  define LIT_BUFSIZE 0x2000
# else
#  ifdef MEDIUM_MEM
#   define LIT_BUFSIZE 0x4000
#  else
#   define LIT_BUFSIZE 0x8000
#  endif
# endif
#endif
#ifndef DIST_BUFSIZE
# define DIST_BUFSIZE LIT_BUFSIZE
#endif
/* Sizes of match buffers for literals/lengths and distances. There are
 * 4 reasons for limiting LIT_BUFSIZE to 64K:
 *   - frequencies can be kept in 16 bit counters
 *   - if compression is not successful for the first block, all input data is
 *     still in the window so we can still emit a stored block even when input
 *     comes from standard input. (This can also be done for all blocks if
 *     LIT_BUFSIZE is not greater than 32K.)
 *   - if compression is not successful for a file smaller than 64K, we can
 *     even emit a stored file instead of a stored block (saving 5 bytes).
 *   - creating new Huffman trees less frequently may not provide fast
 *     adaptation to changes in the input data statistics. (Take for
 *     example a binary file with poorly compressible code followed by
 *     a highly compressible string table.) Smaller buffer sizes give
 *     fast adaptation but have of course the overhead of transmitting trees
 *     more frequently.
 *   - I can't count above 4
 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
 * memory at the expense of compression). Some optimizations would be possible
 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
 */
#define REP_3_6 16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */
#define REPZ_3_10 17
/* repeat a zero length 3-10 times (3 bits of repeat count) */
#define REPZ_11_138 18
/* repeat a zero length 11-138 times (7 bits of repeat count) */
927 | |
928 | /* =========================================================================== |
929 | */ |
930 | /* Data structure describing a single value and its code string. */ |
931 | typedef struct ct_data { |
932 | union { |
933 | ush freq; /* frequency count */ |
934 | ush code; /* bit string */ |
935 | } fc; |
936 | union { |
937 | ush dad; /* father node in Huffman tree */ |
938 | ush len; /* length of bit string */ |
939 | } dl; |
940 | } ct_data; |
941 | |
942 | #define Freq fc.freq |
943 | #define Code fc.code |
944 | #define Dad dl.dad |
945 | #define Len dl.len |
946 | |
947 | #define HEAP_SIZE (2*L_CODES + 1) |
948 | /* maximum heap size */ |
949 | |
950 | typedef struct tree_desc { |
951 | ct_data *dyn_tree; /* the dynamic tree */ |
952 | ct_data *static_tree; /* corresponding static tree or NULL */ |
953 | const uint8_t *extra_bits; /* extra bits for each code or NULL */ |
954 | int extra_base; /* base index for extra_bits */ |
955 | int elems; /* max number of elements in the tree */ |
956 | int max_length; /* max bit length for the codes */ |
957 | int max_code; /* largest code with non zero frequency */ |
958 | } tree_desc; |
959 | |
/* All mutable state of the tree/encoding stage, reached through
 * ptr_to_globals (see G2 below) to keep bss small.
 */
struct globals2 {

	ush heap[HEAP_SIZE]; /* heap used to build the Huffman trees */
	int heap_len; /* number of elements in the heap */
	int heap_max; /* element of largest frequency */

/* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
 * The same heap array is used to build all trees.
 */

	ct_data dyn_ltree[HEAP_SIZE]; /* literal and length tree */
	ct_data dyn_dtree[2 * D_CODES + 1]; /* distance tree */

	ct_data static_ltree[L_CODES + 2];

/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However
 * The codes 286 and 287 are needed to build a canonical tree (see ct_init
 * below).
 */

	ct_data static_dtree[D_CODES];

/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

	ct_data bl_tree[2 * BL_CODES + 1];

/* Huffman tree for the bit lengths */

	tree_desc l_desc;
	tree_desc d_desc;
	tree_desc bl_desc;

	ush bl_count[MAX_BITS + 1];

/* number of codes at each bit length for an optimal tree
 * (NB: the transmission order of bit length codes is bl_order[] above)
 */

	uch depth[2 * L_CODES + 1];

/* Depth of each subtree used as tie breaker for trees of equal frequency */

	uch length_code[MAX_MATCH - MIN_MATCH + 1];

/* length code for each normalized match length (0 == MIN_MATCH) */

	uch dist_code[512];

/* distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

	int base_length[LENGTH_CODES];

/* First normalized length for each code (0 = MIN_MATCH) */

	int base_dist[D_CODES];

/* First normalized distance for each code (0 = distance of 1) */

	uch flag_buf[LIT_BUFSIZE / 8];

/* flag_buf is a bit array distinguishing literals from lengths in
 * l_buf, thus indicating the presence or absence of a distance.
 */

	unsigned last_lit; /* running index in l_buf */
	unsigned last_dist; /* running index in d_buf */
	unsigned last_flags; /* running index in flag_buf */
	uch flags; /* current flags not yet saved in flag_buf */
	uch flag_bit; /* current bit used in flags */

/* bits are filled in flags starting at bit 0 (least significant).
 * Note: these flags are overkill in the current code since we don't
 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
 */

	ulg opt_len; /* bit length of current block with optimal trees */
	ulg static_len; /* bit length of current block with static trees */

	ulg compressed_len; /* total bit length of compressed file */
};

#define G2ptr ((struct globals2*)(ptr_to_globals))
#define G2 (*G2ptr)
1049 | |
1050 | |
1051 | /* =========================================================================== |
1052 | */ |
1053 | static void gen_codes(ct_data * tree, int max_code); |
1054 | static void build_tree(tree_desc * desc); |
1055 | static void scan_tree(ct_data * tree, int max_code); |
1056 | static void send_tree(ct_data * tree, int max_code); |
1057 | static int build_bl_tree(void); |
1058 | static void send_all_trees(int lcodes, int dcodes, int blcodes); |
1059 | static void compress_block(ct_data * ltree, ct_data * dtree); |
1060 | |
1061 | |
1062 | #ifndef DEBUG |
1063 | /* Send a code of the given tree. c and tree must not have side effects */ |
1064 | # define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len) |
1065 | #else |
1066 | # define SEND_CODE(c, tree) \ |
1067 | { \ |
1068 | if (verbose > 1) bb_error_msg("\ncd %3d ", (c)); \ |
1069 | send_bits(tree[c].Code, tree[c].Len); \ |
1070 | } |
1071 | #endif |
1072 | |
1073 | #define D_CODE(dist) \ |
1074 | ((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)]) |
1075 | /* Mapping from a distance to a distance code. dist is the distance - 1 and |
1076 | * must not have side effects. dist_code[256] and dist_code[257] are never |
1077 | * used. |
1078 | * The arguments must not have side effects. |
1079 | */ |
1080 | |
1081 | |
1082 | /* =========================================================================== |
1083 | * Initialize a new block. |
1084 | */ |
1085 | static void init_block(void) |
1086 | { |
1087 | int n; /* iterates over tree elements */ |
1088 | |
1089 | /* Initialize the trees. */ |
1090 | for (n = 0; n < L_CODES; n++) |
1091 | G2.dyn_ltree[n].Freq = 0; |
1092 | for (n = 0; n < D_CODES; n++) |
1093 | G2.dyn_dtree[n].Freq = 0; |
1094 | for (n = 0; n < BL_CODES; n++) |
1095 | G2.bl_tree[n].Freq = 0; |
1096 | |
1097 | G2.dyn_ltree[END_BLOCK].Freq = 1; |
1098 | G2.opt_len = G2.static_len = 0; |
1099 | G2.last_lit = G2.last_dist = G2.last_flags = 0; |
1100 | G2.flags = 0; |
1101 | G2.flag_bit = 1; |
1102 | } |
1103 | |
1104 | |
1105 | /* =========================================================================== |
1106 | * Restore the heap property by moving down the tree starting at node k, |
1107 | * exchanging a node with the smallest of its two sons if necessary, stopping |
1108 | * when the heap property is re-established (each father smaller than its |
1109 | * two sons). |
1110 | */ |
1111 | |
/* Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length. */
#define SMALLER(tree, n, m) \
	(tree[n].Freq < tree[m].Freq \
	|| (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))
1117 | |
1118 | static void pqdownheap(ct_data * tree, int k) |
1119 | { |
1120 | int v = G2.heap[k]; |
1121 | int j = k << 1; /* left son of k */ |
1122 | |
1123 | while (j <= G2.heap_len) { |
1124 | /* Set j to the smallest of the two sons: */ |
1125 | if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j])) |
1126 | j++; |
1127 | |
1128 | /* Exit if v is smaller than both sons */ |
1129 | if (SMALLER(tree, v, G2.heap[j])) |
1130 | break; |
1131 | |
1132 | /* Exchange v with the smallest son */ |
1133 | G2.heap[k] = G2.heap[j]; |
1134 | k = j; |
1135 | |
1136 | /* And continue down the tree, setting j to the left son of k */ |
1137 | j <<= 1; |
1138 | } |
1139 | G2.heap[k] = v; |
1140 | } |
1141 | |
1142 | |
1143 | /* =========================================================================== |
1144 | * Compute the optimal bit lengths for a tree and update the total bit length |
1145 | * for the current block. |
1146 | * IN assertion: the fields freq and dad are set, heap[heap_max] and |
1147 | * above are the tree nodes sorted by increasing frequency. |
1148 | * OUT assertions: the field len is set to the optimal bit length, the |
1149 | * array bl_count contains the frequencies for each bit length. |
1150 | * The length opt_len is updated; static_len is also updated if stree is |
1151 | * not null. |
1152 | */ |
static void gen_bitlen(tree_desc * desc)
{
	ct_data *tree = desc->dyn_tree;
	const uint8_t *extra = desc->extra_bits;
	int base = desc->extra_base;
	int max_code = desc->max_code;
	int max_length = desc->max_length;
	ct_data *stree = desc->static_tree;
	int h; /* heap index */
	int n, m; /* iterate over the tree elements */
	int bits; /* bit length */
	int xbits; /* extra bits */
	ush f; /* frequency */
	int overflow = 0; /* number of elements with bit length too large */

	/* bl_count[] is rebuilt from scratch for this tree */
	for (bits = 0; bits <= MAX_BITS; bits++)
		G2.bl_count[bits] = 0;

	/* In a first pass, compute the optimal bit lengths (which may
	 * overflow in the case of the bit length tree).
	 */
	tree[G2.heap[G2.heap_max]].Len = 0; /* root of the heap */

	for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
		n = G2.heap[h];
		bits = tree[tree[n].Dad].Len + 1;
		if (bits > max_length) {
			/* clamp; the overflow is repaired in the loop further down */
			bits = max_length;
			overflow++;
		}
		tree[n].Len = (ush) bits;
		/* We overwrite tree[n].Dad which is no longer needed */

		if (n > max_code)
			continue; /* not a leaf node */

		G2.bl_count[bits]++;
		xbits = 0;
		if (n >= base)
			xbits = extra[n - base];
		f = tree[n].Freq;
		G2.opt_len += (ulg) f *(bits + xbits);

		if (stree)
			G2.static_len += (ulg) f * (stree[n].Len + xbits);
	}
	if (overflow == 0)
		return;

	Trace((stderr, "\nbit length overflow\n"));
	/* This happens for example on obj2 and pic of the Calgary corpus */

	/* Find the first bit length which could increase: */
	do {
		bits = max_length - 1;
		while (G2.bl_count[bits] == 0)
			bits--;
		G2.bl_count[bits]--; /* move one leaf down the tree */
		G2.bl_count[bits + 1] += 2; /* move one overflow item as its brother */
		G2.bl_count[max_length]--;
		/* The brother of the overflow item also moves one step up,
		 * but this does not affect bl_count[max_length]
		 */
		overflow -= 2;
	} while (overflow > 0);

	/* Now recompute all bit lengths, scanning in increasing frequency.
	 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
	 * lengths instead of fixing only the wrong ones. This idea is taken
	 * from 'ar' written by Haruhiko Okumura.)
	 */
	for (bits = max_length; bits != 0; bits--) {
		n = G2.bl_count[bits];
		while (n != 0) {
			m = G2.heap[--h];
			if (m > max_code)
				continue; /* internal node, not a leaf */
			if (tree[m].Len != (unsigned) bits) {
				Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
				G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
				tree[m].Len = bits;
			}
			n--;
		}
	}
}
1239 | |
1240 | |
1241 | /* =========================================================================== |
1242 | * Generate the codes for a given tree and bit counts (which need not be |
1243 | * optimal). |
1244 | * IN assertion: the array bl_count contains the bit length statistics for |
1245 | * the given tree and the field len is set for all tree elements. |
1246 | * OUT assertion: the field code is set for all tree elements of non |
1247 | * zero code length. |
1248 | */ |
1249 | static void gen_codes(ct_data * tree, int max_code) |
1250 | { |
1251 | ush next_code[MAX_BITS + 1]; /* next code value for each bit length */ |
1252 | ush code = 0; /* running code value */ |
1253 | int bits; /* bit index */ |
1254 | int n; /* code index */ |
1255 | |
1256 | /* The distribution counts are first used to generate the code values |
1257 | * without bit reversal. |
1258 | */ |
1259 | for (bits = 1; bits <= MAX_BITS; bits++) { |
1260 | next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1; |
1261 | } |
1262 | /* Check that the bit counts in bl_count are consistent. The last code |
1263 | * must be all ones. |
1264 | */ |
1265 | Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1, |
1266 | "inconsistent bit counts"); |
1267 | Tracev((stderr, "\ngen_codes: max_code %d ", max_code)); |
1268 | |
1269 | for (n = 0; n <= max_code; n++) { |
1270 | int len = tree[n].Len; |
1271 | |
1272 | if (len == 0) |
1273 | continue; |
1274 | /* Now reverse the bits */ |
1275 | tree[n].Code = bi_reverse(next_code[len]++, len); |
1276 | |
1277 | Tracec(tree != G2.static_ltree, |
1278 | (stderr, "\nn %3d %c l %2d c %4x (%x) ", n, |
1279 | (n > ' ' ? n : ' '), len, tree[n].Code, |
1280 | next_code[len] - 1)); |
1281 | } |
1282 | } |
1283 | |
1284 | |
1285 | /* =========================================================================== |
1286 | * Construct one Huffman tree and assigns the code bit strings and lengths. |
1287 | * Update the total bit length for the current block. |
1288 | * IN assertion: the field freq is set for all tree elements. |
1289 | * OUT assertions: the fields len and code are set to the optimal bit length |
1290 | * and corresponding code. The length opt_len is updated; static_len is |
1291 | * also updated if stree is not null. The field max_code is set. |
1292 | */ |
1293 | |
1294 | /* Remove the smallest element from the heap and recreate the heap with |
1295 | * one less element. Updates heap and heap_len. */ |
1296 | |
1297 | #define SMALLEST 1 |
1298 | /* Index within the heap array of least frequent node in the Huffman tree */ |
1299 | |
1300 | #define PQREMOVE(tree, top) \ |
1301 | do { \ |
1302 | top = G2.heap[SMALLEST]; \ |
1303 | G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \ |
1304 | pqdownheap(tree, SMALLEST); \ |
1305 | } while (0) |
1306 | |
static void build_tree(tree_desc * desc)
{
	ct_data *tree = desc->dyn_tree;
	ct_data *stree = desc->static_tree;
	int elems = desc->elems;
	int n, m; /* iterate over heap elements */
	int max_code = -1; /* largest code with non zero frequency */
	int node = elems; /* next internal node of the tree */

	/* Construct the initial heap, with least frequent element in
	 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
	 * heap[0] is not used.
	 */
	G2.heap_len = 0;
	G2.heap_max = HEAP_SIZE;

	for (n = 0; n < elems; n++) {
		if (tree[n].Freq != 0) {
			G2.heap[++G2.heap_len] = max_code = n;
			G2.depth[n] = 0;
		} else {
			/* unused code: give it zero length */
			tree[n].Len = 0;
		}
	}

	/* The pkzip format requires that at least one distance code exists,
	 * and that at least one bit should be sent even if there is only one
	 * possible code. So to avoid special checks later on we force at least
	 * two codes of non zero frequency.
	 */
	while (G2.heap_len < 2) {
		int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);

		tree[new].Freq = 1;
		G2.depth[new] = 0;
		G2.opt_len--;
		if (stree)
			G2.static_len -= stree[new].Len;
		/* new is 0 or 1 so it does not have extra bits */
	}
	desc->max_code = max_code;

	/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
	 * establish sub-heaps of increasing lengths:
	 */
	for (n = G2.heap_len / 2; n >= 1; n--)
		pqdownheap(tree, n);

	/* Construct the Huffman tree by repeatedly combining the least two
	 * frequent nodes.
	 */
	do {
		PQREMOVE(tree, n); /* n = node of least frequency */
		m = G2.heap[SMALLEST]; /* m = node of next least frequency */

		G2.heap[--G2.heap_max] = n; /* keep the nodes sorted by frequency */
		G2.heap[--G2.heap_max] = m;

		/* Create a new node father of n and m */
		tree[node].Freq = tree[n].Freq + tree[m].Freq;
		G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
		tree[n].Dad = tree[m].Dad = (ush) node;
#ifdef DUMP_BL_TREE
		if (tree == G2.bl_tree) {
			bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
					node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
		}
#endif
		/* and insert the new node in the heap */
		G2.heap[SMALLEST] = node++;
		pqdownheap(tree, SMALLEST);
	} while (G2.heap_len >= 2);

	/* move the last remaining node (the tree root) to the sorted area */
	G2.heap[--G2.heap_max] = G2.heap[SMALLEST];

	/* At this point, the fields freq and dad are set. We can now
	 * generate the bit lengths.
	 */
	gen_bitlen((tree_desc *) desc);

	/* The field len is now set, we can generate the bit codes */
	gen_codes((ct_data *) tree, max_code);
}
1390 | |
1391 | |
1392 | /* =========================================================================== |
1393 | * Scan a literal or distance tree to determine the frequencies of the codes |
1394 | * in the bit length tree. Updates opt_len to take into account the repeat |
1395 | * counts. (The contribution of the bit length codes will be added later |
1396 | * during the construction of bl_tree.) |
1397 | */ |
static void scan_tree(ct_data * tree, int max_code)
{
	int n; /* iterates over all tree elements */
	int prevlen = -1; /* last emitted length */
	int curlen; /* length of current code */
	int nextlen = tree[0].Len; /* length of next code */
	int count = 0; /* repeat count of the current code */
	int max_count = 7; /* max repeat count */
	int min_count = 4; /* min repeat count */

	if (nextlen == 0) {
		max_count = 138;
		min_count = 3;
	}
	tree[max_code + 1].Len = 0xffff; /* guard */

	for (n = 0; n <= max_code; n++) {
		curlen = nextlen;
		nextlen = tree[n + 1].Len;
		/* keep extending the run while the next code has the same length */
		if (++count < max_count && curlen == nextlen)
			continue;

		if (count < min_count) {
			/* run too short for a repeat code: count the length itself */
			G2.bl_tree[curlen].Freq += count;
		} else if (curlen != 0) {
			/* REP_3_6: previous non-zero length repeated 3..6 times */
			if (curlen != prevlen)
				G2.bl_tree[curlen].Freq++;
			G2.bl_tree[REP_3_6].Freq++;
		} else if (count <= 10) {
			/* REPZ_3_10: short run of zero lengths */
			G2.bl_tree[REPZ_3_10].Freq++;
		} else {
			/* REPZ_11_138: long run of zero lengths */
			G2.bl_tree[REPZ_11_138].Freq++;
		}
		count = 0;
		prevlen = curlen;

		/* choose limits for the next run (zero lengths repeat longer) */
		max_count = 7;
		min_count = 4;
		if (nextlen == 0) {
			max_count = 138;
			min_count = 3;
		} else if (curlen == nextlen) {
			max_count = 6;
			min_count = 3;
		}
	}
}
1445 | |
1446 | |
1447 | /* =========================================================================== |
1448 | * Send a literal or distance tree in compressed form, using the codes in |
1449 | * bl_tree. |
1450 | */ |
static void send_tree(ct_data * tree, int max_code)
{
	int n; /* iterates over all tree elements */
	int prevlen = -1; /* last emitted length */
	int curlen; /* length of current code */
	int nextlen = tree[0].Len; /* length of next code */
	int count = 0; /* repeat count of the current code */
	int max_count = 7; /* max repeat count */
	int min_count = 4; /* min repeat count */

	/* tree[max_code+1].Len = -1; *//* guard already set */
	if (nextlen == 0)
		max_count = 138, min_count = 3;

	for (n = 0; n <= max_code; n++) {
		curlen = nextlen;
		nextlen = tree[n + 1].Len;
		/* extend the current run while the next length matches */
		if (++count < max_count && curlen == nextlen) {
			continue;
		} else if (count < min_count) {
			/* run too short for a repeat code: send each length */
			do {
				SEND_CODE(curlen, G2.bl_tree);
			} while (--count);
		} else if (curlen != 0) {
			/* send the length once, then a REP_3_6 repeat code */
			if (curlen != prevlen) {
				SEND_CODE(curlen, G2.bl_tree);
				count--;
			}
			Assert(count >= 3 && count <= 6, " 3_6?");
			SEND_CODE(REP_3_6, G2.bl_tree);
			send_bits(count - 3, 2);
		} else if (count <= 10) {
			/* short run of zeros */
			SEND_CODE(REPZ_3_10, G2.bl_tree);
			send_bits(count - 3, 3);
		} else {
			/* long run of zeros */
			SEND_CODE(REPZ_11_138, G2.bl_tree);
			send_bits(count - 11, 7);
		}
		count = 0;
		prevlen = curlen;
		if (nextlen == 0) {
			max_count = 138;
			min_count = 3;
		} else if (curlen == nextlen) {
			max_count = 6;
			min_count = 3;
		} else {
			max_count = 7;
			min_count = 4;
		}
	}
}
1503 | |
1504 | |
1505 | /* =========================================================================== |
1506 | * Construct the Huffman tree for the bit lengths and return the index in |
1507 | * bl_order of the last bit length code to send. |
1508 | */ |
1509 | static int build_bl_tree(void) |
1510 | { |
1511 | int max_blindex; /* index of last bit length code of non zero freq */ |
1512 | |
1513 | /* Determine the bit length frequencies for literal and distance trees */ |
1514 | scan_tree(G2.dyn_ltree, G2.l_desc.max_code); |
1515 | scan_tree(G2.dyn_dtree, G2.d_desc.max_code); |
1516 | |
1517 | /* Build the bit length tree: */ |
1518 | build_tree(&G2.bl_desc); |
1519 | /* opt_len now includes the length of the tree representations, except |
1520 | * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. |
1521 | */ |
1522 | |
1523 | /* Determine the number of bit length codes to send. The pkzip format |
1524 | * requires that at least 4 bit length codes be sent. (appnote.txt says |
1525 | * 3 but the actual value used is 4.) |
1526 | */ |
1527 | for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { |
1528 | if (G2.bl_tree[bl_order[max_blindex]].Len != 0) |
1529 | break; |
1530 | } |
1531 | /* Update opt_len to include the bit length tree and counts */ |
1532 | G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; |
1533 | Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", G2.opt_len, G2.static_len)); |
1534 | |
1535 | return max_blindex; |
1536 | } |
1537 | |
1538 | |
1539 | /* =========================================================================== |
1540 | * Send the header for a block using dynamic Huffman trees: the counts, the |
1541 | * lengths of the bit length codes, the literal tree and the distance tree. |
1542 | * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. |
1543 | */ |
static void send_all_trees(int lcodes, int dcodes, int blcodes)
{
	int rank; /* index in bl_order */

	Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
	Assert(lcodes <= L_CODES && dcodes <= D_CODES
		&& blcodes <= BL_CODES, "too many codes");
	Tracev((stderr, "\nbl counts: "));
	/* HLIT, HDIST, HCLEN header fields */
	send_bits(lcodes - 257, 5); /* not +255 as stated in appnote.txt */
	send_bits(dcodes - 1, 5);
	send_bits(blcodes - 4, 4); /* not -3 as stated in appnote.txt */
	/* 3-bit code lengths of the bit-length alphabet, in bl_order */
	for (rank = 0; rank < blcodes; rank++) {
		Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
		send_bits(G2.bl_tree[bl_order[rank]].Len, 3);
	}
	Tracev((stderr, "\nbl tree: sent %ld", G1.bits_sent));

	send_tree((ct_data *) G2.dyn_ltree, lcodes - 1); /* send the literal tree */
	Tracev((stderr, "\nlit tree: sent %ld", G1.bits_sent));

	send_tree((ct_data *) G2.dyn_dtree, dcodes - 1); /* send the distance tree */
	Tracev((stderr, "\ndist tree: sent %ld", G1.bits_sent));
}
1567 | |
1568 | |
1569 | /* =========================================================================== |
1570 | * Save the match info and tally the frequency counts. Return true if |
1571 | * the current block must be flushed. |
1572 | */ |
static int ct_tally(int dist, int lc)
{
	G1.l_buf[G2.last_lit++] = lc;
	if (dist == 0) {
		/* lc is the unmatched char */
		G2.dyn_ltree[lc].Freq++;
	} else {
		/* Here, lc is the match length - MIN_MATCH */
		dist--; /* dist = match distance - 1 */
		Assert((ush) dist < (ush) MAX_DIST
			&& (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH)
			&& (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match"
		);

		G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++;
		G2.dyn_dtree[D_CODE(dist)].Freq++;

		G1.d_buf[G2.last_dist++] = dist;
		G2.flags |= G2.flag_bit; /* mark this entry as a match, not a literal */
	}
	G2.flag_bit <<= 1;

	/* Output the flags if they fill a byte: */
	if ((G2.last_lit & 7) == 0) {
		G2.flag_buf[G2.last_flags++] = G2.flags;
		G2.flags = 0;
		G2.flag_bit = 1;
	}
	/* Try to guess if it is profitable to stop the current block here */
	if ((G2.last_lit & 0xfff) == 0) {
		/* Compute an upper bound for the compressed length */
		ulg out_length = G2.last_lit * 8L;
		ulg in_length = (ulg) G1.strstart - G1.block_start;
		int dcode;

		for (dcode = 0; dcode < D_CODES; dcode++) {
			out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]);
		}
		out_length >>= 3;
		Trace((stderr,
			"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
			G2.last_lit, G2.last_dist, in_length, out_length,
			100L - out_length * 100L / in_length));
		/* flush early if matches are rare and estimated output is large */
		if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2)
			return 1;
	}
	return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE);
	/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
	 * on 16 bit machines and because stored blocks are restricted to
	 * 64K-1 bytes.
	 */
}
1625 | |
1626 | /* =========================================================================== |
1627 | * Send the block data compressed using the given Huffman trees |
1628 | */ |
static void compress_block(ct_data * ltree, ct_data * dtree)
{
	unsigned dist; /* distance of matched string */
	int lc; /* match length or unmatched char (if dist == 0) */
	unsigned lx = 0; /* running index in l_buf */
	unsigned dx = 0; /* running index in d_buf */
	unsigned fx = 0; /* running index in flag_buf */
	uch flag = 0; /* current flags */
	unsigned code; /* the code to send */
	int extra; /* number of extra bits to send */

	if (G2.last_lit != 0) do {
		/* reload the literal/match flag byte every 8 entries */
		if ((lx & 7) == 0)
			flag = G2.flag_buf[fx++];
		lc = G1.l_buf[lx++];
		if ((flag & 1) == 0) {
			SEND_CODE(lc, ltree); /* send a literal byte */
			Tracecv(lc > ' ', (stderr, " '%c' ", lc));
		} else {
			/* Here, lc is the match length - MIN_MATCH */
			code = G2.length_code[lc];
			SEND_CODE(code + LITERALS + 1, ltree); /* send the length code */
			extra = extra_lbits[code];
			if (extra != 0) {
				lc -= G2.base_length[code];
				send_bits(lc, extra); /* send the extra length bits */
			}
			dist = G1.d_buf[dx++];
			/* Here, dist is the match distance - 1 */
			code = D_CODE(dist);
			Assert(code < D_CODES, "bad d_code");

			SEND_CODE(code, dtree); /* send the distance code */
			extra = extra_dbits[code];
			if (extra != 0) {
				dist -= G2.base_dist[code];
				send_bits(dist, extra); /* send the extra distance bits */
			}
		} /* literal or match pair ? */
		flag >>= 1;
	} while (lx < G2.last_lit);

	SEND_CODE(END_BLOCK, ltree);
}
1673 | |
1674 | |
1675 | /* =========================================================================== |
1676 | * Determine the best encoding for the current block: dynamic trees, static |
1677 | * trees or store, and output the encoded block to the zip file. This function |
1678 | * returns the total compressed length for the file so far. |
1679 | */ |
1680 | static ulg flush_block(char *buf, ulg stored_len, int eof) |
1681 | { |
1682 | ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ |
1683 | int max_blindex; /* index of last bit length code of non zero freq */ |
1684 | |
1685 | G2.flag_buf[G2.last_flags] = G2.flags; /* Save the flags for the last 8 items */ |
1686 | |
1687 | /* Construct the literal and distance trees */ |
1688 | build_tree(&G2.l_desc); |
1689 | Tracev((stderr, "\nlit data: dyn %ld, stat %ld", G2.opt_len, G2.static_len)); |
1690 | |
1691 | build_tree(&G2.d_desc); |
1692 | Tracev((stderr, "\ndist data: dyn %ld, stat %ld", G2.opt_len, G2.static_len)); |
1693 | /* At this point, opt_len and static_len are the total bit lengths of |
1694 | * the compressed block data, excluding the tree representations. |
1695 | */ |
1696 | |
1697 | /* Build the bit length tree for the above two trees, and get the index |
1698 | * in bl_order of the last bit length code to send. |
1699 | */ |
1700 | max_blindex = build_bl_tree(); |
1701 | |
1702 | /* Determine the best encoding. Compute first the block length in bytes */ |
1703 | opt_lenb = (G2.opt_len + 3 + 7) >> 3; |
1704 | static_lenb = (G2.static_len + 3 + 7) >> 3; |
1705 | |
1706 | Trace((stderr, |
1707 | "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ", |
1708 | opt_lenb, G2.opt_len, static_lenb, G2.static_len, stored_len, |
1709 | G2.last_lit, G2.last_dist)); |
1710 | |
1711 | if (static_lenb <= opt_lenb) |
1712 | opt_lenb = static_lenb; |
1713 | |
1714 | /* If compression failed and this is the first and last block, |
1715 | * and if the zip file can be seeked (to rewrite the local header), |
1716 | * the whole file is transformed into a stored file: |
1717 | */ |
1718 | if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) { |
1719 | /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */ |
1720 | if (buf == NULL) |
1721 | bb_error_msg("block vanished"); |
1722 | |
1723 | copy_block(buf, (unsigned) stored_len, 0); /* without header */ |
1724 | G2.compressed_len = stored_len << 3; |
1725 | } else if (stored_len + 4 <= opt_lenb && buf != NULL) { |
1726 | /* 4: two words for the lengths */ |
1727 | /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. |
1728 | * Otherwise we can't have processed more than WSIZE input bytes since |
1729 | * the last block flush, because compression would have been |
1730 | * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to |
1731 | * transform a block into a stored block. |
1732 | */ |
1733 | send_bits((STORED_BLOCK << 1) + eof, 3); /* send block type */ |
1734 | G2.compressed_len = (G2.compressed_len + 3 + 7) & ~7L; |
1735 | G2.compressed_len += (stored_len + 4) << 3; |
1736 | |
1737 | copy_block(buf, (unsigned) stored_len, 1); /* with header */ |
1738 | } else if (static_lenb == opt_lenb) { |
1739 | send_bits((STATIC_TREES << 1) + eof, 3); |
1740 | compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree); |
1741 | G2.compressed_len += 3 + G2.static_len; |
1742 | } else { |
1743 | send_bits((DYN_TREES << 1) + eof, 3); |
1744 | send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1, |
1745 | max_blindex + 1); |
1746 | compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree); |
1747 | G2.compressed_len += 3 + G2.opt_len; |
1748 | } |
1749 | Assert(G2.compressed_len == G1.bits_sent, "bad compressed size"); |
1750 | init_block(); |
1751 | |
1752 | if (eof) { |
1753 | bi_windup(); |
1754 | G2.compressed_len += 7; /* align on byte boundary */ |
1755 | } |
1756 | Tracev((stderr, "\ncomprlen %lu(%lu) ", G2.compressed_len >> 3, |
1757 | G2.compressed_len - 7 * eof)); |
1758 | |
1759 | return G2.compressed_len >> 3; |
1760 | } |
1761 | |
1762 | |
1763 | /* =========================================================================== |
1764 | * Update a hash value with the given input byte |
1765 | * IN assertion: all calls to UPDATE_HASH are made with consecutive |
1766 | * input characters, so that a running hash key can be computed from the |
1767 | * previous key instead of complete recalculation each time. |
1768 | */ |
1769 | #define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK) |
1770 | |
1771 | |
1772 | /* =========================================================================== |
1773 | * Same as above, but achieves better compression. We use a lazy |
1774 | * evaluation for matches: a match is finally adopted only if there is |
1775 | * no better match at the next window position. |
1776 | * |
1777 | * Processes a new input file and return its compressed length. Sets |
1778 | * the compressed length, crc, deflate flags and internal file |
1779 | * attributes. |
1780 | */ |
1781 | |
/* Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match. */
/* When block_start < 0, the block's bytes have slid out of the window;
 * pass NULL so flush_block() cannot emit a stored block for them. */
#define FLUSH_BLOCK(eof) \
	flush_block( \
		G1.block_start >= 0L \
			? (char*)&G1.window[(unsigned)G1.block_start] \
			: (char*)NULL, \
		(ulg)G1.strstart - G1.block_start, \
		(eof) \
	)
1792 | |
/* Insert string s in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of s are valid
 * (except for the last MIN_MATCH-1 bytes of the input file). */
/* prev[] is indexed modulo WSIZE (WMASK), forming per-hash chains. */
#define INSERT_STRING(s, match_head) \
do { \
	UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
	G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
	head[G1.ins_h] = (s); \
} while (0)
1805 | |
/* Core deflate loop with lazy match evaluation: a match found at the
 * current position is only emitted if the next position does not yield
 * a longer one. Consumes the whole input (refilling via fill_window())
 * and returns the total compressed length in bytes so far. */
static ulg deflate(void)
{
	IPos hash_head; /* head of hash chain */
	IPos prev_match; /* previous match */
	int flush; /* set if current block must be flushed */
	int match_available = 0; /* set if previous match exists */
	unsigned match_length = MIN_MATCH - 1; /* length of best match */

	/* Process the input block. */
	while (G1.lookahead != 0) {
		/* Insert the string window[strstart .. strstart+2] in the
		 * dictionary, and set hash_head to the head of the hash chain:
		 */
		INSERT_STRING(G1.strstart, hash_head);

		/* Find the longest match, discarding those <= prev_length.
		 */
		G1.prev_length = match_length;
		prev_match = G1.match_start;
		match_length = MIN_MATCH - 1; /* i.e. "no current match yet" */

		if (hash_head != 0 && G1.prev_length < max_lazy_match
		 && G1.strstart - hash_head <= MAX_DIST
		) {
			/* To simplify the code, we prevent matches with the string
			 * of window index 0 (in particular we have to avoid a match
			 * of the string with itself at the start of the input file).
			 */
			match_length = longest_match(hash_head);
			/* longest_match() sets match_start */
			if (match_length > G1.lookahead)
				match_length = G1.lookahead;

			/* Ignore a length 3 match if it is too distant: */
			if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) {
				/* If prev_match is also MIN_MATCH, G1.match_start is garbage
				 * but we will ignore the current match anyway.
				 */
				match_length--;
			}
		}
		/* If there was a match at the previous step and the current
		 * match is not better, output the previous match:
		 */
		if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) {
			check_match(G1.strstart - 1, prev_match, G1.prev_length);
			/* ct_tally() returns nonzero when the block should be flushed */
			flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH);

			/* Insert in hash table all strings up to the end of the match.
			 * strstart-1 and strstart are already inserted.
			 */
			G1.lookahead -= G1.prev_length - 1;
			G1.prev_length -= 2;
			do {
				G1.strstart++;
				INSERT_STRING(G1.strstart, hash_head);
				/* strstart never exceeds WSIZE-MAX_MATCH, so there are
				 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
				 * these bytes are garbage, but it does not matter since the
				 * next lookahead bytes will always be emitted as literals.
				 */
			} while (--G1.prev_length != 0);
			match_available = 0;
			match_length = MIN_MATCH - 1;
			G1.strstart++;
			if (flush) {
				FLUSH_BLOCK(0);
				G1.block_start = G1.strstart;
			}
		} else if (match_available) {
			/* If there was no match at the previous position, output a
			 * single literal. If there was a match but the current match
			 * is longer, truncate the previous match to a single literal.
			 */
			Tracevv((stderr, "%c", G1.window[G1.strstart - 1]));
			if (ct_tally(0, G1.window[G1.strstart - 1])) {
				FLUSH_BLOCK(0);
				G1.block_start = G1.strstart;
			}
			G1.strstart++;
			G1.lookahead--;
		} else {
			/* There is no previous match to compare with, wait for
			 * the next step to decide.
			 */
			match_available = 1;
			G1.strstart++;
			G1.lookahead--;
		}
		Assert(G1.strstart <= G1.isize && lookahead <= G1.isize, "a bit too far");

		/* Make sure that we always have enough lookahead, except
		 * at the end of the input file. We need MAX_MATCH bytes
		 * for the next match, plus MIN_MATCH bytes to insert the
		 * string following the next match.
		 */
		while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
			fill_window();
	}
	/* A literal deferred at the last iteration still has to be emitted */
	if (match_available)
		ct_tally(0, G1.window[G1.strstart - 1]);

	return FLUSH_BLOCK(1); /* eof */
}
1910 | |
1911 | |
1912 | /* =========================================================================== |
1913 | * Initialize the bit string routines. |
1914 | */ |
1915 | static void bi_init(void) |
1916 | { |
1917 | G1.bi_buf = 0; |
1918 | G1.bi_valid = 0; |
1919 | #ifdef DEBUG |
1920 | G1.bits_sent = 0L; |
1921 | #endif |
1922 | } |
1923 | |
1924 | |
1925 | /* =========================================================================== |
1926 | * Initialize the "longest match" routines for a new file |
1927 | */ |
1928 | static void lm_init(ush * flagsp) |
1929 | { |
1930 | unsigned j; |
1931 | |
1932 | /* Initialize the hash table. */ |
1933 | memset(head, 0, HASH_SIZE * sizeof(*head)); |
1934 | /* prev will be initialized on the fly */ |
1935 | |
1936 | /* speed options for the general purpose bit flag */ |
1937 | *flagsp |= 2; /* FAST 4, SLOW 2 */ |
1938 | /* ??? reduce max_chain_length for binary files */ |
1939 | |
1940 | G1.strstart = 0; |
1941 | G1.block_start = 0L; |
1942 | |
1943 | G1.lookahead = file_read(G1.window, |
1944 | sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE); |
1945 | |
1946 | if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) { |
1947 | G1.eofile = 1; |
1948 | G1.lookahead = 0; |
1949 | return; |
1950 | } |
1951 | G1.eofile = 0; |
1952 | /* Make sure that we always have enough lookahead. This is important |
1953 | * if input comes from a device such as a tty. |
1954 | */ |
1955 | while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile) |
1956 | fill_window(); |
1957 | |
1958 | G1.ins_h = 0; |
1959 | for (j = 0; j < MIN_MATCH - 1; j++) |
1960 | UPDATE_HASH(G1.ins_h, G1.window[j]); |
1961 | /* If lookahead < MIN_MATCH, ins_h is garbage, but this is |
1962 | * not important since only literal bytes will be emitted. |
1963 | */ |
1964 | } |
1965 | |
1966 | |
1967 | /* =========================================================================== |
1968 | * Allocate the match buffer, initialize the various tables and save the |
1969 | * location of the internal file attribute (ascii/binary) and method |
1970 | * (DEFLATE/STORE). |
1971 | * One callsite in zip() |
1972 | */ |
1973 | static void ct_init(void) |
1974 | { |
1975 | int n; /* iterates over tree elements */ |
1976 | int length; /* length value */ |
1977 | int code; /* code value */ |
1978 | int dist; /* distance index */ |
1979 | |
1980 | G2.compressed_len = 0L; |
1981 | |
1982 | #ifdef NOT_NEEDED |
1983 | if (G2.static_dtree[0].Len != 0) |
1984 | return; /* ct_init already called */ |
1985 | #endif |
1986 | |
1987 | /* Initialize the mapping length (0..255) -> length code (0..28) */ |
1988 | length = 0; |
1989 | for (code = 0; code < LENGTH_CODES - 1; code++) { |
1990 | G2.base_length[code] = length; |
1991 | for (n = 0; n < (1 << extra_lbits[code]); n++) { |
1992 | G2.length_code[length++] = code; |
1993 | } |
1994 | } |
1995 | Assert(length == 256, "ct_init: length != 256"); |
1996 | /* Note that the length 255 (match length 258) can be represented |
1997 | * in two different ways: code 284 + 5 bits or code 285, so we |
1998 | * overwrite length_code[255] to use the best encoding: |
1999 | */ |
2000 | G2.length_code[length - 1] = code; |
2001 | |
2002 | /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ |
2003 | dist = 0; |
2004 | for (code = 0; code < 16; code++) { |
2005 | G2.base_dist[code] = dist; |
2006 | for (n = 0; n < (1 << extra_dbits[code]); n++) { |
2007 | G2.dist_code[dist++] = code; |
2008 | } |
2009 | } |
2010 | Assert(dist == 256, "ct_init: dist != 256"); |
2011 | dist >>= 7; /* from now on, all distances are divided by 128 */ |
2012 | for (; code < D_CODES; code++) { |
2013 | G2.base_dist[code] = dist << 7; |
2014 | for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { |
2015 | G2.dist_code[256 + dist++] = code; |
2016 | } |
2017 | } |
2018 | Assert(dist == 256, "ct_init: 256+dist != 512"); |
2019 | |
2020 | /* Construct the codes of the static literal tree */ |
2021 | /* already zeroed - it's in bss |
2022 | for (n = 0; n <= MAX_BITS; n++) |
2023 | G2.bl_count[n] = 0; */ |
2024 | |
2025 | n = 0; |
2026 | while (n <= 143) { |
2027 | G2.static_ltree[n++].Len = 8; |
2028 | G2.bl_count[8]++; |
2029 | } |
2030 | while (n <= 255) { |
2031 | G2.static_ltree[n++].Len = 9; |
2032 | G2.bl_count[9]++; |
2033 | } |
2034 | while (n <= 279) { |
2035 | G2.static_ltree[n++].Len = 7; |
2036 | G2.bl_count[7]++; |
2037 | } |
2038 | while (n <= 287) { |
2039 | G2.static_ltree[n++].Len = 8; |
2040 | G2.bl_count[8]++; |
2041 | } |
2042 | /* Codes 286 and 287 do not exist, but we must include them in the |
2043 | * tree construction to get a canonical Huffman tree (longest code |
2044 | * all ones) |
2045 | */ |
2046 | gen_codes((ct_data *) G2.static_ltree, L_CODES + 1); |
2047 | |
2048 | /* The static distance tree is trivial: */ |
2049 | for (n = 0; n < D_CODES; n++) { |
2050 | G2.static_dtree[n].Len = 5; |
2051 | G2.static_dtree[n].Code = bi_reverse(n, 5); |
2052 | } |
2053 | |
2054 | /* Initialize the first block of the first file: */ |
2055 | init_block(); |
2056 | } |
2057 | |
2058 | |
2059 | /* =========================================================================== |
2060 | * Deflate in to out. |
2061 | * IN assertions: the input and output buffers are cleared. |
2062 | */ |
2063 | |
2064 | static void zip(void) |
2065 | { |
2066 | ush deflate_flags = 0; /* pkzip -es, -en or -ex equivalent */ |
2067 | |
2068 | G1.outcnt = 0; |
2069 | |
2070 | /* Write the header to the gzip file. See algorithm.doc for the format */ |
2071 | /* magic header for gzip files: 1F 8B */ |
2072 | /* compression method: 8 (DEFLATED) */ |
2073 | /* general flags: 0 */ |
2074 | put_32bit(0x00088b1f); |
2075 | put_32bit(0); /* Unix timestamp */ |
2076 | |
2077 | /* Write deflated file to zip file */ |
2078 | G1.crc = ~0; |
2079 | |
2080 | bi_init(); |
2081 | ct_init(); |
2082 | lm_init(&deflate_flags); |
2083 | |
2084 | put_8bit(deflate_flags); /* extra flags */ |
2085 | put_8bit(3); /* OS identifier = 3 (Unix) */ |
2086 | |
2087 | deflate(); |
2088 | |
2089 | /* Write the crc and uncompressed size */ |
2090 | put_32bit(~G1.crc); |
2091 | put_32bit(G1.isize); |
2092 | |
2093 | flush_outbuf(); |
2094 | } |
2095 | |
2096 | |
2097 | /* ======================================================================== */ |
2098 | static |
2099 | IF_DESKTOP(long long) int FAST_FUNC pack_gzip(transformer_state_t *xstate UNUSED_PARAM) |
2100 | { |
2101 | /* Clear input and output buffers */ |
2102 | G1.outcnt = 0; |
2103 | #ifdef DEBUG |
2104 | G1.insize = 0; |
2105 | #endif |
2106 | G1.isize = 0; |
2107 | |
2108 | /* Reinit G2.xxx */ |
2109 | memset(&G2, 0, sizeof(G2)); |
2110 | G2.l_desc.dyn_tree = G2.dyn_ltree; |
2111 | G2.l_desc.static_tree = G2.static_ltree; |
2112 | G2.l_desc.extra_bits = extra_lbits; |
2113 | G2.l_desc.extra_base = LITERALS + 1; |
2114 | G2.l_desc.elems = L_CODES; |
2115 | G2.l_desc.max_length = MAX_BITS; |
2116 | //G2.l_desc.max_code = 0; |
2117 | G2.d_desc.dyn_tree = G2.dyn_dtree; |
2118 | G2.d_desc.static_tree = G2.static_dtree; |
2119 | G2.d_desc.extra_bits = extra_dbits; |
2120 | //G2.d_desc.extra_base = 0; |
2121 | G2.d_desc.elems = D_CODES; |
2122 | G2.d_desc.max_length = MAX_BITS; |
2123 | //G2.d_desc.max_code = 0; |
2124 | G2.bl_desc.dyn_tree = G2.bl_tree; |
2125 | //G2.bl_desc.static_tree = NULL; |
2126 | G2.bl_desc.extra_bits = extra_blbits, |
2127 | //G2.bl_desc.extra_base = 0; |
2128 | G2.bl_desc.elems = BL_CODES; |
2129 | G2.bl_desc.max_length = MAX_BL_BITS; |
2130 | //G2.bl_desc.max_code = 0; |
2131 | |
2132 | #if 0 |
2133 | /* Saving of timestamp is disabled. Why? |
2134 | * - it is not Y2038-safe. |
2135 | * - some people want deterministic results |
2136 | * (normally they'd use -n, but our -n is a nop). |
2137 | * - it's bloat. |
2138 | * Per RFC 1952, gzfile.time=0 is "no timestamp". |
2139 | * If users will demand this to be reinstated, |
2140 | * implement -n "don't save timestamp". |
2141 | */ |
2142 | struct stat s; |
2143 | s.st_ctime = 0; |
2144 | fstat(STDIN_FILENO, &s); |
2145 | zip(s.st_ctime); |
2146 | #else |
2147 | zip(); |
2148 | #endif |
2149 | return 0; |
2150 | } |
2151 | |
#if ENABLE_FEATURE_GZIP_LONG_OPTIONS
/* GNU-gzip-compatible long option names, each aliased to the short
 * option letter actually parsed by getopt32() in gzip_main() */
static const char gzip_longopts[] ALIGN1 =
	"stdout\0" No_argument "c"
	"to-stdout\0" No_argument "c"
	"force\0" No_argument "f"
	"verbose\0" No_argument "v"
#if ENABLE_GUNZIP
	"decompress\0" No_argument "d"
	"uncompress\0" No_argument "d"
	"test\0" No_argument "t"
#endif
	"quiet\0" No_argument "q"
	"fast\0" No_argument "1"
	"best\0" No_argument "9"
	"no-name\0" No_argument "n"
	;
#endif
2169 | |
2170 | /* |
2171 | * Linux kernel build uses gzip -d -n. We accept and ignore -n. |
2172 | * Man page says: |
2173 | * -n --no-name |
2174 | * gzip: do not save the original file name and time stamp. |
2175 | * (The original name is always saved if the name had to be truncated.) |
2176 | * gunzip: do not restore the original file name/time even if present |
2177 | * (remove only the gzip suffix from the compressed file name). |
2178 | * This option is the default when decompressing. |
2179 | * -N --name |
2180 | * gzip: always save the original file name and time stamp (this is the default) |
2181 | * gunzip: restore the original file name and time stamp if present. |
2182 | */ |
2183 | |
2184 | int gzip_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE; |
2185 | #if ENABLE_GUNZIP |
2186 | int gzip_main(int argc, char **argv) |
2187 | #else |
2188 | int gzip_main(int argc UNUSED_PARAM, char **argv) |
2189 | #endif |
2190 | { |
2191 | unsigned opt; |
2192 | #ifdef ENABLE_FEATURE_GZIP_LEVELS |
2193 | static const struct { |
2194 | uint8_t good; |
2195 | uint8_t chain_shift; |
2196 | uint8_t lazy2; |
2197 | uint8_t nice2; |
2198 | } gzip_level_config[6] = { |
2199 | {4, 4, 4/2, 16/2}, /* Level 4 */ |
2200 | {8, 5, 16/2, 32/2}, /* Level 5 */ |
2201 | {8, 7, 16/2, 128/2}, /* Level 6 */ |
2202 | {8, 8, 32/2, 128/2}, /* Level 7 */ |
2203 | {32, 10, 128/2, 258/2}, /* Level 8 */ |
2204 | {32, 12, 258/2, 258/2}, /* Level 9 */ |
2205 | }; |
2206 | #endif |
2207 | |
2208 | SET_PTR_TO_GLOBALS((char *)xzalloc(sizeof(struct globals)+sizeof(struct globals2)) |
2209 | + sizeof(struct globals)); |
2210 | |
2211 | #if ENABLE_FEATURE_GZIP_LONG_OPTIONS |
2212 | applet_long_options = gzip_longopts; |
2213 | #endif |
2214 | /* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */ |
2215 | opt = getopt32(argv, "cfv" IF_GUNZIP("dt") "qn123456789"); |
2216 | #if ENABLE_GUNZIP /* gunzip_main may not be visible... */ |
2217 | if (opt & 0x18) // -d and/or -t |
2218 | return gunzip_main(argc, argv); |
2219 | #endif |
2220 | #ifdef ENABLE_FEATURE_GZIP_LEVELS |
2221 | opt >>= ENABLE_GUNZIP ? 7 : 5; /* drop cfv[dt]qn bits */ |
2222 | if (opt == 0) |
2223 | opt = 1 << 6; /* default: 6 */ |
2224 | opt = ffs(opt >> 4); /* Maps -1..-4 to [0], -5 to [1] ... -9 to [5] */ |
2225 | max_chain_length = 1 << gzip_level_config[opt].chain_shift; |
2226 | good_match = gzip_level_config[opt].good; |
2227 | max_lazy_match = gzip_level_config[opt].lazy2 * 2; |
2228 | nice_match = gzip_level_config[opt].nice2 * 2; |
2229 | #endif |
2230 | option_mask32 &= 0x7; /* retain only -cfv */ |
2231 | |
2232 | /* Allocate all global buffers (for DYN_ALLOC option) */ |
2233 | ALLOC(uch, G1.l_buf, INBUFSIZ); |
2234 | ALLOC(uch, G1.outbuf, OUTBUFSIZ); |
2235 | ALLOC(ush, G1.d_buf, DIST_BUFSIZE); |
2236 | ALLOC(uch, G1.window, 2L * WSIZE); |
2237 | ALLOC(ush, G1.prev, 1L << BITS); |
2238 | |
2239 | /* Initialize the CRC32 table */ |
2240 | global_crc32_table = crc32_filltable(NULL, 0); |
2241 | |
2242 | argv += optind; |
2243 | return bbunpack(argv, pack_gzip, append_ext, "gz"); |
2244 | } |
2245 |