Lines in zlib's trees.c matching references to: s

139 local void init_block     OF((deflate_state *s));
140 local void pqdownheap OF((deflate_state *s, ct_data *tree, int k));
141 local void gen_bitlen OF((deflate_state *s, tree_desc *desc));
143 local void build_tree OF((deflate_state *s, tree_desc *desc));
144 local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code));
145 local void send_tree OF((deflate_state *s, ct_data *tree, int max_code));
146 local int build_bl_tree OF((deflate_state *s));
147 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes, int blcodes));
149 local void compress_block OF((deflate_state *s, const ct_data *ltree, const ct_data *dtree));
151 local int detect_data_type OF((deflate_state *s));
153 local void bi_windup OF((deflate_state *s));
154 local void bi_flush OF((deflate_state *s));
161 # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
165 # define send_code(s, c, tree) \
167 send_bits(s, tree[c].Code, tree[c].Len); }
174 #define put_short(s, w) { \
175 put_byte(s, (uch)((w) & 0xff)); \
176 put_byte(s, (uch)((ush)(w) >> 8)); \
184 local void send_bits OF((deflate_state *s, int value, int length));
186 local void send_bits(s, value, length)
187 deflate_state *s;
193 s->bits_sent += (ulg)length;
199 if (s->bi_valid > (int)Buf_size - length) {
200 s->bi_buf |= (ush)value << s->bi_valid;
201 put_short(s, s->bi_buf);
202 s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
203 s->bi_valid += length - Buf_size;
205 s->bi_buf |= (ush)value << s->bi_valid;
206 s->bi_valid += length;
211 #define send_bits(s, value, length) \
213 if (s->bi_valid > (int)Buf_size - len) {\
215 s->bi_buf |= (ush)val << s->bi_valid;\
216 put_short(s, s->bi_buf);\
217 s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
218 s->bi_valid += len - Buf_size;\
220 s->bi_buf |= (ush)(value) << s->bi_valid;\
221 s->bi_valid += len;\
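
The function form above (lines 186-206) and the macro form (lines 211-221) both implement send_bits(): codes are packed least-significant-bit first into a 16-bit holding buffer (bi_buf, with bi_valid counting the live bits) and flushed to the pending output two bytes at a time, low byte first. A minimal standalone sketch of that idea, assuming Buf_size is 16 as in trees.c; the names BitWriter, bw_put_short and bw_send_bits are invented for this illustration, not zlib's:

    #include <stdio.h>

    #define BUF_SIZE 16                /* width of the holding buffer */

    typedef struct {
        unsigned short bi_buf;         /* pending bits, low bits first */
        int bi_valid;                  /* number of valid bits in bi_buf */
        FILE *out;
    } BitWriter;

    static void bw_put_short(BitWriter *bw, unsigned short w) {
        putc(w & 0xff, bw->out);       /* low byte first, like put_short() */
        putc((w >> 8) & 0xff, bw->out);
    }

    static void bw_send_bits(BitWriter *bw, int value, int length) {
        if (bw->bi_valid > BUF_SIZE - length) {
            /* value straddles the buffer: emit 16 bits, keep the overflow */
            bw->bi_buf |= (unsigned short)((unsigned)value << bw->bi_valid);
            bw_put_short(bw, bw->bi_buf);
            bw->bi_buf = (unsigned short)((unsigned)value >> (BUF_SIZE - bw->bi_valid));
            bw->bi_valid += length - BUF_SIZE;
        } else {
            bw->bi_buf |= (unsigned short)((unsigned)value << bw->bi_valid);
            bw->bi_valid += length;
        }
    }

A real stream still has to flush the leftover bits at the end, which is what bi_flush() and bi_windup() near the bottom of this listing take care of.
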
379 void ZLIB_INTERNAL _tr_init(s)
380 deflate_state *s;
384 s->l_desc.dyn_tree = s->dyn_ltree;
385 s->l_desc.stat_desc = &static_l_desc;
387 s->d_desc.dyn_tree = s->dyn_dtree;
388 s->d_desc.stat_desc = &static_d_desc;
390 s->bl_desc.dyn_tree = s->bl_tree;
391 s->bl_desc.stat_desc = &static_bl_desc;
393 s->bi_buf = 0;
394 s->bi_valid = 0;
396 s->compressed_len = 0L;
397 s->bits_sent = 0L;
401 init_block(s);
407 local void init_block(s)
408 deflate_state *s;
413 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
414 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
415 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
417 s->dyn_ltree[END_BLOCK].Freq = 1;
418 s->opt_len = s->static_len = 0L;
419 s->sym_next = s->matches = 0;
430 #define pqremove(s, tree, top) \
432 top = s->heap[SMALLEST]; \
433 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
434 pqdownheap(s, tree, SMALLEST); \
451 local void pqdownheap(s, tree, k)
452 deflate_state *s;
456 int v = s->heap[k];
458 while (j <= s->heap_len) {
460 if (j < s->heap_len &&
461 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
465 if (smaller(tree, v, s->heap[j], s->depth)) break;
468 s->heap[k] = s->heap[j]; k = j;
473 s->heap[k] = v;
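
pqdownheap() restores the heap property by sifting the node at heap[k] toward the leaves, always descending into the smaller child; nodes are ordered by frequency, with ties broken by tree depth (zlib's smaller() macro). A self-contained sketch of the same sift-down, with plain arrays standing in for the deflate_state fields and invented names sift_down and smaller_node:

    /* Return nonzero if node n should sort before node m: lower frequency
     * wins, equal frequencies are broken by smaller depth. */
    static int smaller_node(const unsigned *freq, const unsigned char *depth,
                            int n, int m) {
        return freq[n] < freq[m] ||
               (freq[n] == freq[m] && depth[n] <= depth[m]);
    }

    /* heap[] is 1-based with heap_len live entries, as in trees.c. */
    static void sift_down(int *heap, int heap_len, int k,
                          const unsigned *freq, const unsigned char *depth) {
        int v = heap[k];                       /* node being sifted down */
        int j = k << 1;                        /* left child of k */
        while (j <= heap_len) {
            /* pick the smaller of the two children */
            if (j < heap_len && smaller_node(freq, depth, heap[j + 1], heap[j]))
                j++;
            /* stop when v is no larger than the smaller child */
            if (smaller_node(freq, depth, v, heap[j])) break;
            heap[k] = heap[j];                 /* move the child up */
            k = j;
            j <<= 1;                           /* continue down the tree */
        }
        heap[k] = v;
    }
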
486 local void gen_bitlen(s, desc)
487 deflate_state *s;
503 for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
508 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
510 for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
511 n = s->heap[h];
519 s->bl_count[bits]++;
523 s->opt_len += (ulg)f * (unsigned)(bits + xbits);
524 if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
534 while (s->bl_count[bits] == 0) bits--;
535 s->bl_count[bits]--; /* move one leaf down the tree */
536 s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
537 s->bl_count[max_length]--;
550 n = s->bl_count[bits];
552 m = s->heap[--h];
556 s->opt_len += ((ulg)bits - tree[m].Len) * tree[m].Freq;
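
Lines 534-537 above are the overflow-repair step of gen_bitlen(): when the unconstrained Huffman tree produces codes longer than max_length, one leaf at a shorter depth is moved down a level and one over-long item is moved up as its brother; the over-long item's former sibling also rises one step, so each pass fixes two overflows. A hedged sketch of just that adjustment, where bl_count[] counts leaves per bit length and overflow is the number of codes that came out too long (fix_overflow is an invented name):

    static void fix_overflow(unsigned short *bl_count, int max_length,
                             int overflow) {
        while (overflow > 0) {
            int bits = max_length - 1;
            while (bl_count[bits] == 0) bits--;  /* deepest usable shorter level */
            bl_count[bits]--;                    /* move one leaf down the tree */
            bl_count[bits + 1] += 2;             /* it gains an overflow brother */
            bl_count[max_length]--;              /* one less over-long leaf */
            overflow -= 2;
        }
    }

The later loop (lines 550-556) then hands the corrected lengths back to the leaves, giving the longest codes to the least frequent symbols, and updates opt_len accordingly.
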
615 local void build_tree(s, desc)
616 deflate_state *s;
630 s->heap_len = 0, s->heap_max = HEAP_SIZE;
634 s->heap[++(s->heap_len)] = max_code = n;
635 s->depth[n] = 0;
646 while (s->heap_len < 2) {
647 node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
649 s->depth[node] = 0;
650 s->opt_len--; if (stree) s->static_len -= stree[node].Len;
658 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
665 pqremove(s, tree, n); /* n = node of least frequency */
666 m = s->heap[SMALLEST]; /* m = node of next least frequency */
668 s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
669 s->heap[--(s->heap_max)] = m;
673 s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ?
674 s->depth[n] : s->depth[m]) + 1);
677 if (tree == s->bl_tree) {
683 s->heap[SMALLEST] = node++;
684 pqdownheap(s, tree, SMALLEST);
686 } while (s->heap_len >= 2);
688 s->heap[--(s->heap_max)] = s->heap[SMALLEST];
693 gen_bitlen(s, (tree_desc *)desc);
696 gen_codes ((ct_data *)tree, max_code, s->bl_count);
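
Taken together, the build_tree() lines above implement the classic Huffman construction: while more than one node remains on the frequency heap, remove the two least-frequent nodes (pqremove and heap[SMALLEST]), create a parent whose frequency is their sum, and push the parent back; gen_bitlen() and gen_codes() then turn the finished tree into bit lengths and codes. Purely as an illustration of that construction, here is a toy, self-contained version that uses an O(n^2) scan for the two minima instead of zlib's heap and enforces no length limit; toy_huffman_lengths and MAX_SYMS are invented names:

    #define MAX_SYMS 16        /* this sketch assumes n <= MAX_SYMS */

    static void toy_huffman_lengths(const unsigned freq_in[], int n,
                                    unsigned char len[]) {
        unsigned freq[2 * MAX_SYMS];
        int dad[2 * MAX_SYMS];         /* parent links, -1 for the root */
        int live[2 * MAX_SYMS];        /* 1 while a node can still be merged */
        int i, next = n, remaining = 0;

        for (i = 0; i < n; i++) {
            freq[i] = freq_in[i];
            dad[i] = -1;
            live[i] = (freq_in[i] != 0);
            remaining += live[i];
        }
        /* Merge the two least-frequent live nodes until one root remains. */
        while (remaining > 1) {
            int a = -1, b = -1;
            for (i = 0; i < next; i++) {
                if (!live[i]) continue;
                if (a < 0 || freq[i] < freq[a]) { b = a; a = i; }
                else if (b < 0 || freq[i] < freq[b]) b = i;
            }
            freq[next] = freq[a] + freq[b];
            dad[a] = dad[b] = next;
            dad[next] = -1;
            live[a] = live[b] = 0;
            live[next] = 1;
            next++;
            remaining--;
        }
        /* A symbol's code length is its depth below the root. */
        for (i = 0; i < n; i++) {
            int d = 0, p = i;
            if (freq_in[i] == 0) { len[i] = 0; continue; }
            while (dad[p] != -1) { d++; p = dad[p]; }
            len[i] = (unsigned char)(d > 0 ? d : 1);
        }
    }
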
703 local void scan_tree (s, tree, max_code)
704 deflate_state *s;
724 s->bl_tree[curlen].Freq += count;
726 if (curlen != prevlen) s->bl_tree[curlen].Freq++;
727 s->bl_tree[REP_3_6].Freq++;
729 s->bl_tree[REPZ_3_10].Freq++;
731 s->bl_tree[REPZ_11_138].Freq++;
748 local void send_tree (s, tree, max_code)
749 deflate_state *s;
769 do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
773 send_code(s, curlen, s->bl_tree); count--;
776 send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
779 send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
782 send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
799 local int build_bl_tree(s)
800 deflate_state *s;
805 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
806 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
809 build_tree(s, (tree_desc *)(&(s->bl_desc)));
819 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
822 s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
824 s->opt_len, s->static_len));
834 local void send_all_trees(s, lcodes, dcodes, blcodes)
835 deflate_state *s;
844 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
845 send_bits(s, dcodes-1, 5);
846 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
849 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
851 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
853 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
854 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
856 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
857 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
863 void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
864 deflate_state *s;
869 send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
870 bi_windup(s); /* align on byte boundary */
871 put_short(s, (ush)stored_len);
872 put_short(s, (ush)~stored_len);
874 zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len);
875 s->pending += stored_len;
877 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
878 s->compressed_len += (stored_len + 4) << 3;
879 s->bits_sent += 2*16;
880 s->bits_sent += stored_len<<3;
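
_tr_stored_block() frames an uncompressed block: a 3-bit block header (STORED_BLOCK plus the "last block" bit), padding to a byte boundary via bi_windup(), then LEN and its one's complement NLEN as 16-bit little-endian values, and finally the raw bytes copied into pending_buf. A sketch of that framing for the simple case where the output already sits on a byte boundary; write_stored_block is an invented name and writes straight to a FILE* rather than to the pending buffer:

    #include <stdio.h>

    static void write_stored_block(FILE *out, const unsigned char *buf,
                                   unsigned short len, int last) {
        unsigned nlen = 0xffffu & ~(unsigned)len;   /* one's complement of LEN */
        /* 3-bit header: bit 0 = BFINAL, bits 1-2 = BTYPE (00 = stored).  With
         * the stream byte-aligned, the 5 padding bits make the header a whole
         * byte; in deflate proper the header can start mid-byte and
         * bi_windup() supplies the padding. */
        putc(last ? 1 : 0, out);
        putc(len & 0xff, out);                      /* LEN, little-endian */
        putc((len >> 8) & 0xff, out);
        putc(nlen & 0xff, out);                     /* NLEN, little-endian */
        putc((nlen >> 8) & 0xff, out);
        fwrite(buf, 1, len, out);                   /* the data itself */
    }
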
887 void ZLIB_INTERNAL _tr_flush_bits(s)
888 deflate_state *s;
890 bi_flush(s);
897 void ZLIB_INTERNAL _tr_align(s)
898 deflate_state *s;
900 send_bits(s, STATIC_TREES<<1, 3);
901 send_code(s, END_BLOCK, static_ltree);
903 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
905 bi_flush(s);
912 void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
913 deflate_state *s;
922 if (s->level > 0) {
925 if (s->strm->data_type == Z_UNKNOWN)
926 s->strm->data_type = detect_data_type(s);
929 build_tree(s, (tree_desc *)(&(s->l_desc)));
930 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
931 s->static_len));
933 build_tree(s, (tree_desc *)(&(s->d_desc)));
934 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
935 s->static_len));
943 max_blindex = build_bl_tree(s);
946 opt_lenb = (s->opt_len+3+7)>>3;
947 static_lenb = (s->static_len+3+7)>>3;
950 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
951 s->sym_next / 3));
972 _tr_stored_block(s, buf, stored_len, last);
977 } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
979 send_bits(s, (STATIC_TREES<<1)+last, 3);
980 compress_block(s, (const ct_data *)static_ltree,
983 s->compressed_len += 3 + s->static_len;
986 send_bits(s, (DYN_TREES<<1)+last, 3);
987 send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
989 compress_block(s, (const ct_data *)s->dyn_ltree,
990 (const ct_data *)s->dyn_dtree);
992 s->compressed_len += 3 + s->opt_len;
995 Assert (s->compressed_len == s->bits_sent, "bad compressed size");
999 init_block(s);
1002 bi_windup(s);
1004 s->compressed_len += 7; /* align on byte boundary */
1007 Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
1008 s->compressed_len-7*last));
1015 int ZLIB_INTERNAL _tr_tally (s, dist, lc)
1016 deflate_state *s;
1020 s->sym_buf[s->sym_next++] = dist;
1021 s->sym_buf[s->sym_next++] = dist >> 8;
1022 s->sym_buf[s->sym_next++] = lc;
1025 s->dyn_ltree[lc].Freq++;
1027 s->matches++;
1030 Assert((ush)dist < (ush)MAX_DIST(s) &&
1034 s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
1035 s->dyn_dtree[d_code(dist)].Freq++;
1037 return (s->sym_next == s->sym_end);
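
_tr_tally() appends each literal or match to sym_buf as a fixed 3-byte record: the match distance (0 for a plain literal) in two little-endian bytes, then the literal byte or the length-code index lc, returning nonzero once sym_next reaches sym_end so the caller flushes the block. A sketch of just that record layout; SymTally and record_symbol are invented stand-ins for the deflate_state fields, and the Freq updates shown above are omitted:

    typedef struct {
        unsigned char sym_buf[3 * 1024];   /* illustrative capacity */
        unsigned sym_next;                 /* next free byte, advances by 3 */
        unsigned sym_end;                  /* == sizeof(sym_buf) here */
        unsigned matches;                  /* number of match records */
    } SymTally;

    static int record_symbol(SymTally *t, unsigned dist, unsigned lc) {
        t->sym_buf[t->sym_next++] = (unsigned char)dist;         /* low byte  */
        t->sym_buf[t->sym_next++] = (unsigned char)(dist >> 8);  /* high byte */
        t->sym_buf[t->sym_next++] = (unsigned char)lc;
        if (dist != 0) t->matches++;       /* dist == 0 marks a plain literal */
        return t->sym_next == t->sym_end;  /* nonzero: buffer full, flush now */
    }
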
1043 local void compress_block(s, ltree, dtree)
1044 deflate_state *s;
1054 if (s->sym_next != 0) do {
1055 dist = s->sym_buf[sx++] & 0xff;
1056 dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
1057 lc = s->sym_buf[sx++];
1059 send_code(s, lc, ltree); /* send a literal byte */
1064 send_code(s, code+LITERALS+1, ltree); /* send the length code */
1068 send_bits(s, lc, extra); /* send the extra length bits */
1074 send_code(s, code, dtree); /* send the distance code */
1078 send_bits(s, dist, extra); /* send the extra distance bits */
1083 Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");
1085 } while (sx < s->sym_next);
1087 send_code(s, END_BLOCK, ltree);
1103 local int detect_data_type(s)
1104 deflate_state *s;
1115 if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0))
1119 if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
1120 || s->dyn_ltree[13].Freq != 0)
1123 if (s->dyn_ltree[n].Freq != 0)
1152 local void bi_flush(s)
1153 deflate_state *s;
1155 if (s->bi_valid == 16) {
1156 put_short(s, s->bi_buf);
1157 s->bi_buf = 0;
1158 s->bi_valid = 0;
1159 } else if (s->bi_valid >= 8) {
1160 put_byte(s, (Byte)s->bi_buf);
1161 s->bi_buf >>= 8;
1162 s->bi_valid -= 8;
1169 local void bi_windup(s)
1170 deflate_state *s;
1172 if (s->bi_valid > 8) {
1173 put_short(s, s->bi_buf);
1174 } else if (s->bi_valid > 0) {
1175 put_byte(s, (Byte)s->bi_buf);
1177 s->bi_buf = 0;
1178 s->bi_valid = 0;
1180 s->bits_sent = (s->bits_sent+7) & ~7;