X-Git-Url: https://git.sesse.net/?p=fjl;a=blobdiff_plain;f=dehuff.h;h=7621f6095e99791feb6138e81c2574ae9dda6bce;hp=69fc798ddd1f426186576e971126101734531b94;hb=a4009687c73083dd0290285a065740a83e27e855;hpb=fc9f7affda0b0141099f8e91c1a36ddceed8319c

diff --git a/dehuff.h b/dehuff.h
index 69fc798..7621f60 100644
--- a/dehuff.h
+++ b/dehuff.h
@@ -48,12 +48,11 @@ void read_huffman_tables(huffman_tables_t* dst, input_func_t* input_func, void*
 unsigned read_huffman_symbol_slow_path(const struct huffman_table* table,
                                        struct bit_source* source);
 
-static inline unsigned read_huffman_symbol(const struct huffman_table* table,
-                                           struct bit_source* source)
+static inline unsigned read_huffman_symbol_no_refill(
+	const struct huffman_table* table,
+	struct bit_source* source)
 {
-	// FIXME: We can read past the end of the stream here in some edge
-	// cases. We need to define some guarantees in the layers above.
-	possibly_refill(source, DEHUF_TABLE_BITS);
+	assert(source->bits_available >= DEHUF_TABLE_BITS);
 	unsigned lookup = peek_bits(source, DEHUF_TABLE_BITS);
 	int code = table->lookup_table_codes[lookup];
 	int length = table->lookup_table_length[lookup];
@@ -61,27 +60,46 @@ static inline unsigned read_huffman_symbol(const struct huffman_table* table,
 	if (code == DEHUF_SLOW_PATH) {
 		return read_huffman_symbol_slow_path(table, source);
 	}
-
+
 	read_bits(source, length);
 	return code;
 }
 
+static inline unsigned read_huffman_symbol(const struct huffman_table* table,
+                                           struct bit_source* source)
+{
+	possibly_refill(source, DEHUF_TABLE_BITS);
+	return read_huffman_symbol_no_refill(table, source);
+}
+
 // procedure EXTEND (figure F.12)
 
 // Fast lookup table for (1 << (bits - 1)).
 // The table actually helps, since the load can go in parallel with the shift
 // operation below.
 static const int bit_thresholds[16] = {
-	0, 1 << 0, 1 << 2, 1 << 3, 1 << 4, 1 << 5, 1 << 6, 1 << 7, 1 << 8, 1 << 9, 1 << 10, 1 << 11, 1 << 12, 1 << 13, 1 << 14, 1 << 15
+	0, 1 << 0, 1 << 1, 1 << 2, 1 << 3, 1 << 4, 1 << 5, 1 << 6, 1 << 7, 1 << 8, 1 << 9, 1 << 10, 1 << 11, 1 << 12, 1 << 13, 1 << 14
 };
 
-static inline unsigned extend(int val, int bits)
+static inline unsigned extend(int val, unsigned bits)
 {
+#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
+	// GCC should ideally be able to figure out that the conditional move is better, but
+	// it doesn't for various reasons, and this is pretty important for speed, so we hardcode.
+	asm("cmp %2, %0 ; cmovl %3, %0"
+	    : "=r" (val)
+	    : "0" (val),
+	      "g" (bit_thresholds[bits]),
+	      "r" (val + (-1 << bits) + 1)
+	    : "cc");
+	return val;
+#else
	if (val < bit_thresholds[bits]) {
		return val + (-1 << bits) + 1;
	} else {
		return val;
	}
+#endif
 }
 
 #endif /* !defined(_DEHUFF_H) */
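
The point of splitting read_huffman_symbol() into a thin wrapper plus
read_huffman_symbol_no_refill() is that a caller which has already topped up
the bit buffer can skip the refill check inside a hot loop. A minimal sketch
of that usage pattern, assuming read_bits() returns the bits it consumes and
that the bit buffer can hold DEHUF_TABLE_BITS + 15 bits at once; the
decode_dc_coefficient() wrapper and the 15-bit magnitude bound are
illustrative assumptions, not code from this commit:

    // One refill up front covers both the Huffman symbol and the longest
    // magnitude field that can follow it, so the two reads below can run
    // without touching the refill path again.
    static int decode_dc_coefficient(const struct huffman_table* dc_table,
                                     struct bit_source* source)
    {
        possibly_refill(source, DEHUF_TABLE_BITS + 15);
        unsigned bits = read_huffman_symbol_no_refill(dc_table, source);
        return extend(read_bits(source, bits), bits);
    }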
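
The fast path works because lookup_table_codes[] and lookup_table_length[]
are flat tables indexed by the next DEHUF_TABLE_BITS bits of the stream:
every index whose top bits match some code carries that code's symbol and
length, and all other indices carry DEHUF_SLOW_PATH. A toy, self-contained
illustration of that table shape, assuming an 8-bit table (the value of
DEHUF_TABLE_BITS is not shown in this hunk, and symbol 42 is made up):

    #include <stdio.h>

    int main(void)
    {
        int lookup_table_codes[256];
        int lookup_table_length[256];

        // Pretend symbol 42 has the 3-bit code 101. It then owns all
        // 2^(8-3) = 32 table slots of the form 101xxxxx, so one 8-bit
        // peek resolves it no matter what follows in the stream.
        for (unsigned i = 0; i < 32; ++i) {
            lookup_table_codes[(0x5u << 5) | i] = 42;
            lookup_table_length[(0x5u << 5) | i] = 3;
        }

        // Peeking the 8 bits 10111001 finds symbol 42 with length 3;
        // the decoder then consumes only the three code bits, not all eight.
        printf("code=%d length=%d\n",
               lookup_table_codes[0xb9], lookup_table_length[0xb9]);
        return 0;
    }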
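
extend() is the EXTEND procedure of the JPEG standard (figure F.12): a value
received as `bits` raw bits is mapped back to a signed coefficient, where
received values below 1 << (bits - 1) stand for negatives. The patch also
fixes bit_thresholds[], which previously skipped 1 << 1 and so was one power
of two too high from index 2 on; with the old table, for example, a 2-bit
value of 2 would have been wrongly extended to -1 instead of staying 2. A
standalone check of the mapping using the portable branch; the only changes
are writing val - (1 << bits) + 1 for the patch's val + (-1 << bits) + 1, to
avoid the formally undefined negative left shift, and returning int rather
than unsigned for readability:

    #include <assert.h>

    static const int bit_thresholds[16] = {
        0, 1 << 0, 1 << 1, 1 << 2, 1 << 3, 1 << 4, 1 << 5, 1 << 6, 1 << 7,
        1 << 8, 1 << 9, 1 << 10, 1 << 11, 1 << 12, 1 << 13, 1 << 14
    };

    static int extend(int val, unsigned bits)
    {
        if (val < bit_thresholds[bits])
            return val - (1 << bits) + 1;
        return val;
    }

    int main(void)
    {
        // 3-bit codes: received 0..3 decode to -7..-4, 4..7 to themselves.
        assert(extend(2, 3) == -5);  // 2 - 8 + 1
        assert(extend(0, 3) == -7);
        assert(extend(4, 3) == 4);
        assert(extend(7, 3) == 7);
        // A 1-bit code is a bare sign: 0 -> -1, 1 -> 1.
        assert(extend(0, 1) == -1);
        assert(extend(1, 1) == 1);
        return 0;
    }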