2 * lz4defs.h -- architecture specific defines
4 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
12 * Detects 64 bits mode
14 #if __SIZEOF_POINTER__ == 8
20 #include <linux/string.h>
21 #include <asm/unaligned.h>
/*
 * Unaligned 32/16-bit loads via the kernel's get_unaligned() helper
 * (asm/unaligned.h, included above); _p need not be naturally aligned.
 */
23 #define A32(_p) get_unaligned((u32 *) (_p))
24 #define A16(_p) get_unaligned((u16 *) (_p))
/*
 * Read a little-endian u16 from *_src, advancing _src past it.
 * NOTE(review): the statement-expression wrapper and the pointer-advance
 * lines of this macro are not visible in this chunk -- body is partial.
 */
26 #define GET_LE16_ADVANCE(_src) \
28 	u16 _r = get_unaligned_le16(_src); \
/*
 * Store _v as a little-endian u16 at *_dst, advancing _dst past it
 * (advance lines not visible in this chunk).
 */
33 #define PUT_LE16_ADVANCE(_dst, _v) \
35 	put_unaligned_le16((_v), (_dst)); \
/*
 * Token / format constants.  RUN_BITS + ML_BITS = 8: one token byte is
 * split into a literal-run length field and a match-length field
 * (ML_BITS itself is defined on a line not visible in this chunk).
 * LENGTH_LONG is the in-token length value meaning "length continues in
 * extra 255-run bytes" (see encode_length() below).
 */
39 #define LENGTH_LONG 15
42 #define ML_MASK ((1U << ML_BITS) - 1)
43 #define RUN_BITS (8 - ML_BITS)
44 #define RUN_MASK ((1U << RUN_BITS) - 1)
45 #define MEMORY_USAGE 14
47 #define SKIPSTRENGTH 6	/* NOTE(review): match-search skip heuristic, presumably -- confirm against the compressor loop */
48 #define LASTLITERALS 5
/*
 * Match-finder limits.  COPYLENGTH, MINMATCH and MAXD_LOG are defined on
 * lines not visible in this chunk.  MAXD/MAX_DISTANCE bound how far back
 * a match offset may reach; HASH_LOG (= MAXD_LOG - 1) sizes the hash
 * table at HASHTABLESIZE entries.
 * NOTE(review): LZ4_64KLIMIT looks like the input-size cutoff for the
 * 16-bit-offset (64K) compressor variant -- confirm against callers.
 */
49 #define MFLIMIT (COPYLENGTH + MINMATCH)
50 #define MINLENGTH (MFLIMIT + 1)
52 #define MAXD (1 << MAXD_LOG)
53 #define MAXD_MASK (u32)(MAXD - 1)
54 #define MAX_DISTANCE (MAXD - 1)
55 #define HASH_LOG (MAXD_LOG - 1)
56 #define HASHTABLESIZE (1 << HASH_LOG)
57 #define MAX_NB_ATTEMPTS 256
58 #define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH)
59 #define LZ4_64KLIMIT ((1<<16) + (MFLIMIT - 1))
/*
 * Multiplicative hash of the 4 bytes at p: multiply by the constant
 * 2654435761 (the 32-bit golden-ratio multiplier) and keep the top
 * `bits` bits.  HASH_VALUE() fixes the width at HASH_LOG bits so the
 * result indexes the HASHTABLESIZE-entry table above.
 */
61 #define __HASH_VALUE(p, bits) \
62 	(((A32(p)) * 2654435761U) >> (32 - (bits)))
64 #define HASH_VALUE(p) __HASH_VALUE(p, HASH_LOG)
/*
 * Copy `length` bytes from _src to _dst and advance both pointers.
 * NOTE(review): the statement-expression wrapper and the pointer-advance
 * lines are not visible in this chunk -- macro bodies are partial.
 */
66 #define MEMCPY_ADVANCE(_dst, _src, length) \
68 	typeof(length) _length = (length); \
69 	memcpy(_dst, _src, _length); \
/*
 * Byte-at-a-time variant (safe for overlapping src/dst): the loop body
 * performing the per-byte copy is not visible in this chunk.
 */
74 #define MEMCPY_ADVANCE_BYTES(_dst, _src, _length) \
76 	const u8 *_end = (_src) + (_length); \
77 	while ((_src) < _end) \
/* Word-copy granularity: one native long per step. */
81 #define STEPSIZE __SIZEOF_LONG__
/*
 * Copy one COPYLENGTH-byte packet in two word-sized MEMCPY_ADVANCE
 * steps (STEPSIZE bytes, then the remaining COPYLENGTH - STEPSIZE),
 * advancing both pointers.  COPYLENGTH is defined on a line not visible
 * in this chunk.
 */
83 #define LZ4_COPYPACKET(_src, _dst) \
85 	MEMCPY_ADVANCE(_dst, _src, STEPSIZE); \
86 	MEMCPY_ADVANCE(_dst, _src, COPYLENGTH - STEPSIZE);\
90 * Equivalent to MEMCPY_ADVANCE - except may overrun @_dst and @_src by
93 * Note: src and dst may overlap (with src < dst) - we must do the copy in
94 * STEPSIZE chunks for correctness
96 * Note also: length may be negative - we must not call memcpy if length is
97 * negative, but still adjust dst and src by length
99 #define MEMCPY_ADVANCE_CHUNKED(_dst, _src, _length) \
101 	u8 *_end = (_dst) + (_length); \
102 	while ((_dst) < _end) \
103 		LZ4_COPYPACKET(_src, _dst); \
104 	_src -= (_dst) - _end; \
/*
 * As above, but the caller supplies the end pointer directly and no
 * post-loop pointer fixup is applied (overrun past _end is permitted,
 * per the comment above).
 */
108 #define MEMCPY_ADVANCE_CHUNKED_NOFIXUP(_dst, _src, _end)\
110 	while ((_dst) < (_end)) \
111 		LZ4_COPYPACKET((_src), (_dst)); \
/*
 * Match-finder hash-table state.  `base` is the (immutable) start of the
 * source buffer that table entries are expressed relative to.
 * NOTE(review): the remaining members and closing brace are on lines not
 * visible in this chunk.
 */
114 struct lz4_hashtable {
116 	const u8 * const base;
/* Hash-table entry type: a pointer into the source buffer. */
127 #define HTYPE const u8*
/*
 * Given a nonzero XOR of two long-sized words (see common_length()
 * below), return how many leading bytes of the two words were equal:
 * the first nonzero byte of the XOR marks the first mismatch.  Two
 * variants -- count leading zeros on big-endian, trailing zeros on
 * little-endian; the endian #if/#else selecting between them is not
 * visible in this chunk.  NOTE(review): the builtins are undefined for
 * val == 0, so callers must only pass a nonzero diff.
 */
131 #define LZ4_NBCOMMONBYTES(val) (__builtin_clzl(val) >> 3)
133 #define LZ4_NBCOMMONBYTES(val) (__builtin_ctzl(val) >> 3)
/*
 * common_length() - number of leading bytes that are equal between l
 * and r, scanning l no further than l_end.
 *
 * Fast path: compare one machine word at a time while at least
 * sizeof(long) bytes remain, XOR-ing the two unaligned loads and using
 * LZ4_NBCOMMONBYTES() on the difference to locate the first mismatching
 * byte.  Tail: finish with 4-, 2- and 1-byte compares.
 *
 * NOTE(review): several interior lines are missing from this chunk --
 * the `diff` declaration, the pointer advances inside each branch, and
 * the final return -- so the body below is a fragment.
 */
136 static inline unsigned common_length(const u8 *l, const u8 *r,
137 				     const u8 *const l_end)
139 	const u8 *l_start = l;
141 	while (likely(l <= l_end - sizeof(long))) {
		/* diff (declared on a missing line) is the XOR of the two words */
143 		get_unaligned((unsigned long *) l) ^
144 		get_unaligned((unsigned long *) r);
		/* diff != 0: first differing byte found inside this word */
147 			return l + LZ4_NBCOMMONBYTES(diff) - l_start;
	/* tail compares: 4, then 2, then 1 byte */
153 	if (l <= l_end - 4 && A32(r) == A32(l)) {
158 	if (l <= l_end - 2 && A16(r) == A16(l)) {
162 	if (l <= l_end - 1 && *r == *l) {
170 static inline unsigned encode_length(u8 **op, unsigned length)
172 if (length >= LENGTH_LONG) {
173 length -= LENGTH_LONG;
175 for (; length > 254 ; length -= 255)