/*
 * lz4defs.h -- architecture specific defines
 *
 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

/*
 * Detect 64-bit mode
 */
#if __SIZEOF_POINTER__ == 8
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif

#include <linux/string.h>
#include <asm/unaligned.h>

#define A32(_p) get_unaligned((u32 *) (_p))
#define A16(_p) get_unaligned((u16 *) (_p))

#define GET_LE16_ADVANCE(_src)                          \
({                                                      \
        u16 _r = get_unaligned_le16(_src);              \
        (_src) += 2;                                    \
        _r;                                             \
})

#define PUT_LE16_ADVANCE(_dst, _v)                      \
do {                                                    \
        put_unaligned_le16((_v), (_dst));               \
        (_dst) += 2;                                    \
} while (0)

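/*
 * Usage sketch (illustrative, not part of the original header): the cursor
 * passed in is advanced past the 16-bit field, so reads and writes chain
 * naturally. The variable names below are hypothetical.
 *
 *	const u8 *ip = in;
 *	u8 *op = out;
 *	u16 offset = GET_LE16_ADVANCE(ip);
 *	PUT_LE16_ADVANCE(op, offset);
 */
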
#define LENGTH_LONG             15
#define COPYLENGTH              8
#define ML_BITS                 4
#define ML_MASK                 ((1U << ML_BITS) - 1)
#define RUN_BITS                (8 - ML_BITS)
#define RUN_MASK                ((1U << RUN_BITS) - 1)
#define MEMORY_USAGE            14
#define MINMATCH                4
#define SKIPSTRENGTH            6
#define LASTLITERALS            5
#define MFLIMIT                 (COPYLENGTH + MINMATCH)
#define MINLENGTH               (MFLIMIT + 1)
#define MAXD_LOG                16
#define MAXD                    (1 << MAXD_LOG)
#define MAXD_MASK               (u32)(MAXD - 1)
#define MAX_DISTANCE            (MAXD - 1)
#define HASH_LOG                (MAXD_LOG - 1)
#define HASHTABLESIZE           (1 << HASH_LOG)
#define MAX_NB_ATTEMPTS         256
#define OPTIMAL_ML              (int)((ML_MASK-1)+MINMATCH)
#define LZ4_64KLIMIT            ((1<<16) + (MFLIMIT - 1))

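/*
 * An LZ4 sequence token packs two lengths into one byte: the high RUN_BITS
 * bits hold the literal run length, the low ML_BITS bits the match length.
 * A nibble equal to LENGTH_LONG (15) means the length continues in extra
 * bytes; see encode_length() at the end of this file.
 */
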
#define __HASH_VALUE(p, bits)                           \
        (((A32(p)) * 2654435761U) >> (32 - (bits)))

#define HASH_VALUE(p)           __HASH_VALUE(p, HASH_LOG)

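/*
 * The multiplier in __HASH_VALUE(), 2654435761U, is the multiplicative
 * hashing prime LZ4 traditionally uses: it is close to 2^32 / phi (the
 * golden ratio), so the multiply mixes the four input bytes into the top
 * bits, from which the hash keeps @bits bits.
 */
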
#define MEMCPY_ADVANCE(_dst, _src, length)              \
do {                                                    \
        typeof(length) _length = (length);              \
        memcpy(_dst, _src, _length);                    \
        _src += _length;                                \
        _dst += _length;                                \
} while (0)

#define MEMCPY_ADVANCE_BYTES(_dst, _src, _length)       \
do {                                                    \
        const u8 *_end = (_src) + (_length);            \
        while ((_src) < _end)                           \
                *_dst++ = *_src++;                      \
} while (0)

#define STEPSIZE                __SIZEOF_LONG__

#define LZ4_COPYPACKET(_src, _dst)                      \
do {                                                    \
        MEMCPY_ADVANCE(_dst, _src, STEPSIZE);           \
        MEMCPY_ADVANCE(_dst, _src, COPYLENGTH - STEPSIZE);\
} while (0)

/*
 * Equivalent to MEMCPY_ADVANCE, except that it may overrun @_dst and @_src
 * by up to COPYLENGTH bytes.
 *
 * Note: @_src and @_dst may overlap (with @_src < @_dst); the copy must be
 * done in STEPSIZE chunks for correctness.
 *
 * Note also: @_length may be negative; in that case the macro must not
 * copy anything, but it still adjusts @_dst and @_src by @_length.
 */
#define MEMCPY_ADVANCE_CHUNKED(_dst, _src, _length)     \
do {                                                    \
        u8 *_end = (_dst) + (_length);                  \
        while ((_dst) < _end)                           \
                LZ4_COPYPACKET(_src, _dst);             \
        _src -= (_dst) - _end;                          \
        _dst = _end;                                    \
} while (0)

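/*
 * Illustrative caller (hypothetical names, not from this file): a
 * decompressor copying a match must leave room for the COPYLENGTH overrun
 * in the output buffer, e.g.
 *
 *	u8 *cpy = op + match_len;
 *
 *	if (cpy <= oend - COPYLENGTH) {
 *		const u8 *match = op - offset;
 *
 *		MEMCPY_ADVANCE_CHUNKED(op, match, match_len);
 *	}
 */
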
#define MEMCPY_ADVANCE_CHUNKED_NOFIXUP(_dst, _src, _end)\
do {                                                    \
        while ((_dst) < (_end))                         \
                LZ4_COPYPACKET((_src), (_dst));         \
} while (0)

struct lz4_hashtable {
#if LZ4_ARCH64
        const u8 * const        base;
        u32                     *table;
#else
        const int               base;
        const u8                *table;
#endif
};

#if LZ4_ARCH64
#define HTYPE u32
#else   /* 32-bit */
#define HTYPE const u8*
#endif

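/*
 * This follows the usual LZ4 layout: on 64-bit, hash table entries are u32
 * offsets relative to @base, halving the table's footprint versus storing
 * pointers; on 32-bit a pointer is already 4 bytes, so entries hold the
 * positions directly and @base is a dummy.
 */
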
#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clzl(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzl(val) >> 3)
#endif

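/*
 * Given the XOR of two words, LZ4_NBCOMMONBYTES() counts the equal leading
 * bytes: on little-endian the first differing byte sits in the low-order
 * bits (count trailing zeros), on big-endian in the high-order bits (count
 * leading zeros); >> 3 converts bits to bytes. E.g. on little-endian, a
 * diff of 0xFF0000 gives ctzl = 16, i.e. two common bytes.
 */
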
static inline unsigned common_length(const u8 *l, const u8 *r,
                                     const u8 *const l_end)
{
        const u8 *l_start = l;

        while (likely(l <= l_end - sizeof(long))) {
                unsigned long diff =
                        get_unaligned((unsigned long *) l) ^
                        get_unaligned((unsigned long *) r);

                if (diff)
                        return l + LZ4_NBCOMMONBYTES(diff) - l_start;

                l += sizeof(long);
                r += sizeof(long);
        }
#if LZ4_ARCH64
        if (l <= l_end - 4 && A32(r) == A32(l)) {
                l += 4;
                r += 4;
        }
#endif
        if (l <= l_end - 2 && A16(r) == A16(l)) {
                l += 2;
                r += 2;
        }
        if (l <= l_end - 1 && *r == *l) {
                l++;
                r++;
        }

        return l - l_start;
}

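/*
 * Example: with l = "abcdXY", r = "abcdZW" and l_end = l + 6,
 * common_length() returns 4, the number of leading bytes the two buffers
 * share before the first mismatch (never reading past l_end).
 */
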
static inline unsigned encode_length(u8 **op, unsigned length)
{
        if (length >= LENGTH_LONG) {
                length -= LENGTH_LONG;

                for (; length > 254 ; length -= 255)
                        *(*op)++ = 255;
                *(*op)++ = length;
                return LENGTH_LONG;
        } else
                return length;
}
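
/*
 * Worked example: encode_length(&op, 270) returns LENGTH_LONG (15) for the
 * token nibble and appends the bytes 255, 0 to the output; a decoder
 * recovers 15 + 255 + 0 = 270.
 *
 * A minimal decode counterpart might look like this (illustrative only;
 * the name and signature are hypothetical, not part of this header):
 */
static inline unsigned decode_length_sketch(const u8 **ip, unsigned nibble)
{
        unsigned length = nibble;
        u8 b;

        if (nibble == LENGTH_LONG) {
                /* extra bytes follow; a byte of 255 means "keep reading" */
                do {
                        b = *(*ip)++;
                        length += b;
                } while (b == 255);
        }

        return length;
}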