1 #ifndef _BCACHE_EXTENTS_H
2 #define _BCACHE_EXTENTS_H
/* Forward declarations — only pointers to these are used in this header: */
struct btree_node_iter;
struct btree_insert;		/* NOTE(review): restored; bch2_insert_fixup_extent() below takes one */
struct btree_insert_entry;
struct extent_insert_hook;
/*
 * Sort routines that merge/fix overlapping keys while draining a node
 * iterator into a bset; return the number of keys written out.
 */
struct btree_nr_keys bch2_key_sort_fix_overlapping(struct bset *,
						   struct btree *,
						   struct btree_node_iter *);
struct btree_nr_keys bch2_extent_sort_fix_overlapping(struct bch_fs *c,
						      struct bset *,
						      struct btree *,
						      struct btree_node_iter *);
/* Per-key-type operation tables (defined in extents.c): */
extern const struct bkey_ops bch2_bkey_btree_ops;
extern const struct bkey_ops bch2_bkey_extent_ops;
26 struct extent_pick_ptr {
27 struct bch_extent_crc128 crc;
28 struct bch_extent_ptr ptr;
struct extent_pick_ptr
bch2_btree_pick_ptr(struct bch_fs *, const struct btree *);

/* Pick a pointer to read from, skipping pointers on @avoid if possible: */
void bch2_extent_pick_ptr_avoiding(struct bch_fs *, struct bkey_s_c,
				   struct bch_dev *, struct extent_pick_ptr *);
39 bch2_extent_pick_ptr(struct bch_fs *c, struct bkey_s_c k,
40 struct extent_pick_ptr *ret)
42 bch2_extent_pick_ptr_avoiding(c, k, NULL, ret);
enum btree_insert_ret
bch2_insert_fixup_extent(struct btree_insert *,
			 struct btree_insert_entry *);
49 bool bch2_extent_normalize(struct bch_fs *, struct bkey_s);
50 void bch2_extent_mark_replicas_cached(struct bch_fs *,
51 struct bkey_s_extent, unsigned);
53 unsigned bch2_extent_nr_ptrs(struct bkey_s_c_extent);
54 unsigned bch2_extent_nr_dirty_ptrs(struct bkey_s_c);
56 static inline bool bkey_extent_is_data(const struct bkey *k)
60 case BCH_EXTENT_CACHED:
67 static inline bool bkey_extent_is_allocation(const struct bkey *k)
71 case BCH_EXTENT_CACHED:
79 static inline bool bkey_extent_is_cached(const struct bkey *k)
81 return k->type == BCH_EXTENT_CACHED;
84 static inline void bkey_extent_set_cached(struct bkey *k, bool cached)
86 EBUG_ON(k->type != BCH_EXTENT &&
87 k->type != BCH_EXTENT_CACHED);
89 k->type = cached ? BCH_EXTENT_CACHED : BCH_EXTENT;
92 static inline unsigned
93 __extent_entry_type(const union bch_extent_entry *e)
95 return e->type ? __ffs(e->type) : BCH_EXTENT_ENTRY_MAX;
98 static inline enum bch_extent_entry_type
99 extent_entry_type(const union bch_extent_entry *e)
101 int ret = __ffs(e->type);
103 EBUG_ON(ret < 0 || ret >= BCH_EXTENT_ENTRY_MAX);
108 static inline size_t extent_entry_bytes(const union bch_extent_entry *entry)
110 switch (extent_entry_type(entry)) {
111 case BCH_EXTENT_ENTRY_crc32:
112 return sizeof(struct bch_extent_crc32);
113 case BCH_EXTENT_ENTRY_crc64:
114 return sizeof(struct bch_extent_crc64);
115 case BCH_EXTENT_ENTRY_crc128:
116 return sizeof(struct bch_extent_crc128);
117 case BCH_EXTENT_ENTRY_ptr:
118 return sizeof(struct bch_extent_ptr);
124 static inline size_t extent_entry_u64s(const union bch_extent_entry *entry)
126 return extent_entry_bytes(entry) / sizeof(u64);
129 static inline bool extent_entry_is_ptr(const union bch_extent_entry *e)
131 return extent_entry_type(e) == BCH_EXTENT_ENTRY_ptr;
134 static inline bool extent_entry_is_crc(const union bch_extent_entry *e)
136 return !extent_entry_is_ptr(e);
139 union bch_extent_crc {
141 struct bch_extent_crc32 crc32;
142 struct bch_extent_crc64 crc64;
143 struct bch_extent_crc128 crc128;
/* downcast, preserves const */
#define to_entry(_entry)						\
({									\
	BUILD_BUG_ON(!type_is(_entry, union bch_extent_crc *) &&	\
		     !type_is(_entry, struct bch_extent_ptr *));	\
									\
	__builtin_choose_expr(						\
		(type_is_exact(_entry, const union bch_extent_crc *) ||	\
		 type_is_exact(_entry, const struct bch_extent_ptr *)),	\
		(const union bch_extent_entry *) (_entry),		\
		(union bch_extent_entry *) (_entry));			\
})
/* Unchecked upcast to a crc entry, preserving const: */
#define __entry_to_crc(_entry)						\
	__builtin_choose_expr(						\
		type_is_exact(_entry, const union bch_extent_entry *),	\
		(const union bch_extent_crc *) (_entry),		\
		(union bch_extent_crc *) (_entry))
/* Checked upcast to a crc entry (NULL passes through): */
#define entry_to_crc(_entry)						\
({									\
	EBUG_ON((_entry) && !extent_entry_is_crc(_entry));		\
									\
	__entry_to_crc(_entry);						\
})
/* Checked upcast to a pointer entry, preserving const: */
#define entry_to_ptr(_entry)						\
({									\
	EBUG_ON((_entry) && !extent_entry_is_ptr(_entry));		\
									\
	__builtin_choose_expr(						\
		type_is_exact(_entry, const union bch_extent_entry *),	\
		(const struct bch_extent_ptr *) (_entry),		\
		(struct bch_extent_ptr *) (_entry));			\
})
182 enum bch_extent_crc_type {
189 static inline enum bch_extent_crc_type
190 __extent_crc_type(const union bch_extent_crc *crc)
193 return BCH_EXTENT_CRC_NONE;
195 switch (extent_entry_type(to_entry(crc))) {
196 case BCH_EXTENT_ENTRY_crc32:
197 return BCH_EXTENT_CRC32;
198 case BCH_EXTENT_ENTRY_crc64:
199 return BCH_EXTENT_CRC64;
200 case BCH_EXTENT_ENTRY_crc128:
201 return BCH_EXTENT_CRC128;
/*
 * crc type of @_crc, resolved at compile time when the pointer type already
 * pins it down, at runtime for union bch_extent_crc *:
 */
#define extent_crc_type(_crc)						\
({									\
	BUILD_BUG_ON(!type_is(_crc, struct bch_extent_crc32 *) &&	\
		     !type_is(_crc, struct bch_extent_crc64 *) &&	\
		     !type_is(_crc, struct bch_extent_crc128 *) &&	\
		     !type_is(_crc, union bch_extent_crc *));		\
									\
	  type_is(_crc, struct bch_extent_crc32 *)  ? BCH_EXTENT_CRC32	\
	: type_is(_crc, struct bch_extent_crc64 *)  ? BCH_EXTENT_CRC64	\
	: type_is(_crc, struct bch_extent_crc128 *) ? BCH_EXTENT_CRC128	\
	: __extent_crc_type((union bch_extent_crc *) _crc);		\
})
/* Advance past one (variable sized) entry: */
#define extent_entry_next(_entry)					\
	((typeof(_entry)) ((void *) (_entry) + extent_entry_bytes(_entry)))

/* One past the last entry in extent @_e's value: */
#define extent_entry_last(_e)						\
	vstruct_idx((_e).v, bkey_val_u64s((_e).k))
/* Iterate over all entries: */

#define extent_for_each_entry_from(_e, _entry, _start)			\
	for ((_entry) = _start;						\
	     (_entry) < extent_entry_last(_e);				\
	     (_entry) = extent_entry_next(_entry))

#define extent_for_each_entry(_e, _entry)				\
	extent_for_each_entry_from(_e, _entry, (_e).v->start)
/* Iterate over crcs only: */

/* Next crc entry at or after @_p, or NULL if there are no more: */
#define extent_crc_next(_e, _p)						\
({									\
	typeof(&(_e).v->start[0]) _entry = _p;				\
									\
	while ((_entry) < extent_entry_last(_e) &&			\
	       !extent_entry_is_crc(_entry))				\
		(_entry) = extent_entry_next(_entry);			\
									\
	entry_to_crc(_entry < extent_entry_last(_e) ? _entry : NULL);	\
})
#define extent_for_each_crc(_e, _crc)					\
	for ((_crc) = extent_crc_next(_e, (_e).v->start);		\
	     (_crc);							\
	     (_crc) = extent_crc_next(_e, extent_entry_next(to_entry(_crc))))
/* Iterate over pointers, with crcs: */

/*
 * Advance to the next pointer entry passing @_filter, tracking the most
 * recent crc entry seen in @_crc; yields NULL when exhausted.
 * NOTE(review): interior reconstructed from the surrounding iterators — verify.
 */
#define extent_ptr_crc_next_filter(_e, _crc, _ptr, _filter)		\
({									\
	__label__ out;							\
	typeof(&(_e).v->start[0]) _entry;				\
									\
	extent_for_each_entry_from(_e, _entry, to_entry(_ptr))		\
		if (extent_entry_is_crc(_entry)) {			\
			(_crc) = entry_to_crc(_entry);			\
		} else {						\
			_ptr = entry_to_ptr(_entry);			\
			if (_filter)					\
				goto out;				\
		}							\
									\
	_ptr = NULL;							\
out:									\
	_ptr;								\
})
#define extent_for_each_ptr_crc_filter(_e, _ptr, _crc, _filter)		\
	for ((_crc) = NULL,						\
	     (_ptr) = &(_e).v->start->ptr;				\
	     ((_ptr) = extent_ptr_crc_next_filter(_e, _crc, _ptr, _filter));\
	     (_ptr)++)
#define extent_for_each_ptr_crc(_e, _ptr, _crc)				\
	extent_for_each_ptr_crc_filter(_e, _ptr, _crc, true)
/* Iterate over pointers only, and from a given position: */

#define extent_ptr_next_filter(_e, _ptr, _filter)			\
({									\
	typeof(__entry_to_crc(&(_e).v->start[0])) _crc;			\
									\
	extent_ptr_crc_next_filter(_e, _crc, _ptr, _filter);		\
})
#define extent_ptr_next(_e, _ptr)					\
	extent_ptr_next_filter(_e, _ptr, true)
#define extent_for_each_ptr_filter(_e, _ptr, _filter)			\
	for ((_ptr) = &(_e).v->start->ptr;				\
	     ((_ptr) = extent_ptr_next_filter(_e, _ptr, _filter));	\
	     (_ptr)++)
#define extent_for_each_ptr(_e, _ptr)					\
	extent_for_each_ptr_filter(_e, _ptr, true)
/*
 * Pointer entry preceding @_ptr (NULL yields the last pointer); found by
 * rescanning from the start, since entries can't be walked backwards.
 * NOTE(review): loop body reconstructed from the visible skeleton — verify.
 */
#define extent_ptr_prev(_e, _ptr)					\
({									\
	typeof(&(_e).v->start->ptr) _p;					\
	typeof(&(_e).v->start->ptr) _prev = NULL;			\
									\
	extent_for_each_ptr(_e, _p) {					\
		if (_p == (_ptr))					\
			break;						\
		_prev = _p;						\
	}								\
									\
	_prev;								\
})
/*
 * Use this when you'll be dropping pointers as you iterate. Quadratic,
 * unfortunately:
 */
#define extent_for_each_ptr_backwards(_e, _ptr)				\
	for ((_ptr) = extent_ptr_prev(_e, NULL);			\
	     (_ptr);							\
	     (_ptr) = extent_ptr_prev(_e, _ptr))
void bch2_extent_crc_append(struct bkey_i_extent *, unsigned, unsigned,
			    unsigned, unsigned, struct bch_csum, unsigned);
330 static inline void __extent_entry_push(struct bkey_i_extent *e)
332 union bch_extent_entry *entry = extent_entry_last(extent_i_to_s(e));
334 EBUG_ON(bkey_val_u64s(&e->k) + extent_entry_u64s(entry) >
335 BKEY_EXTENT_VAL_U64s_MAX);
337 e->k.u64s += extent_entry_u64s(entry);
340 static inline void extent_ptr_append(struct bkey_i_extent *e,
341 struct bch_extent_ptr ptr)
343 ptr.type = 1 << BCH_EXTENT_ENTRY_ptr;
344 extent_entry_last(extent_i_to_s(e))->ptr = ptr;
345 __extent_entry_push(e);
348 static inline struct bch_extent_crc128 crc_to_128(const struct bkey *k,
349 const union bch_extent_crc *crc)
353 switch (extent_crc_type(crc)) {
354 case BCH_EXTENT_CRC_NONE:
355 return (struct bch_extent_crc128) {
356 ._compressed_size = k->size - 1,
357 ._uncompressed_size = k->size - 1,
359 case BCH_EXTENT_CRC32:
360 return (struct bch_extent_crc128) {
361 .type = 1 << BCH_EXTENT_ENTRY_crc128,
362 ._compressed_size = crc->crc32._compressed_size,
363 ._uncompressed_size = crc->crc32._uncompressed_size,
364 .offset = crc->crc32.offset,
365 .csum_type = crc->crc32.csum_type,
366 .compression_type = crc->crc32.compression_type,
367 .csum.lo = crc->crc32.csum,
369 case BCH_EXTENT_CRC64:
370 return (struct bch_extent_crc128) {
371 .type = 1 << BCH_EXTENT_ENTRY_crc128,
372 ._compressed_size = crc->crc64._compressed_size,
373 ._uncompressed_size = crc->crc64._uncompressed_size,
374 .offset = crc->crc64.offset,
375 .nonce = crc->crc64.nonce,
376 .csum_type = crc->crc64.csum_type,
377 .compression_type = crc->crc64.compression_type,
378 .csum.lo = crc->crc64.csum_lo,
379 .csum.hi = crc->crc64.csum_hi,
381 case BCH_EXTENT_CRC128:
/* Compressed (on-disk) size in sectors; sizes are stored biased by one: */
#define crc_compressed_size(_k, _crc)					\
({									\
	unsigned _size = 0;						\
									\
	switch (extent_crc_type(_crc)) {				\
	case BCH_EXTENT_CRC_NONE:					\
		_size = ((const struct bkey *) (_k))->size;		\
		break;							\
	case BCH_EXTENT_CRC32:						\
		_size = ((struct bch_extent_crc32 *) _crc)		\
			->_compressed_size + 1;				\
		break;							\
	case BCH_EXTENT_CRC64:						\
		_size = ((struct bch_extent_crc64 *) _crc)		\
			->_compressed_size + 1;				\
		break;							\
	case BCH_EXTENT_CRC128:						\
		_size = ((struct bch_extent_crc128 *) _crc)		\
			->_compressed_size + 1;				\
		break;							\
	}								\
									\
	_size;								\
})
/* Uncompressed (logical) size in sectors; sizes are stored biased by one: */
#define crc_uncompressed_size(_k, _crc)					\
({									\
	unsigned _size = 0;						\
									\
	switch (extent_crc_type(_crc)) {				\
	case BCH_EXTENT_CRC_NONE:					\
		_size = ((const struct bkey *) (_k))->size;		\
		break;							\
	case BCH_EXTENT_CRC32:						\
		_size = ((struct bch_extent_crc32 *) _crc)		\
			->_uncompressed_size + 1;			\
		break;							\
	case BCH_EXTENT_CRC64:						\
		_size = ((struct bch_extent_crc64 *) _crc)		\
			->_uncompressed_size + 1;			\
		break;							\
	case BCH_EXTENT_CRC128:						\
		_size = ((struct bch_extent_crc128 *) _crc)		\
			->_uncompressed_size + 1;			\
		break;							\
	}								\
									\
	_size;								\
})
436 static inline unsigned crc_offset(const union bch_extent_crc *crc)
438 switch (extent_crc_type(crc)) {
439 case BCH_EXTENT_CRC_NONE:
441 case BCH_EXTENT_CRC32:
442 return crc->crc32.offset;
443 case BCH_EXTENT_CRC64:
444 return crc->crc64.offset;
445 case BCH_EXTENT_CRC128:
446 return crc->crc128.offset;
452 static inline unsigned crc_nonce(const union bch_extent_crc *crc)
454 switch (extent_crc_type(crc)) {
455 case BCH_EXTENT_CRC_NONE:
456 case BCH_EXTENT_CRC32:
458 case BCH_EXTENT_CRC64:
459 return crc->crc64.nonce;
460 case BCH_EXTENT_CRC128:
461 return crc->crc128.nonce;
467 static inline unsigned crc_csum_type(const union bch_extent_crc *crc)
469 switch (extent_crc_type(crc)) {
470 case BCH_EXTENT_CRC_NONE:
472 case BCH_EXTENT_CRC32:
473 return crc->crc32.csum_type;
474 case BCH_EXTENT_CRC64:
475 return crc->crc64.csum_type;
476 case BCH_EXTENT_CRC128:
477 return crc->crc128.csum_type;
483 static inline unsigned crc_compression_type(const union bch_extent_crc *crc)
485 switch (extent_crc_type(crc)) {
486 case BCH_EXTENT_CRC_NONE:
488 case BCH_EXTENT_CRC32:
489 return crc->crc32.compression_type;
490 case BCH_EXTENT_CRC64:
491 return crc->crc64.compression_type;
492 case BCH_EXTENT_CRC128:
493 return crc->crc128.compression_type;
499 static inline struct bch_csum crc_csum(const union bch_extent_crc *crc)
501 switch (extent_crc_type(crc)) {
502 case BCH_EXTENT_CRC_NONE:
503 return (struct bch_csum) { 0 };
504 case BCH_EXTENT_CRC32:
505 return (struct bch_csum) { .lo = crc->crc32.csum };
506 case BCH_EXTENT_CRC64:
507 return (struct bch_csum) {
508 .lo = crc->crc64.csum_lo,
509 .hi = crc->crc64.csum_hi,
511 case BCH_EXTENT_CRC128:
512 return crc->crc128.csum;
518 static inline unsigned bkey_extent_is_compressed(struct bkey_s_c k)
520 struct bkey_s_c_extent e;
521 const struct bch_extent_ptr *ptr;
522 const union bch_extent_crc *crc;
527 case BCH_EXTENT_CACHED:
528 e = bkey_s_c_to_extent(k);
530 extent_for_each_ptr_crc(e, ptr, crc)
532 crc_compression_type(crc) != BCH_COMPRESSION_NONE &&
533 crc_compressed_size(e.k, crc) < k.k->size)
534 ret = max_t(unsigned, ret,
535 crc_compressed_size(e.k, crc));
541 static inline unsigned extent_current_nonce(struct bkey_s_c_extent e)
543 const union bch_extent_crc *crc;
545 extent_for_each_crc(e, crc)
546 if (bch2_csum_type_is_encryption(crc_csum_type(crc)))
547 return crc_offset(crc) + crc_nonce(crc);
552 void bch2_extent_narrow_crcs(struct bkey_s_extent);
553 void bch2_extent_drop_redundant_crcs(struct bkey_s_extent);
555 void __bch2_extent_drop_ptr(struct bkey_s_extent, struct bch_extent_ptr *);
556 void bch2_extent_drop_ptr(struct bkey_s_extent, struct bch_extent_ptr *);
557 void bch2_extent_drop_ptr_idx(struct bkey_s_extent, unsigned);
559 const struct bch_extent_ptr *
560 bch2_extent_has_device(struct bkey_s_c_extent, unsigned);
562 bool bch2_cut_front(struct bpos, struct bkey_i *);
563 bool bch2_cut_back(struct bpos, struct bkey *);
564 void bch2_key_resize(struct bkey *, unsigned);
566 #endif /* _BCACHE_EXTENTS_H */