1 #ifndef _BCACHEFS_EXTENTS_H
2 #define _BCACHEFS_EXTENTS_H
/*
 * NOTE(review): many continuation lines of the prototypes below are elided
 * in this view of the file; parameter lists may be incomplete as shown.
 */
struct btree_node_iter;
struct btree_insert_entry;
struct extent_insert_hook;

/* Sort + fix overlapping keys when resorting a btree node's bsets. */
struct btree_nr_keys bch2_key_sort_fix_overlapping(struct bset *,
			struct btree_node_iter *);
struct btree_nr_keys bch2_extent_sort_fix_overlapping(struct bch_fs *c,
			struct btree_node_iter *);

/* bkey method tables: one for btree-node pointers, one for data extents. */
extern const struct bkey_ops bch2_bkey_btree_ops;
extern const struct bkey_ops bch2_bkey_extent_ops;

/* Pick a replica to read from, skipping any device set in @avoid. */
struct extent_pick_ptr
bch2_btree_pick_ptr(struct bch_fs *, const struct btree *,
		    struct bch_devs_mask *avoid);

void bch2_extent_pick_ptr(struct bch_fs *, struct bkey_s_c,
			  struct bch_devs_mask *,
			  struct extent_pick_ptr *);

/* Extent insert path (return-type line elided in this view). */
bch2_insert_fixup_extent(struct btree_insert *,
			 struct btree_insert_entry *);

/*
 * NOTE(review): presumably drops stale/cached pointers and returns whether
 * the key should be kept — confirm against extents.c before relying on it.
 */
bool bch2_extent_normalize(struct bch_fs *, struct bkey_s);
void bch2_extent_mark_replicas_cached(struct bch_fs *,
				      struct bkey_s_extent, unsigned);

unsigned bch2_extent_nr_ptrs(struct bkey_s_c_extent);
unsigned bch2_extent_nr_dirty_ptrs(struct bkey_s_c);
/*
 * True if the key carries extent data. Only the BCH_EXTENT_CACHED case is
 * visible here (body partially elided); presumably BCH_EXTENT matches too —
 * TODO confirm the full case list.
 */
static inline bool bkey_extent_is_data(const struct bkey *k)
	case BCH_EXTENT_CACHED:
/*
 * True if the key represents allocated space. Includes cached extents;
 * other matching cases (e.g. reservations) are elided in this view —
 * TODO confirm.
 */
static inline bool bkey_extent_is_allocation(const struct bkey *k)
	case BCH_EXTENT_CACHED:
/* True iff the key's type is BCH_EXTENT_CACHED. */
static inline bool bkey_extent_is_cached(const struct bkey *k)
	return k->type == BCH_EXTENT_CACHED;
/*
 * Flip a key between dirty (BCH_EXTENT) and cached (BCH_EXTENT_CACHED).
 * Asserts (debug builds) that the key is already one of the two.
 */
static inline void bkey_extent_set_cached(struct bkey *k, bool cached)
	EBUG_ON(k->type != BCH_EXTENT &&
		k->type != BCH_EXTENT_CACHED);

	k->type = cached ? BCH_EXTENT_CACHED : BCH_EXTENT;
/*
 * The entry type is encoded as a single set bit in e->type; __ffs()
 * recovers the enum index. An all-zero type yields BCH_EXTENT_ENTRY_MAX
 * (no valid type bit set).
 */
static inline unsigned
__extent_entry_type(const union bch_extent_entry *e)
	return e->type ? __ffs(e->type) : BCH_EXTENT_ENTRY_MAX;
/*
 * Decode the entry's type bit, asserting (debug builds) that it decodes to
 * a valid enum value. NOTE(review): the return statement is elided in this
 * view; presumably `return ret;`.
 */
static inline enum bch_extent_entry_type
extent_entry_type(const union bch_extent_entry *e)
	int ret = __ffs(e->type);

	EBUG_ON(ret < 0 || ret >= BCH_EXTENT_ENTRY_MAX);
/*
 * Size in bytes of one extent entry, determined by its type tag.
 * Entries are variable-sized; this is what iteration steps by.
 * (default arm elided in this view.)
 */
static inline size_t extent_entry_bytes(const union bch_extent_entry *entry)
	switch (extent_entry_type(entry)) {
	case BCH_EXTENT_ENTRY_crc32:
		return sizeof(struct bch_extent_crc32);
	case BCH_EXTENT_ENTRY_crc64:
		return sizeof(struct bch_extent_crc64);
	case BCH_EXTENT_ENTRY_crc128:
		return sizeof(struct bch_extent_crc128);
	case BCH_EXTENT_ENTRY_ptr:
		return sizeof(struct bch_extent_ptr);
/* Entry size in u64s — bkey values are sized in 64-bit words. */
static inline size_t extent_entry_u64s(const union bch_extent_entry *entry)
	return extent_entry_bytes(entry) / sizeof(u64);
/* True if the entry is a device pointer. */
static inline bool extent_entry_is_ptr(const union bch_extent_entry *e)
	return extent_entry_type(e) == BCH_EXTENT_ENTRY_ptr;

/* Entries here are either ptrs or crcs, so "not a ptr" means crc. */
static inline bool extent_entry_is_crc(const union bch_extent_entry *e)
	return !extent_entry_is_ptr(e);
/* Overlay of the three crc entry variants (32/64/128-bit checksum fields). */
union bch_extent_crc {
	struct bch_extent_crc32 crc32;
	struct bch_extent_crc64 crc64;
	struct bch_extent_crc128 crc128;
/*
 * Convert a typed entry pointer (union bch_extent_crc * or
 * struct bch_extent_ptr *) to the generic union bch_extent_entry *,
 * preserving const-ness via __builtin_choose_expr. Rejects any other
 * pointer type at compile time.
 * NOTE(review): the ({ ... }) statement-expression delimiters appear to be
 * elided in this view.
 */
#define to_entry(_entry)						\
	BUILD_BUG_ON(!type_is(_entry, union bch_extent_crc *) &&	\
		     !type_is(_entry, struct bch_extent_ptr *));	\
	__builtin_choose_expr(						\
		(type_is_exact(_entry, const union bch_extent_crc *) ||	\
		 type_is_exact(_entry, const struct bch_extent_ptr *)),	\
		(const union bch_extent_entry *) (_entry),		\
		(union bch_extent_entry *) (_entry));			\
/* Generic entry -> crc pointer, const-preserving; no validity check. */
#define __entry_to_crc(_entry)						\
	__builtin_choose_expr(						\
		type_is_exact(_entry, const union bch_extent_entry *),	\
		(const union bch_extent_crc *) (_entry),		\
		(union bch_extent_crc *) (_entry))

/* Checked variant: asserts @_entry is NULL or genuinely a crc entry. */
#define entry_to_crc(_entry)						\
	EBUG_ON((_entry) && !extent_entry_is_crc(_entry));		\
	__entry_to_crc(_entry);						\
/*
 * Generic entry -> struct bch_extent_ptr *, const-preserving; asserts
 * @_entry is NULL or really a ptr entry.
 */
#define entry_to_ptr(_entry)						\
	EBUG_ON((_entry) && !extent_entry_is_ptr(_entry));		\
	__builtin_choose_expr(						\
		type_is_exact(_entry, const union bch_extent_entry *),	\
		(const struct bch_extent_ptr *) (_entry),		\
		(struct bch_extent_ptr *) (_entry));			\
173 enum bch_extent_crc_type {
/*
 * Runtime classification of a crc entry pointer. The early
 * BCH_EXTENT_CRC_NONE return is presumably guarded by an elided `if (!crc)`
 * check — a NULL crc meaning "no checksum/compression info".
 */
static inline enum bch_extent_crc_type
__extent_crc_type(const union bch_extent_crc *crc)
	return BCH_EXTENT_CRC_NONE;

	switch (extent_entry_type(to_entry(crc))) {
	case BCH_EXTENT_ENTRY_crc32:
		return BCH_EXTENT_CRC32;
	case BCH_EXTENT_ENTRY_crc64:
		return BCH_EXTENT_CRC64;
	case BCH_EXTENT_ENTRY_crc128:
		return BCH_EXTENT_CRC128;
/*
 * Classify @_crc, resolving at compile time when it is statically one of
 * the three concrete crc structs, otherwise deferring to the runtime
 * __extent_crc_type(). Rejects unrelated pointer types at build time.
 */
#define extent_crc_type(_crc)						\
	BUILD_BUG_ON(!type_is(_crc, struct bch_extent_crc32 *) &&	\
		     !type_is(_crc, struct bch_extent_crc64 *) &&	\
		     !type_is(_crc, struct bch_extent_crc128 *) &&	\
		     !type_is(_crc, union bch_extent_crc *));		\
	type_is(_crc, struct bch_extent_crc32 *) ? BCH_EXTENT_CRC32	\
	: type_is(_crc, struct bch_extent_crc64 *) ? BCH_EXTENT_CRC64	\
	: type_is(_crc, struct bch_extent_crc128 *) ? BCH_EXTENT_CRC128	\
	: __extent_crc_type((union bch_extent_crc *) _crc);		\
/* Advance to the next entry: entries are variable-sized, so step by bytes. */
#define extent_entry_next(_entry)					\
	((typeof(_entry)) ((void *) (_entry) + extent_entry_bytes(_entry)))

/* One past the last entry in the extent's value. */
#define extent_entry_last(_e)						\
	vstruct_idx((_e).v, bkey_val_u64s((_e).k))
/* Iterate over all entries (ptrs and crcs alike): */

/* From an arbitrary starting entry @_start up to the end of the value. */
#define extent_for_each_entry_from(_e, _entry, _start)			\
	for ((_entry) = _start;						\
	     (_entry) < extent_entry_last(_e);				\
	     (_entry) = extent_entry_next(_entry))

/* From the first entry. */
#define extent_for_each_entry(_e, _entry)				\
	extent_for_each_entry_from(_e, _entry, (_e).v->start)
/* Iterate over crc entries only: */

/*
 * Find the next crc entry at or after @_p; yields NULL (via entry_to_crc)
 * once the end of the value is reached.
 */
#define extent_crc_next(_e, _p)						\
	typeof(&(_e).v->start[0]) _entry = _p;				\
	while ((_entry) < extent_entry_last(_e) &&			\
	       !extent_entry_is_crc(_entry))				\
		(_entry) = extent_entry_next(_entry);			\
	entry_to_crc(_entry < extent_entry_last(_e) ? _entry : NULL);	\

/* Loop @_crc over every crc entry (loop condition line elided here). */
#define extent_for_each_crc(_e, _crc)					\
	for ((_crc) = extent_crc_next(_e, (_e).v->start);		\
	     (_crc) = extent_crc_next(_e, extent_entry_next(to_entry(_crc))))
/* Iterate over pointers, tracking the crc entry currently in effect: */

/*
 * Scan forward from @_ptr; crc entries seen along the way update @_crc
 * (a crc applies to the ptr entries that follow it), ptr entries matching
 * @_filter are yielded. Tail of the macro body is elided in this view.
 */
#define extent_ptr_crc_next_filter(_e, _crc, _ptr, _filter)		\
	typeof(&(_e).v->start[0]) _entry;				\
	extent_for_each_entry_from(_e, _entry, to_entry(_ptr))		\
		if (extent_entry_is_crc(_entry)) {			\
			(_crc) = entry_to_crc(_entry);			\
			_ptr = entry_to_ptr(_entry);			\

/* Loop (@_ptr, @_crc) over filtered pointers, from the first entry. */
#define extent_for_each_ptr_crc_filter(_e, _ptr, _crc, _filter)		\
	for ((_crc) = NULL,						\
	     (_ptr) = &(_e).v->start->ptr;				\
	     ((_ptr) = extent_ptr_crc_next_filter(_e, _crc, _ptr, _filter));\

/* Unfiltered variant: visit every pointer with its crc. */
#define extent_for_each_ptr_crc(_e, _ptr, _crc)				\
	extent_for_each_ptr_crc_filter(_e, _ptr, _crc, true)
/* Iterate over pointers only, and from a given position: */

/* Next filtered pointer; the crc tracked internally is discarded. */
#define extent_ptr_next_filter(_e, _ptr, _filter)			\
	typeof(__entry_to_crc(&(_e).v->start[0])) _crc;			\
	extent_ptr_crc_next_filter(_e, _crc, _ptr, _filter);		\

#define extent_ptr_next(_e, _ptr)					\
	extent_ptr_next_filter(_e, _ptr, true)

/* Loop @_ptr over filtered pointers. */
#define extent_for_each_ptr_filter(_e, _ptr, _filter)			\
	for ((_ptr) = &(_e).v->start->ptr;				\
	     ((_ptr) = extent_ptr_next_filter(_e, _ptr, _filter));	\

/* Loop @_ptr over every pointer. */
#define extent_for_each_ptr(_e, _ptr)					\
	extent_for_each_ptr_filter(_e, _ptr, true)
/*
 * Pointer preceding @_ptr (NULL @_ptr yields the last pointer), found by a
 * forward rescan — body tail elided in this view.
 */
#define extent_ptr_prev(_e, _ptr)					\
	typeof(&(_e).v->start->ptr) _p;					\
	typeof(&(_e).v->start->ptr) _prev = NULL;			\
	extent_for_each_ptr(_e, _p) {					\

/*
 * Use this when you'll be dropping pointers as you iterate. Quadratic,
 * since each step rescans from the start — fine for small pointer lists.
 */
#define extent_for_each_ptr_backwards(_e, _ptr)				\
	for ((_ptr) = extent_ptr_prev(_e, NULL);			\
	     (_ptr) = extent_ptr_prev(_e, _ptr))
/* Append a crc entry describing (sizes, nonce, csum type/value, compression). */
void bch2_extent_crc_append(struct bkey_i_extent *, unsigned, unsigned,
			    unsigned, unsigned, struct bch_csum, unsigned);

/*
 * Commit the entry already written at the end of @e's value by growing
 * k.u64s over it. Asserts the value still fits in a bkey.
 */
static inline void __extent_entry_push(struct bkey_i_extent *e)
	union bch_extent_entry *entry = extent_entry_last(extent_i_to_s(e));

	EBUG_ON(bkey_val_u64s(&e->k) + extent_entry_u64s(entry) >
		BKEY_EXTENT_VAL_U64s_MAX);

	e->k.u64s += extent_entry_u64s(entry);
/*
 * Append a device pointer to @e: tag it as a ptr entry, write it past the
 * current last entry, then push it into the key's size.
 */
static inline void extent_ptr_append(struct bkey_i_extent *e,
				     struct bch_extent_ptr ptr)
	ptr.type = 1 << BCH_EXTENT_ENTRY_ptr;
	extent_entry_last(extent_i_to_s(e))->ptr = ptr;
	__extent_entry_push(e);
/*
 * Widen any crc variant to the 128-bit representation so callers can work
 * with one format. For CRC_NONE a synthetic crc covering the whole key is
 * built (sizes are stored biased by -1). The CRC128 case and closing
 * braces are elided in this view — presumably it returns *crc unchanged.
 */
static inline struct bch_extent_crc128 crc_to_128(const struct bkey *k,
						  const union bch_extent_crc *crc)
	switch (extent_crc_type(crc)) {
	case BCH_EXTENT_CRC_NONE:
		return (struct bch_extent_crc128) {
			._compressed_size	= k->size - 1,
			._uncompressed_size	= k->size - 1,
	case BCH_EXTENT_CRC32:
		return (struct bch_extent_crc128) {
			.type			= 1 << BCH_EXTENT_ENTRY_crc128,
			._compressed_size	= crc->crc32._compressed_size,
			._uncompressed_size	= crc->crc32._uncompressed_size,
			.offset			= crc->crc32.offset,
			.csum_type		= crc->crc32.csum_type,
			.compression_type	= crc->crc32.compression_type,
			.csum.lo		= crc->crc32.csum,
	case BCH_EXTENT_CRC64:
		return (struct bch_extent_crc128) {
			.type			= 1 << BCH_EXTENT_ENTRY_crc128,
			._compressed_size	= crc->crc64._compressed_size,
			._uncompressed_size	= crc->crc64._uncompressed_size,
			.offset			= crc->crc64.offset,
			.nonce			= crc->crc64.nonce,
			.csum_type		= crc->crc64.csum_type,
			.compression_type	= crc->crc64.compression_type,
			.csum.lo		= crc->crc64.csum_lo,
			.csum.hi		= crc->crc64.csum_hi,
	case BCH_EXTENT_CRC128:
/*
 * On-disk (compressed) size in sectors for the region @_crc covers; with
 * no crc entry it is just the key's size. Stored values are biased by -1,
 * hence the "+ 1". A macro (not a function) so it works on any crc struct
 * via extent_crc_type()'s compile-time dispatch.
 */
#define crc_compressed_size(_k, _crc)					\
	unsigned _size = 0;						\
	switch (extent_crc_type(_crc)) {				\
	case BCH_EXTENT_CRC_NONE:					\
		_size = ((const struct bkey *) (_k))->size;		\
	case BCH_EXTENT_CRC32:						\
		_size = ((struct bch_extent_crc32 *) _crc)		\
			->_compressed_size + 1;				\
	case BCH_EXTENT_CRC64:						\
		_size = ((struct bch_extent_crc64 *) _crc)		\
			->_compressed_size + 1;				\
	case BCH_EXTENT_CRC128:						\
		_size = ((struct bch_extent_crc128 *) _crc)		\
			->_compressed_size + 1;				\
/*
 * Logical (uncompressed) size in sectors for the region @_crc covers;
 * mirror of crc_compressed_size() using the _uncompressed_size field
 * (also stored biased by -1).
 */
#define crc_uncompressed_size(_k, _crc)					\
	unsigned _size = 0;						\
	switch (extent_crc_type(_crc)) {				\
	case BCH_EXTENT_CRC_NONE:					\
		_size = ((const struct bkey *) (_k))->size;		\
	case BCH_EXTENT_CRC32:						\
		_size = ((struct bch_extent_crc32 *) _crc)		\
			->_uncompressed_size + 1;			\
	case BCH_EXTENT_CRC64:						\
		_size = ((struct bch_extent_crc64 *) _crc)		\
			->_uncompressed_size + 1;			\
	case BCH_EXTENT_CRC128:						\
		_size = ((struct bch_extent_crc128 *) _crc)		\
			->_uncompressed_size + 1;			\
/*
 * Offset field of any crc variant; the CRC_NONE return (presumably 0) and
 * default arm are elided in this view.
 */
static inline unsigned crc_offset(const union bch_extent_crc *crc)
	switch (extent_crc_type(crc)) {
	case BCH_EXTENT_CRC_NONE:
	case BCH_EXTENT_CRC32:
		return crc->crc32.offset;
	case BCH_EXTENT_CRC64:
		return crc->crc64.offset;
	case BCH_EXTENT_CRC128:
		return crc->crc128.offset;
/*
 * Encryption nonce of a crc entry. crc32 entries have no nonce field, so
 * NONE and CRC32 share a (elided, presumably 0) return.
 */
static inline unsigned crc_nonce(const union bch_extent_crc *crc)
	switch (extent_crc_type(crc)) {
	case BCH_EXTENT_CRC_NONE:
	case BCH_EXTENT_CRC32:
	case BCH_EXTENT_CRC64:
		return crc->crc64.nonce;
	case BCH_EXTENT_CRC128:
		return crc->crc128.nonce;
/* Checksum type of any crc variant (CRC_NONE return elided, presumably 0). */
static inline unsigned crc_csum_type(const union bch_extent_crc *crc)
	switch (extent_crc_type(crc)) {
	case BCH_EXTENT_CRC_NONE:
	case BCH_EXTENT_CRC32:
		return crc->crc32.csum_type;
	case BCH_EXTENT_CRC64:
		return crc->crc64.csum_type;
	case BCH_EXTENT_CRC128:
		return crc->crc128.csum_type;
/* Compression type of any crc variant (CRC_NONE return elided, presumably 0). */
static inline unsigned crc_compression_type(const union bch_extent_crc *crc)
	switch (extent_crc_type(crc)) {
	case BCH_EXTENT_CRC_NONE:
	case BCH_EXTENT_CRC32:
		return crc->crc32.compression_type;
	case BCH_EXTENT_CRC64:
		return crc->crc64.compression_type;
	case BCH_EXTENT_CRC128:
		return crc->crc128.compression_type;
/*
 * Checksum value of any crc variant widened to struct bch_csum; zero when
 * there is no crc entry. crc32 fills only .lo; crc64 carries split lo/hi.
 */
static inline struct bch_csum crc_csum(const union bch_extent_crc *crc)
	switch (extent_crc_type(crc)) {
	case BCH_EXTENT_CRC_NONE:
		return (struct bch_csum) { 0 };
	case BCH_EXTENT_CRC32:
		return (struct bch_csum) { .lo = crc->crc32.csum };
	case BCH_EXTENT_CRC64:
		return (struct bch_csum) {
			.lo = crc->crc64.csum_lo,
			.hi = crc->crc64.csum_hi,
	case BCH_EXTENT_CRC128:
		return crc->crc128.csum;
/*
 * If the key points at compressed data, return the largest compressed
 * size (in sectors) among its pointers; 0 otherwise. Only replicas whose
 * crc covers more than this key (crc_compressed_size < k.k->size after
 * splitting) count — TODO(review) confirm against the elided condition
 * lines; ret declaration/return and switch head are elided in this view.
 */
static inline unsigned bkey_extent_is_compressed(struct bkey_s_c k)
	struct bkey_s_c_extent e;
	const struct bch_extent_ptr *ptr;
	const union bch_extent_crc *crc;

	case BCH_EXTENT_CACHED:
		e = bkey_s_c_to_extent(k);

		extent_for_each_ptr_crc(e, ptr, crc)
			crc_compression_type(crc) != BCH_COMPRESSION_NONE &&
			    crc_compressed_size(e.k, crc) < k.k->size)
				ret = max_t(unsigned, ret,
					    crc_compressed_size(e.k, crc));
/*
 * Nonce for the data this extent currently points to: first encrypted crc
 * entry's nonce advanced by its offset. Fallback return for extents with
 * no encrypted crc is elided in this view.
 */
static inline unsigned extent_current_nonce(struct bkey_s_c_extent e)
	const union bch_extent_crc *crc;

	extent_for_each_crc(e, crc)
		if (bch2_csum_type_is_encryption(crc_csum_type(crc)))
			return crc_offset(crc) + crc_nonce(crc);
/* crc maintenance: tighten/deduplicate crc entries after ptr drops. */
void bch2_extent_narrow_crcs(struct bkey_s_extent);
void bch2_extent_drop_redundant_crcs(struct bkey_s_extent);

/* Remove a pointer from an extent (by pointer, or by index). */
void __bch2_extent_drop_ptr(struct bkey_s_extent, struct bch_extent_ptr *);
void bch2_extent_drop_ptr(struct bkey_s_extent, struct bch_extent_ptr *);
void bch2_extent_drop_ptr_idx(struct bkey_s_extent, unsigned);

/* Lookup helpers over an extent's pointer list: */
const struct bch_extent_ptr *
bch2_extent_has_device(struct bkey_s_c_extent, unsigned);
struct bch_extent_ptr *
bch2_extent_find_ptr(struct bch_fs *, struct bkey_s_extent,
		     struct bch_extent_ptr);
struct bch_extent_ptr *
bch2_extent_find_matching_ptr(struct bch_fs *, struct bkey_s_extent,
			      struct bkey_s_c_extent);

/* Trim a key in place at the front/back; adjust a key's size. */
bool bch2_cut_front(struct bpos, struct bkey_i *);
bool bch2_cut_back(struct bpos, struct bkey *);
void bch2_key_resize(struct bkey *, unsigned);
563 #endif /* _BCACHEFS_EXTENTS_H */