(bucket_to_sector(ca, bucket.offset) <<
MAX_EXTENT_COMPRESS_RATIO_SHIFT) + bucket_offset);
- BUG_ON(bkey_cmp(bucket, bp_pos_to_bucket(c, ret)));
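+ /* sanity check: the conversion must round-trip back to the same bucket */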
+ BUG_ON(!bkey_eq(bucket, bp_pos_to_bucket(c, ret)));
return ret;
}
bch2_extent_ptr_to_bp(c, btree_id, level, k, p,
&bucket2, &bp2);
- if (!bpos_cmp(bucket, bucket2) &&
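+ /* this extent pointer regenerates exactly the backpointer we have */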
+ if (bpos_eq(bucket, bucket2) &&
!memcmp(&bp, &bp2, sizeof(bp)))
return true;
}
return -BCH_ERR_invalid_bkey;
}
- if (bpos_cmp(bp.k->p, bucket_pos_to_bp(c, bucket, bp.v->bucket_offset))) {
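+ /* a backpointer's key must sit at the pos derived from its bucket and offset */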
+ if (!bpos_eq(bp.k->p, bucket_pos_to_bp(c, bucket, bp.v->bucket_offset))) {
prt_str(err, "backpointer at wrong pos");
return -BCH_ERR_invalid_bkey;
}
for_each_btree_key_norestart(trans, bp_iter, BTREE_ID_backpointers,
bp_pos, 0, k, ret) {
- if (bpos_cmp(k.k->p, bp_end_pos) >= 0)
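+ /* past the end of this bucket's backpointer range */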
+ if (bpos_ge(k.k->p, bp_end_pos))
break;
if (k.k->type != KEY_TYPE_backpointer)
struct bkey_s_c alloc_k, bp_k;
int ret;
- if (bpos_cmp(bucket_pos, bucket_start) < 0 ||
- bpos_cmp(bucket_pos, bucket_end) > 0)
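+ /* bucket is outside the range covered by this pass */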
+ if (bpos_lt(bucket_pos, bucket_start) ||
+ bpos_gt(bucket_pos, bucket_end))
return 0;
bch2_trans_iter_init(trans, &alloc_iter, BTREE_ID_alloc, bucket_pos, 0);
return ret;
}
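+/*
+ * bucket_pos_to_bp() requires the bucket's device to exist; an arbitrary
+ * iterator position may not be on a live device, so pass it through unchanged:
+ */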
+static struct bpos bucket_pos_to_bp_safe(const struct bch_fs *c,
+ struct bpos bucket)
+{
+ return bch2_dev_exists2(c, bucket.inode)
+ ? bucket_pos_to_bp(c, bucket, 0)
+ : bucket;
+}
+
int bch2_get_alloc_in_memory_pos(struct btree_trans *trans,
struct bpos start, struct bpos *end)
{
bch2_trans_node_iter_init(trans, &alloc_iter, BTREE_ID_alloc,
start, 0, 1, 0);
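+ /* start may not be on a device that exists: use the _safe() helper */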
bch2_trans_node_iter_init(trans, &bp_iter, BTREE_ID_backpointers,
- bucket_pos_to_bp(trans->c, start, 0), 0, 1, 0);
+ bucket_pos_to_bp_safe(trans->c, start), 0, 1, 0);
while (1) {
alloc_k = !alloc_end
? __bch2_btree_iter_peek_and_restart(trans, &alloc_iter, 0)
break;
}
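+ /* advance whichever iterator is behind, compared in backpointer-pos space */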
- if (bpos_cmp(alloc_iter.pos, SPOS_MAX) &&
- bpos_cmp(bucket_pos_to_bp(trans->c, alloc_iter.pos, 0), bp_iter.pos) < 0) {
+ if (bpos_lt(alloc_iter.pos, SPOS_MAX) &&
+ bpos_lt(bucket_pos_to_bp_safe(trans->c, alloc_iter.pos), bp_iter.pos)) {
if (!bch2_btree_iter_advance(&alloc_iter))
alloc_end = true;
} else {
if (ret)
break;
- if (!bpos_cmp(start, POS_MIN) && bpos_cmp(end, SPOS_MAX))
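+ /* on the first pass, note when the alloc info doesn't fit in ram */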
+ if (bpos_eq(start, POS_MIN) && !bpos_eq(end, SPOS_MAX))
bch_verbose(c, "%s(): alloc info does not fit in ram, running in multiple passes with %zu nodes per pass",
__func__, btree_nodes_fit_in_ram(c));
- if (bpos_cmp(start, POS_MIN) || bpos_cmp(end, SPOS_MAX)) {
+ if (!bpos_eq(start, POS_MIN) || !bpos_eq(end, SPOS_MAX)) {
struct printbuf buf = PRINTBUF;
prt_str(&buf, "check_extents_to_backpointers(): ");
}
ret = bch2_check_extents_to_backpointers_pass(&trans, start, end);
- if (ret || !bpos_cmp(end, SPOS_MAX))
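+ /* stop after the pass that reaches the end of the keyspace */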
+ if (ret || bpos_eq(end, SPOS_MAX))
break;
start = bpos_successor(end);