1 #ifndef _BCACHEFS_CHECKSUM_H
2 #define _BCACHEFS_CHECKSUM_H
7 #include <crypto/chacha20.h>
/* Incrementally extend a running crc64 over @_len more bytes; seed with the prior value. */
u64 bch2_crc64_update(u64, const void *, size_t);
/*
 * Per-data-type nonce tags, stored in the top nibble of the nonce's first
 * word so that e.g. an extent checksum can never be replayed as a btree
 * node checksum.  BCH_NONCE_POLY additionally tags the Poly1305 key
 * derivation (bit 31).
 *
 * 1U << 31: must be unsigned — (1 << 31) left-shifts into the sign bit of
 * a signed int, which is undefined behavior in C.
 */
#define BCH_NONCE_EXTENT	cpu_to_le32(1 << 28)
#define BCH_NONCE_BTREE		cpu_to_le32(2 << 28)
#define BCH_NONCE_JOURNAL	cpu_to_le32(3 << 28)
#define BCH_NONCE_PRIO		cpu_to_le32(4 << 28)
#define BCH_NONCE_POLY		cpu_to_le32(1U << 31)
/* Checksum (or MAC, for the chacha/poly types) a flat buffer of the given length. */
struct bch_csum bch2_checksum(struct bch_fs *, unsigned, struct nonce,
			      const void *, size_t);
/*
 * This is used for various on disk data structures - bch_sb, prio_set, bset,
 * jset: The checksum is _always_ the first field of these structs
 */
/*
 * Checksum a variable-length on-disk structure: skips the leading checksum
 * field itself (which must be the struct's first member) and covers
 * everything up to vstruct_end().
 *
 * GNU statement expression (({ ... })) so the macro yields the
 * struct bch_csum result — the wrapper was missing, leaving the macro
 * syntactically broken.
 */
#define csum_vstruct(_c, _type, _nonce, _i)				\
({									\
	const void *start = ((const void *) (_i)) + sizeof((_i)->csum);	\
	const void *end = vstruct_end(_i);				\
									\
	bch2_checksum(_c, _type, _nonce, start, end - start);		\
})
/* Encrypt/decrypt @len bytes in place with ChaCha20 keyed by @key — presumably
 * used to (un)wrap the superblock key; TODO confirm against callers. */
int bch2_chacha_encrypt_key(struct bch_key *, struct nonce, void *, size_t);
/* Fetch the filesystem's encryption key — NOTE(review): likely via the kernel
 * keyring, identified by the superblock; confirm in the .c file. */
int bch2_request_key(struct bch_sb *, struct bch_key *);
35 void bch2_encrypt(struct bch_fs *, unsigned, struct nonce,
/* Checksum the contents of a bio: */
struct bch_csum bch2_checksum_bio(struct bch_fs *, unsigned,
				  struct nonce, struct bio *);
/* Encrypt/decrypt the contents of a bio in place: */
void bch2_encrypt_bio(struct bch_fs *, unsigned,
		      struct nonce, struct bio *);
/* Turn filesystem-wide encryption off (rewrites the superblock key state): */
int bch2_disable_encryption(struct bch_fs *);
/* Turn encryption on; the bool presumably selects keyed/passphrase mode — TODO confirm. */
int bch2_enable_encryption(struct bch_fs *, bool);
/* Teardown/setup of per-fs crypto state, called from fs exit/init paths: */
void bch2_fs_encryption_exit(struct bch_fs *);
int bch2_fs_encryption_init(struct bch_fs *);
49 static inline enum bch_csum_type bch2_csum_opt_to_type(enum bch_csum_opts type)
52 case BCH_CSUM_OPT_NONE:
54 case BCH_CSUM_OPT_CRC32C:
55 return BCH_CSUM_CRC32C;
56 case BCH_CSUM_OPT_CRC64:
57 return BCH_CSUM_CRC64;
63 static inline enum bch_csum_type bch2_data_checksum_type(struct bch_fs *c)
65 if (c->sb.encryption_type)
66 return c->opts.wide_macs
67 ? BCH_CSUM_CHACHA20_POLY1305_128
68 : BCH_CSUM_CHACHA20_POLY1305_80;
70 return bch2_csum_opt_to_type(c->opts.data_checksum);
73 static inline enum bch_csum_type bch2_meta_checksum_type(struct bch_fs *c)
75 if (c->sb.encryption_type)
76 return BCH_CSUM_CHACHA20_POLY1305_128;
78 return bch2_csum_opt_to_type(c->opts.metadata_checksum);
81 static inline enum bch_compression_type
82 bch2_compression_opt_to_type(enum bch_compression_opts type)
85 case BCH_COMPRESSION_OPT_NONE:
86 return BCH_COMPRESSION_NONE;
87 case BCH_COMPRESSION_OPT_LZ4:
88 return BCH_COMPRESSION_LZ4;
89 case BCH_COMPRESSION_OPT_GZIP:
90 return BCH_COMPRESSION_GZIP;
96 static inline bool bch2_checksum_type_valid(const struct bch_fs *c,
99 if (type >= BCH_CSUM_NR)
102 if (bch2_csum_type_is_encryption(type) && !c->chacha20)
108 static const unsigned bch_crc_bytes[] = {
110 [BCH_CSUM_CRC32C] = 4,
111 [BCH_CSUM_CRC64] = 8,
112 [BCH_CSUM_CHACHA20_POLY1305_80] = 10,
113 [BCH_CSUM_CHACHA20_POLY1305_128] = 16,
116 static inline bool bch2_crc_cmp(struct bch_csum l, struct bch_csum r)
119 * XXX: need some way of preventing the compiler from optimizing this
120 * into a form that isn't constant time..
122 return ((l.lo ^ r.lo) | (l.hi ^ r.hi)) != 0;
125 /* for skipping ahead and encrypting/decrypting at an offset: */
126 static inline struct nonce nonce_add(struct nonce nonce, unsigned offset)
128 EBUG_ON(offset & (CHACHA20_BLOCK_SIZE - 1));
130 le32_add_cpu(&nonce.d[0], offset / CHACHA20_BLOCK_SIZE);
134 static inline bool bch2_key_is_encrypted(struct bch_encrypted_key *key)
136 return le64_to_cpu(key->magic) != BCH_KEY_MAGIC;
139 static inline struct nonce __bch2_sb_key_nonce(struct bch_sb *sb)
141 __le64 magic = __bch2_sb_magic(sb);
143 return (struct nonce) {{
146 [2] = ((__le32 *) &magic)[0],
147 [3] = ((__le32 *) &magic)[1],
151 static inline struct nonce bch2_sb_key_nonce(struct bch_fs *c)
153 __le64 magic = bch2_sb_magic(c);
155 return (struct nonce) {{
158 [2] = ((__le32 *) &magic)[0],
159 [3] = ((__le32 *) &magic)[1],
163 #endif /* _BCACHEFS_CHECKSUM_H */