/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
17 #ifndef _LINUX_CRYPTO_H
18 #define _LINUX_CRYPTO_H
20 #include <linux/atomic.h>
21 #include <linux/kernel.h>
22 #include <linux/list.h>
23 #include <linux/bug.h>
24 #include <linux/slab.h>
25 #include <linux/string.h>
/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so retains those aliases as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)       \
                __MODULE_INFO(alias, alias_userspace, name);    \
                __MODULE_INFO(alias, alias_crypto, "crypto-" name)
/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK            0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER          0x00000001
#define CRYPTO_ALG_TYPE_AEAD            0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER       0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER      0x00000005
#define CRYPTO_ALG_TYPE_SKCIPHER        0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER       0x00000006
#define CRYPTO_ALG_TYPE_KPP             0x00000008
#define CRYPTO_ALG_TYPE_RNG             0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER        0x0000000d
#define CRYPTO_ALG_TYPE_DIGEST          0x0000000e
#define CRYPTO_ALG_TYPE_HASH            0x0000000e
#define CRYPTO_ALG_TYPE_SHASH           0x0000000e
#define CRYPTO_ALG_TYPE_AHASH           0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK       0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK      0x0000000e
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK  0x0000000c

#define CRYPTO_ALG_ASYNC                0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK        0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV                0x00000200

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE             0x00000800

/* Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via instruction set or so.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY     0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API
 */
#define CRYPTO_ALG_INTERNAL             0x00002000

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK             0x000fff00
#define CRYPTO_TFM_RES_MASK             0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY         0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP        0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG      0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY         0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN      0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED    0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN    0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS        0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME             64
/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types. In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
/*
 * Forward declarations for types used below.
 * NOTE(review): extraction lost part of this declaration list; scatterlist
 * and crypto_type are re-declared here because later code references them.
 */
struct scatterlist;
struct crypto_type;
struct crypto_blkcipher;
struct skcipher_givcrypt_request;
130 struct blkcipher_desc {
131 struct crypto_blkcipher *tfm;
137 struct crypto_tfm *tfm;
138 void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
139 unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
140 const u8 *src, unsigned int nbytes);
144 struct blkcipher_alg {
145 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
146 unsigned int keylen);
147 int (*encrypt)(struct blkcipher_desc *desc,
148 struct scatterlist *dst, struct scatterlist *src,
149 unsigned int nbytes);
150 int (*decrypt)(struct blkcipher_desc *desc,
151 struct scatterlist *dst, struct scatterlist *src,
152 unsigned int nbytes);
156 unsigned int min_keysize;
157 unsigned int max_keysize;
162 unsigned int cia_min_keysize;
163 unsigned int cia_max_keysize;
164 int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
165 unsigned int keylen);
166 void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
167 void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
170 struct compress_alg {
171 int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
172 unsigned int slen, u8 *dst, unsigned int *dlen);
173 int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
174 unsigned int slen, u8 *dst, unsigned int *dlen);
178 #define cra_blkcipher cra_u.blkcipher
179 #define cra_cipher cra_u.cipher
180 #define cra_compress cra_u.compress
183 struct list_head cra_list;
184 struct list_head cra_users;
187 unsigned int cra_blocksize;
188 unsigned int cra_ctxsize;
189 unsigned int cra_alignmask;
194 char cra_name[CRYPTO_MAX_ALG_NAME];
195 char cra_driver_name[CRYPTO_MAX_ALG_NAME];
197 const struct crypto_type *cra_type;
200 struct blkcipher_alg blkcipher;
201 struct cipher_alg cipher;
202 struct compress_alg compress;
205 int (*cra_init)(struct crypto_tfm *tfm);
206 void (*cra_exit)(struct crypto_tfm *tfm);
207 void (*cra_destroy)(struct crypto_alg *alg);
209 struct module *cra_module;
210 } CRYPTO_MINALIGN_ATTR;
213 * Algorithm registration interface.
215 int crypto_register_alg(struct crypto_alg *alg);
216 int crypto_unregister_alg(struct crypto_alg *alg);
217 int crypto_register_algs(struct crypto_alg *algs, int count);
218 int crypto_unregister_algs(struct crypto_alg *algs, int count);
221 * Algorithm query interface.
223 int crypto_has_alg(const char *name, u32 type, u32 mask);
226 * Transforms: user-instantiated objects which encapsulate algorithms
227 * and core processing logic. Managed via crypto_alloc_*() and
228 * crypto_free_*(), as well as the various helpers below.
231 struct blkcipher_tfm {
233 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
234 unsigned int keylen);
235 int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
236 struct scatterlist *src, unsigned int nbytes);
237 int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
238 struct scatterlist *src, unsigned int nbytes);
242 int (*cit_setkey)(struct crypto_tfm *tfm,
243 const u8 *key, unsigned int keylen);
244 void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
245 void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
248 struct compress_tfm {
249 int (*cot_compress)(struct crypto_tfm *tfm,
250 const u8 *src, unsigned int slen,
251 u8 *dst, unsigned int *dlen);
252 int (*cot_decompress)(struct crypto_tfm *tfm,
253 const u8 *src, unsigned int slen,
254 u8 *dst, unsigned int *dlen);
257 #define crt_blkcipher crt_u.blkcipher
258 #define crt_cipher crt_u.cipher
259 #define crt_compress crt_u.compress
266 struct blkcipher_tfm blkcipher;
267 struct cipher_tfm cipher;
268 struct compress_tfm compress;
271 void (*exit)(struct crypto_tfm *tfm);
273 struct crypto_alg *__crt_alg;
275 void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
278 struct crypto_blkcipher {
279 struct crypto_tfm base;
282 struct crypto_cipher {
283 struct crypto_tfm base;
287 struct crypto_tfm base;
298 #define CRYPTOA_MAX (__CRYPTOA_MAX - 1)
300 /* Maximum number of (rtattr) parameters for each template. */
301 #define CRYPTO_MAX_ATTRS 32
303 struct crypto_attr_alg {
304 char name[CRYPTO_MAX_ALG_NAME];
307 struct crypto_attr_type {
312 struct crypto_attr_u32 {
317 * Transform user interface.
320 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
321 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);
323 static inline void crypto_free_tfm(struct crypto_tfm *tfm)
325 return crypto_destroy_tfm(tfm, tfm);
328 int alg_test(const char *driver, const char *alg, u32 type, u32 mask);
331 * Transform helpers which query the underlying algorithm.
333 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
335 return tfm->__crt_alg->cra_name;
338 static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
340 return tfm->__crt_alg->cra_driver_name;
343 static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
345 return tfm->__crt_alg->cra_priority;
348 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
350 return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
353 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
355 return tfm->__crt_alg->cra_blocksize;
358 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
360 return tfm->__crt_alg->cra_alignmask;
363 static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
365 return tfm->crt_flags;
368 static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
370 tfm->crt_flags |= flags;
373 static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
375 tfm->crt_flags &= ~flags;
378 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
380 return tfm->__crt_ctx;
383 static inline unsigned int crypto_tfm_ctx_alignment(void)
385 struct crypto_tfm *tfm;
386 return __alignof__(tfm->__crt_ctx);
389 static inline u32 crypto_skcipher_type(u32 type)
391 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
392 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
396 static inline u32 crypto_skcipher_mask(u32 mask)
398 mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
399 mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
/**
 * DOC: Synchronous Block Cipher API
 *
 * The synchronous block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_BLKCIPHER (listed as type "blkcipher" in /proc/crypto).
 *
 * Synchronous calls have a context in the tfm. But since a single tfm can be
 * used in multiple calls and in parallel, this info should not be changeable
 * (unless a lock is used). This applies, for example, to the symmetric key.
 * However, the IV is changeable, so there is an iv field in the blkcipher_tfm
 * structure for the synchronous blkcipher api. So, it's the only state info
 * that can be kept for synchronous calls without using a big lock across a tfm.
 *
 * The block cipher API allows the use of a complete cipher, i.e. a cipher
 * consisting of a template (a block chaining mode) and a single block cipher
 * primitive (e.g. AES).
 *
 * The plaintext data buffer and the ciphertext data buffer are pointed to
 * by using scatter/gather lists. The cipher operation is performed
 * on all segments of the provided scatter/gather lists.
 *
 * The kernel crypto API supports a cipher operation "in-place" which means that
 * the caller may provide the same scatter/gather list for the plaintext and
 * cipher text. After the completion of the cipher operation, the plaintext
 * data is replaced with the ciphertext data in case of an encryption and vice
 * versa for a decryption. The caller must ensure that the scatter/gather lists
 * for the output data point to sufficiently large buffers, i.e. multiples of
 * the block size of the cipher.
 */
433 static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
434 struct crypto_tfm *tfm)
436 return (struct crypto_blkcipher *)tfm;
439 static inline struct crypto_blkcipher *crypto_blkcipher_cast(
440 struct crypto_tfm *tfm)
442 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
443 return __crypto_blkcipher_cast(tfm);
447 * crypto_alloc_blkcipher() - allocate synchronous block cipher handle
448 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
450 * @type: specifies the type of the cipher
451 * @mask: specifies the mask for the cipher
453 * Allocate a cipher handle for a block cipher. The returned struct
454 * crypto_blkcipher is the cipher handle that is required for any subsequent
455 * API invocation for that block cipher.
457 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
458 * of an error, PTR_ERR() returns the error code.
460 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
461 const char *alg_name, u32 type, u32 mask)
463 type &= ~CRYPTO_ALG_TYPE_MASK;
464 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
465 mask |= CRYPTO_ALG_TYPE_MASK;
467 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
470 static inline struct crypto_tfm *crypto_blkcipher_tfm(
471 struct crypto_blkcipher *tfm)
477 * crypto_free_blkcipher() - zeroize and free the block cipher handle
478 * @tfm: cipher handle to be freed
480 static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
482 crypto_free_tfm(crypto_blkcipher_tfm(tfm));
486 * crypto_has_blkcipher() - Search for the availability of a block cipher
487 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
489 * @type: specifies the type of the cipher
490 * @mask: specifies the mask for the cipher
492 * Return: true when the block cipher is known to the kernel crypto API; false
495 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
497 type &= ~CRYPTO_ALG_TYPE_MASK;
498 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
499 mask |= CRYPTO_ALG_TYPE_MASK;
501 return crypto_has_alg(alg_name, type, mask);
505 * crypto_blkcipher_name() - return the name / cra_name from the cipher handle
506 * @tfm: cipher handle
508 * Return: The character string holding the name of the cipher
510 static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
512 return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
515 static inline struct blkcipher_tfm *crypto_blkcipher_crt(
516 struct crypto_blkcipher *tfm)
518 return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
521 static inline struct blkcipher_alg *crypto_blkcipher_alg(
522 struct crypto_blkcipher *tfm)
524 return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
528 * crypto_blkcipher_ivsize() - obtain IV size
529 * @tfm: cipher handle
531 * The size of the IV for the block cipher referenced by the cipher handle is
532 * returned. This IV size may be zero if the cipher does not need an IV.
534 * Return: IV size in bytes
536 static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
538 return crypto_blkcipher_alg(tfm)->ivsize;
542 * crypto_blkcipher_blocksize() - obtain block size of cipher
543 * @tfm: cipher handle
545 * The block size for the block cipher referenced with the cipher handle is
546 * returned. The caller may use that information to allocate appropriate
547 * memory for the data returned by the encryption or decryption operation.
549 * Return: block size of cipher
551 static inline unsigned int crypto_blkcipher_blocksize(
552 struct crypto_blkcipher *tfm)
554 return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
557 static inline unsigned int crypto_blkcipher_alignmask(
558 struct crypto_blkcipher *tfm)
560 return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
563 static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
565 return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
568 static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
571 crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
574 static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
577 crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
581 * crypto_blkcipher_setkey() - set key for cipher
582 * @tfm: cipher handle
583 * @key: buffer holding the key
584 * @keylen: length of the key in bytes
586 * The caller provided key is set for the block cipher referenced by the cipher
589 * Note, the key length determines the cipher type. Many block ciphers implement
590 * different cipher modes depending on the key size, such as AES-128 vs AES-192
591 * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
594 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
596 static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
597 const u8 *key, unsigned int keylen)
599 return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
604 * crypto_blkcipher_encrypt() - encrypt plaintext
605 * @desc: reference to the block cipher handle with meta data
606 * @dst: scatter/gather list that is filled by the cipher operation with the
608 * @src: scatter/gather list that holds the plaintext
609 * @nbytes: number of bytes of the plaintext to encrypt.
611 * Encrypt plaintext data using the IV set by the caller with a preceding
612 * call of crypto_blkcipher_set_iv.
614 * The blkcipher_desc data structure must be filled by the caller and can
615 * reside on the stack. The caller must fill desc as follows: desc.tfm is filled
616 * with the block cipher handle; desc.flags is filled with either
617 * CRYPTO_TFM_REQ_MAY_SLEEP or 0.
619 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
621 static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
622 struct scatterlist *dst,
623 struct scatterlist *src,
626 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
627 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
631 * crypto_blkcipher_encrypt_iv() - encrypt plaintext with dedicated IV
632 * @desc: reference to the block cipher handle with meta data
633 * @dst: scatter/gather list that is filled by the cipher operation with the
635 * @src: scatter/gather list that holds the plaintext
636 * @nbytes: number of bytes of the plaintext to encrypt.
638 * Encrypt plaintext data with the use of an IV that is solely used for this
639 * cipher operation. Any previously set IV is not used.
641 * The blkcipher_desc data structure must be filled by the caller and can
642 * reside on the stack. The caller must fill desc as follows: desc.tfm is filled
643 * with the block cipher handle; desc.info is filled with the IV to be used for
644 * the current operation; desc.flags is filled with either
645 * CRYPTO_TFM_REQ_MAY_SLEEP or 0.
647 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
649 static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
650 struct scatterlist *dst,
651 struct scatterlist *src,
654 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
658 * crypto_blkcipher_decrypt() - decrypt ciphertext
659 * @desc: reference to the block cipher handle with meta data
660 * @dst: scatter/gather list that is filled by the cipher operation with the
662 * @src: scatter/gather list that holds the ciphertext
663 * @nbytes: number of bytes of the ciphertext to decrypt.
665 * Decrypt ciphertext data using the IV set by the caller with a preceding
666 * call of crypto_blkcipher_set_iv.
668 * The blkcipher_desc data structure must be filled by the caller as documented
669 * for the crypto_blkcipher_encrypt call above.
671 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
674 static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
675 struct scatterlist *dst,
676 struct scatterlist *src,
679 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
680 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
684 * crypto_blkcipher_decrypt_iv() - decrypt ciphertext with dedicated IV
685 * @desc: reference to the block cipher handle with meta data
686 * @dst: scatter/gather list that is filled by the cipher operation with the
688 * @src: scatter/gather list that holds the ciphertext
689 * @nbytes: number of bytes of the ciphertext to decrypt.
691 * Decrypt ciphertext data with the use of an IV that is solely used for this
692 * cipher operation. Any previously set IV is not used.
694 * The blkcipher_desc data structure must be filled by the caller as documented
695 * for the crypto_blkcipher_encrypt_iv call above.
697 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
699 static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
700 struct scatterlist *dst,
701 struct scatterlist *src,
704 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
708 * crypto_blkcipher_set_iv() - set IV for cipher
709 * @tfm: cipher handle
710 * @src: buffer holding the IV
711 * @len: length of the IV in bytes
713 * The caller provided IV is set for the block cipher referenced by the cipher
716 static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
717 const u8 *src, unsigned int len)
719 memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
723 * crypto_blkcipher_get_iv() - obtain IV from cipher
724 * @tfm: cipher handle
725 * @dst: buffer filled with the IV
726 * @len: length of the buffer dst
728 * The caller can obtain the IV set for the block cipher referenced by the
729 * cipher handle and store it into the user-provided buffer. If the buffer
730 * has an insufficient space, the IV is truncated to fit the buffer.
732 static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
733 u8 *dst, unsigned int len)
735 memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
/**
 * DOC: Single Block Cipher API
 *
 * The single block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_CIPHER (listed as type "cipher" in /proc/crypto).
 *
 * Using the single block cipher API calls, operations with the basic cipher
 * primitive can be implemented. These cipher primitives exclude any block
 * chaining operations including IV handling.
 *
 * The purpose of this single block cipher API is to support the implementation
 * of templates or other concepts that only need to perform the cipher operation
 * on one block at a time. Templates invoke the underlying cipher primitive
 * block-wise and process either the input or the output data of these cipher
 * operations.
 */
755 static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
757 return (struct crypto_cipher *)tfm;
760 static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
762 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
763 return __crypto_cipher_cast(tfm);
767 * crypto_alloc_cipher() - allocate single block cipher handle
768 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
769 * single block cipher
770 * @type: specifies the type of the cipher
771 * @mask: specifies the mask for the cipher
773 * Allocate a cipher handle for a single block cipher. The returned struct
774 * crypto_cipher is the cipher handle that is required for any subsequent API
775 * invocation for that single block cipher.
777 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
778 * of an error, PTR_ERR() returns the error code.
780 static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
783 type &= ~CRYPTO_ALG_TYPE_MASK;
784 type |= CRYPTO_ALG_TYPE_CIPHER;
785 mask |= CRYPTO_ALG_TYPE_MASK;
787 return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
790 static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
796 * crypto_free_cipher() - zeroize and free the single block cipher handle
797 * @tfm: cipher handle to be freed
799 static inline void crypto_free_cipher(struct crypto_cipher *tfm)
801 crypto_free_tfm(crypto_cipher_tfm(tfm));
805 * crypto_has_cipher() - Search for the availability of a single block cipher
806 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
807 * single block cipher
808 * @type: specifies the type of the cipher
809 * @mask: specifies the mask for the cipher
811 * Return: true when the single block cipher is known to the kernel crypto API;
814 static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
816 type &= ~CRYPTO_ALG_TYPE_MASK;
817 type |= CRYPTO_ALG_TYPE_CIPHER;
818 mask |= CRYPTO_ALG_TYPE_MASK;
820 return crypto_has_alg(alg_name, type, mask);
823 static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
825 return &crypto_cipher_tfm(tfm)->crt_cipher;
829 * crypto_cipher_blocksize() - obtain block size for cipher
830 * @tfm: cipher handle
832 * The block size for the single block cipher referenced with the cipher handle
833 * tfm is returned. The caller may use that information to allocate appropriate
834 * memory for the data returned by the encryption or decryption operation
836 * Return: block size of cipher
838 static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
840 return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
843 static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
845 return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
848 static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
850 return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
853 static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
856 crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
859 static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
862 crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
866 * crypto_cipher_setkey() - set key for cipher
867 * @tfm: cipher handle
868 * @key: buffer holding the key
869 * @keylen: length of the key in bytes
871 * The caller provided key is set for the single block cipher referenced by the
874 * Note, the key length determines the cipher type. Many block ciphers implement
875 * different cipher modes depending on the key size, such as AES-128 vs AES-192
876 * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
879 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
881 static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
882 const u8 *key, unsigned int keylen)
884 return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
889 * crypto_cipher_encrypt_one() - encrypt one block of plaintext
890 * @tfm: cipher handle
891 * @dst: points to the buffer that will be filled with the ciphertext
892 * @src: buffer holding the plaintext to be encrypted
894 * Invoke the encryption operation of one block. The caller must ensure that
895 * the plaintext and ciphertext buffers are at least one block in size.
897 static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
898 u8 *dst, const u8 *src)
900 crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
905 * crypto_cipher_decrypt_one() - decrypt one block of ciphertext
906 * @tfm: cipher handle
907 * @dst: points to the buffer that will be filled with the plaintext
908 * @src: buffer holding the ciphertext to be decrypted
910 * Invoke the decryption operation of one block. The caller must ensure that
911 * the plaintext and ciphertext buffers are at least one block in size.
913 static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
914 u8 *dst, const u8 *src)
916 crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
920 #endif /* _LINUX_CRYPTO_H */