Orange Pi 5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

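The listing below is include/crypto/cryptd.h from this tree: the public interface of cryptd, the software async crypto daemon. cryptd wraps an existing synchronous algorithm and runs it asynchronously from a per-CPU workqueue, which is how SIMD-accelerated ciphers and hashes get used from contexts where the SIMD registers are unavailable. Two illustrative usage sketches (not part of this tree) follow the listing.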
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
					  u32 type, u32 mask);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);

#endif
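
First, a minimal lifecycle sketch, assuming a throwaway demo module and the stock "cbc(aes)" skcipher. cryptd_alloc_skcipher() composes the template name itself, so the transform allocated here is "cryptd(cbc(aes))":

/*
 * Hypothetical module (not part of this repository) showing the
 * allocate / inspect-child / free lifecycle of a cryptd skcipher.
 */
#include <linux/err.h>
#include <linux/module.h>
#include <crypto/cryptd.h>

static struct cryptd_skcipher *demo_tfm;

static int __init cryptd_demo_init(void)
{
	/* Allocates "cryptd(cbc(aes))"; type/mask of 0 means no constraints. */
	demo_tfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(demo_tfm))
		return PTR_ERR(demo_tfm);

	/* The child is the underlying synchronous "cbc(aes)" transform. */
	pr_info("cryptd demo: child driver %s\n",
		crypto_skcipher_driver_name(cryptd_skcipher_child(demo_tfm)));
	return 0;
}

static void __exit cryptd_demo_exit(void)
{
	cryptd_free_skcipher(demo_tfm);
}

module_init(cryptd_demo_init);
module_exit(cryptd_demo_exit);
MODULE_LICENSE("GPL");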
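
Second, the *_queued() predicates serve the bypass decision that wrappers such as the kernel's crypto/simd.c make on the fast path: if SIMD is usable and cryptd has nothing pending on the current CPU, a request can go straight to the synchronous child; otherwise it must be funneled through cryptd so it cannot overtake requests already queued. The queue is per-CPU, which is why the header insists the check happen "without moving CPUs". A sketch of that pattern, modeled on crypto/simd.c with an assumed demo_simd_ctx wrapper context:

/*
 * Hypothetical encrypt path modeled on crypto/simd.c. Assumes the
 * wrapper's init hook reserved room for a sub-request via
 * crypto_skcipher_set_reqsize().
 */
#include <linux/preempt.h>
#include <crypto/cryptd.h>
#include <crypto/internal/simd.h>
#include <crypto/skcipher.h>

struct demo_simd_ctx {
	struct cryptd_skcipher *cryptd_tfm;
};

static int demo_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct demo_simd_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);
	struct crypto_skcipher *child;

	*subreq = *req;

	/*
	 * Bypass cryptd only when SIMD is usable and, in atomic context,
	 * nothing is already queued on this CPU; otherwise forward to the
	 * cryptd transform so queued requests are not reordered.
	 */
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);
	return crypto_skcipher_encrypt(subreq);
}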