Orange Pi 5 kernel

Deprecated Linux kernel 5.10.110 for Orange Pi 5 / 5B / 5 Plus boards

include/crypto/engine.h (blame: commit 8f3ce5b39, kx, 2023-10-28 12:00:06 +0300)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Crypto engine API
 *
 * Copyright (c) 2016 Baolin Wang <baolin.wang@linaro.org>
 */
#ifndef _CRYPTO_ENGINE_H
#define _CRYPTO_ENGINE_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/akcipher.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

#define ENGINE_NAME_LEN	30
/*
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering idle state
 * @busy: request pump is busy
 * @running: the engine is currently running
 * @retry_support: indication that the hardware allows re-execution
 * of a failed backlog request; such a request is put back on the
 * crypto-engine queue in head position to keep order
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to request queue
 * @queue: the crypto queue of the engine
 * @dev: the device attached to this engine
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 * so the subsystem requests the driver to prepare the hardware
 * by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 * queue so the subsystem notifies the driver that it may relax the
 * hardware by issuing this call
 * @do_batch_requests: execute a batch of requests; depends on multiple
 * requests support
 * @kworker: kthread worker struct for request pump
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the current request being processed
 */
struct crypto_engine {
	char			name[ENGINE_NAME_LEN];
	bool			idling;
	bool			busy;
	bool			running;

	bool			retry_support;

	struct list_head	list;
	spinlock_t		queue_lock;
	struct crypto_queue	queue;
	struct device		*dev;

	bool			rt;

	int (*prepare_crypt_hardware)(struct crypto_engine *engine);
	int (*unprepare_crypt_hardware)(struct crypto_engine *engine);
	int (*do_batch_requests)(struct crypto_engine *engine);

	struct kthread_worker           *kworker;
	struct kthread_work             pump_requests;

	void				*priv_data;
	struct crypto_async_request	*cur_req;
};

/*
 * struct crypto_engine_op - crypto hardware engine operations
 * @prepare_request: do any preparation, if needed, before handling the current request
 * @unprepare_request: undo any work done by prepare_request()
 * @do_one_request: process the current request (e.g. do the encryption)
 */
struct crypto_engine_op {
	int (*prepare_request)(struct crypto_engine *engine,
			       void *areq);
	int (*unprepare_request)(struct crypto_engine *engine,
				 void *areq);
	int (*do_one_request)(struct crypto_engine *engine,
			      void *areq);
};

struct crypto_engine_ctx {
	struct crypto_engine_op op;
};

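/*
 * Illustrative sketch, not part of the upstream header: the engine core
 * finds these callbacks through the struct crypto_engine_ctx placed at the
 * start of the transform context, so a driver typically lays out its
 * context as below. Names prefixed with "example_" are hypothetical, and
 * crypto_skcipher_ctx() is assumed to come from <crypto/internal/skcipher.h>.
 */
struct example_skcipher_ctx {
	struct crypto_engine_ctx	enginectx;	/* must stay the first member */
	u8				key[32];	/* driver-private state follows */
	unsigned int			keylen;
};

static int example_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);

	/*
	 * Program the hardware with req->src/req->dst and start the
	 * operation here; the completion path later reports the result
	 * with crypto_finalize_skcipher_request().
	 */
	(void)req;
	return 0;
}

static int example_init_tfm(struct crypto_skcipher *tfm)
{
	struct example_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* do_one_request is mandatory; the prepare/unprepare hooks are optional. */
	ctx->enginectx.op.do_one_request = example_do_one_request;
	ctx->enginectx.op.prepare_request = NULL;
	ctx->enginectx.op.unprepare_request = NULL;
	return 0;
}
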
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req);
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req);
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req);
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req);
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err);
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err);
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err);
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err);
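
/*
 * Illustrative sketch, not part of the upstream header: an algorithm entry
 * point usually just transfers the request to the engine, and the driver's
 * hardware completion path finalizes it later. struct example_dev, the
 * example_dev pointer and example_complete() are hypothetical names used
 * only for this sketch.
 */
struct example_dev {
	struct crypto_engine	*engine;
	struct skcipher_request	*cur_req;	/* request currently on the hardware */
};

static struct example_dev *example_dev;		/* set up in probe(), see the sketch below */

static int example_skcipher_encrypt(struct skcipher_request *req)
{
	/* Queue the request; the engine pump later calls do_one_request(). */
	return crypto_transfer_skcipher_request_to_engine(example_dev->engine, req);
}

/* Called from the driver's completion path, e.g. its interrupt handler. */
static void example_complete(struct example_dev *edev, int err)
{
	crypto_finalize_skcipher_request(edev->engine, edev->cur_req, err);
	edev->cur_req = NULL;
}
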
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
						       bool retry_support,
						       int (*cbk_do_batch)(struct crypto_engine *engine),
						       bool rt, int qlen);
int crypto_engine_exit(struct crypto_engine *engine);
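
/*
 * Illustrative sketch, not part of the upstream header: typical engine
 * lifecycle in a platform driver, assuming the hypothetical struct
 * example_dev from the sketch above and <linux/platform_device.h>.
 * A driver that needs batching or retry support would call
 * crypto_engine_alloc_init_and_set() instead of crypto_engine_alloc_init().
 */
static int example_probe(struct platform_device *pdev)
{
	struct example_dev *edev;
	int ret;

	edev = devm_kzalloc(&pdev->dev, sizeof(*edev), GFP_KERNEL);
	if (!edev)
		return -ENOMEM;

	/* One kthread-backed queue for this device, not run as a realtime task. */
	edev->engine = crypto_engine_alloc_init(&pdev->dev, false);
	if (!edev->engine)
		return -ENOMEM;

	ret = crypto_engine_start(edev->engine);
	if (ret) {
		crypto_engine_exit(edev->engine);
		return ret;
	}

	platform_set_drvdata(pdev, edev);
	return 0;
}

static int example_remove(struct platform_device *pdev)
{
	struct example_dev *edev = platform_get_drvdata(pdev);

	/* Stops the engine and destroys its kthread worker. */
	crypto_engine_exit(edev->engine);
	return 0;
}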

#endif /* _CRYPTO_ENGINE_H */