Orange Pi 5 kernel

Deprecated Linux kernel 5.10.110 for Orange Pi 5/5B/5+ boards

File: crypto/cryptd.c (blame: all lines below are from commit 8f3ce5b39, kx, 2023-10-28)

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

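/*
 * Editorial note (not in the original source): the parameter above is
 * registered with permissions 0, so it is invisible in sysfs and can only
 * be set at load time, e.g.
 *
 *	modprobe cryptd cryptd_max_cpu_qlen=2000
 *
 * or, with cryptd built into the kernel, on the boot command line:
 *
 *	cryptd.cryptd_max_cpu_qlen=2000
 */
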
static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out_put_cpu;

	refcount_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}
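
/*
 * Editorial note (not in the original source): cryptd_enqueue_request()
 * reads crypto_tfm_ctx() as a bare refcount_t, which works because refcnt
 * is the first member of cryptd_skcipher_ctx, cryptd_hash_ctx and
 * cryptd_aead_ctx alike. The count is bumped only for transforms whose
 * refcounting was armed with a non-zero initial value (as the
 * cryptd_alloc_*() helpers do); the matching decrement lives in the
 * cryptd_*_complete() paths below, which free the transform on the
 * final put.
 */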

/*
 * Called in workqueue context; does one unit of real crypto work (via
 * req->complete) and reschedules itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	return ictx->queue;
}

static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case the
	 * resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that's already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}
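
/*
 * Worked example (editorial, not in the original source): when the user
 * asks for cryptd around an internal implementation (driver names
 * conventionally prefixed with "__"), CRYPTO_ALG_INTERNAL is set in both
 * algt->type and algt->mask, so the bit survives into *type and *mask, the
 * child lookup matches only internal algorithms, and the wrapper instance
 * inherits the internal marking. CRYPTO_ALG_ASYNC is set in *mask but not
 * in *type, which restricts the child lookup to synchronous
 * implementations.
 */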

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
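
/*
 * Naming example (editorial, not in the original source): wrapping
 * "sha1-generic" (cra_name "sha1", priority 100) produces an instance with
 * cra_name "sha1" and cra_driver_name "cryptd(sha1-generic)" at priority
 * 150, so the async wrapper outranks its synchronous child when "sha1" is
 * requested by generic name.
 */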

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(child, key, keylen);
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}
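
/*
 * Usage sketch (editorial, not in the original source; the driver name
 * below is made up, while cryptd_alloc_skcipher() and its helpers are
 * declared in <crypto/cryptd.h>): a driver that owns a synchronous
 * skcipher can obtain an async, process-context wrapper like this:
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__xts-aes-example",
 *				     CRYPTO_ALG_INTERNAL, CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	crypto_sync_skcipher_setkey(cryptd_skcipher_child(ctfm), key, keylen);
 *	...
 *	cryptd_free_skcipher(ctfm);
 */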

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}

static int cryptd_hash_enqueue(struct ahash_request *req,
				crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL|
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}
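
/*
 * Usage sketch (editorial, not in the original source; "sha256-generic" is
 * just an example child, while the cryptd_alloc_ahash() family is declared
 * in <crypto/cryptd.h>):
 *
 *	struct cryptd_ahash *chash;
 *
 *	chash = cryptd_alloc_ahash("sha256-generic", 0, 0);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *
 * Here &chash->base is an ordinary struct crypto_ahash usable with the
 * ahash_request_*() API, and cryptd_ahash_child(chash) exposes the wrapped
 * shash. Release with:
 *
 *	cryptd_free_ahash(chash);
 */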

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			struct crypto_aead *child,
			int err,
			int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  745) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  746) static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  747) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  748) 	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  749) 	struct crypto_aead *child = ctx->child;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  750) 	struct aead_request *req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  751) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  752) 	req = container_of(areq, struct aead_request, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  753) 	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  754) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  755) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  756) static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  757) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  758) 	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  759) 	struct crypto_aead *child = ctx->child;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  760) 	struct aead_request *req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  761) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  762) 	req = container_of(areq, struct aead_request, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  763) 	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  764) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  765) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  766) static int cryptd_aead_enqueue(struct aead_request *req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  767) 				    crypto_completion_t compl)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  768) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  769) 	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  770) 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  771) 	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  772) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  773) 	rctx->complete = req->base.complete;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  774) 	req->base.complete = compl;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  775) 	return cryptd_enqueue_request(queue, &req->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  776) }
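
/*
 * The caller's completion handler is stashed in the request context
 * and replaced by cryptd's own callback; once the workqueue has run
 * the child algorithm, cryptd_aead_crypt() reports the final status
 * back through the saved handler.
 */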
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  777) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  778) static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  779) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  780) 	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  781) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  782) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  783) static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  784) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  785) 	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  786) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  787) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  788) static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  789) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  790) 	struct aead_instance *inst = aead_alg_instance(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  791) 	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  792) 	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  793) 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  794) 	struct crypto_aead *cipher;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  795) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  796) 	cipher = crypto_spawn_aead(spawn);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  797) 	if (IS_ERR(cipher))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  798) 		return PTR_ERR(cipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  799) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  800) 	ctx->child = cipher;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  801) 	crypto_aead_set_reqsize(
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  802) 		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  803) 			 crypto_aead_reqsize(cipher)));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  804) 	return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  805) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  806) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  807) static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  808) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  809) 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  810) 	crypto_free_aead(ctx->child);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  811) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  812) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  813) static void cryptd_aead_free(struct aead_instance *inst)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  814) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  815) 	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  816) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  817) 	crypto_drop_aead(&ctx->aead_spawn);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  818) 	kfree(inst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  819) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  820) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  821) static int cryptd_create_aead(struct crypto_template *tmpl,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  822) 			      struct rtattr **tb,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  823) 			      struct crypto_attr_type *algt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  824) 			      struct cryptd_queue *queue)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  825) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  826) 	struct aead_instance_ctx *ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  827) 	struct aead_instance *inst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  828) 	struct aead_alg *alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  829) 	u32 type;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  830) 	u32 mask;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  831) 	int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  832) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  833) 	cryptd_type_and_mask(algt, &type, &mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  834) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  835) 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  836) 	if (!inst)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  837) 		return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  838) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  839) 	ctx = aead_instance_ctx(inst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  840) 	ctx->queue = queue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  841) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  842) 	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  843) 			       crypto_attr_alg_name(tb[1]), type, mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  844) 	if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  845) 		goto err_free_inst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  846) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  847) 	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  848) 	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  849) 	if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  850) 		goto err_free_inst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  851) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  852) 	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  853) 		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  854) 	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  855) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  856) 	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  857) 	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  858) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  859) 	inst->alg.init = cryptd_aead_init_tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  860) 	inst->alg.exit = cryptd_aead_exit_tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  861) 	inst->alg.setkey = cryptd_aead_setkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  862) 	inst->alg.setauthsize = cryptd_aead_setauthsize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  863) 	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  864) 	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  865) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  866) 	inst->free = cryptd_aead_free;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  867) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  868) 	err = aead_register_instance(tmpl, inst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  869) 	if (err) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  870) err_free_inst:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  871) 		cryptd_aead_free(inst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  872) 	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  873) 	return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  874) }
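
/*
 * The err_free_inst label sits inside the if-block so the earlier
 * failure paths can share the same cleanup.  Calling cryptd_aead_free()
 * before crypto_grab_aead() has succeeded is fine: dropping a spawn
 * that was never grabbed is a no-op.
 */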
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  875) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  876) static struct cryptd_queue queue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  877) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  878) static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  879) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  880) 	struct crypto_attr_type *algt;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  881) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  882) 	algt = crypto_get_attr_type(tb);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  883) 	if (IS_ERR(algt))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  884) 		return PTR_ERR(algt);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  885) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  886) 	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  887) 	case CRYPTO_ALG_TYPE_SKCIPHER:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  888) 		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  889) 	case CRYPTO_ALG_TYPE_HASH:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  890) 		return cryptd_create_hash(tmpl, tb, algt, &queue);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  891) 	case CRYPTO_ALG_TYPE_AEAD:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  892) 		return cryptd_create_aead(tmpl, tb, algt, &queue);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  893) 	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  894) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  895) 	return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  896) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  897) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  898) static struct crypto_template cryptd_tmpl = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  899) 	.name = "cryptd",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  900) 	.create = cryptd_create,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  901) 	.module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  902) };
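
/*
 * Instances of the template are created by name through the normal
 * crypto API.  A minimal sketch (the algorithm name is illustrative,
 * not something this file registers):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cryptd(cbc(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 * The lookup ends up in cryptd_create(), which dispatches on the inner
 * algorithm's type and wraps it in an asynchronous instance backed by
 * the per-CPU queues.
 */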
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  903) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  904) struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  905) 					      u32 type, u32 mask)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  906) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  907) 	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  908) 	struct cryptd_skcipher_ctx *ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  909) 	struct crypto_skcipher *tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  910) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  911) 	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  912) 		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  913) 		return ERR_PTR(-EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  914) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  915) 	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  916) 	if (IS_ERR(tfm))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  917) 		return ERR_CAST(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  918) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  919) 	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  920) 		crypto_free_skcipher(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  921) 		return ERR_PTR(-EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  922) 	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  923) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  924) 	ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  925) 	refcount_set(&ctx->refcnt, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  926) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  927) 	return container_of(tfm, struct cryptd_skcipher, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  928) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  929) EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
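
/*
 * Typical use, as a sketch: a SIMD driver registers an internal-only
 * implementation and wraps it with cryptd so it can also be driven
 * from contexts where the SIMD unit is unavailable.  The algorithm
 * name below is hypothetical:
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__xts-aes-example",
 *				     CRYPTO_ALG_INTERNAL,
 *				     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 * Passing CRYPTO_ALG_INTERNAL as both type and mask restricts the
 * lookup to algorithms hidden from general users.
 */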
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  930) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  931) struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  932) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  933) 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  934) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  935) 	return &ctx->child->base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  936) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  937) EXPORT_SYMBOL_GPL(cryptd_skcipher_child);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  938) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  939) bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  940) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  941) 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  942) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  943) 	return refcount_read(&ctx->refcnt) - 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  944) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  945) EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
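
/*
 * ctx->refcnt starts at 1 for the allocator and is bumped for every
 * request sitting in a cryptd queue, so a value above 1 means work is
 * still outstanding.  The ahash and aead _queued() helpers below use
 * the same convention.
 */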
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  946) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  947) void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  948) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  949) 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  950) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  951) 	if (refcount_dec_and_test(&ctx->refcnt))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  952) 		crypto_free_skcipher(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  953) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  954) EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  955) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  956) struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  957) 					u32 type, u32 mask)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  958) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  959) 	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  960) 	struct cryptd_hash_ctx *ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  961) 	struct crypto_ahash *tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  962) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  963) 	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  964) 		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  965) 		return ERR_PTR(-EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  966) 	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  967) 	if (IS_ERR(tfm))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  968) 		return ERR_CAST(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  969) 	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  970) 		crypto_free_ahash(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  971) 		return ERR_PTR(-EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  972) 	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  973) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  974) 	ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  975) 	refcount_set(&ctx->refcnt, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  976) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  977) 	return __cryptd_ahash_cast(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  978) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  979) EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  980) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  981) struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  982) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  983) 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  984) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  985) 	return ctx->child;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  986) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  987) EXPORT_SYMBOL_GPL(cryptd_ahash_child);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  988) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  989) struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  990) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  991) 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  992) 	return &rctx->desc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  993) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  994) EXPORT_SYMBOL_GPL(cryptd_shash_desc);
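
/*
 * cryptd_shash_desc() exposes the synchronous descriptor embedded in
 * the request context, so a caller that is already in a safe context
 * can drive the child shash directly instead of taking the workqueue
 * round trip.
 */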
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  995) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  996) bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  997) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  998) 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  999) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1000) 	return refcount_read(&ctx->refcnt) - 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1001) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1002) EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1003) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1004) void cryptd_free_ahash(struct cryptd_ahash *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1005) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1006) 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1007) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1008) 	if (refcount_dec_and_test(&ctx->refcnt))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1009) 		crypto_free_ahash(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1010) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1011) EXPORT_SYMBOL_GPL(cryptd_free_ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1012) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1013) struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1014) 				      u32 type, u32 mask)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1015) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1016) 	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1017) 	struct cryptd_aead_ctx *ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1018) 	struct crypto_aead *tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1019) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1020) 	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1021) 		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1022) 		return ERR_PTR(-EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1023) 	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1024) 	if (IS_ERR(tfm))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1025) 		return ERR_CAST(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1026) 	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1027) 		crypto_free_aead(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1028) 		return ERR_PTR(-EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1029) 	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1030) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1031) 	ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1032) 	refcount_set(&ctx->refcnt, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1033) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1034) 	return __cryptd_aead_cast(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1035) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1036) EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
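
/*
 * Same pattern as cryptd_alloc_skcipher() above: callers typically
 * pass an internal-only algorithm name together with
 * CRYPTO_ALG_INTERNAL for both type and mask, then use
 * cryptd_aead_child() on fast paths where the wrapped implementation
 * may run directly.
 */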
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1037) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1038) struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1039) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1040) 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1041) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1042) 	return ctx->child;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1043) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1044) EXPORT_SYMBOL_GPL(cryptd_aead_child);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1045) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1046) bool cryptd_aead_queued(struct cryptd_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1047) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1048) 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1049) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1050) 	return refcount_read(&ctx->refcnt) - 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1051) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1052) EXPORT_SYMBOL_GPL(cryptd_aead_queued);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1053) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1054) void cryptd_free_aead(struct cryptd_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1055) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1056) 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1057) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1058) 	if (refcount_dec_and_test(&ctx->refcnt))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1059) 		crypto_free_aead(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1060) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1061) EXPORT_SYMBOL_GPL(cryptd_free_aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1062) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1063) static int __init cryptd_init(void)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1064) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1065) 	int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1066) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1067) 	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1068) 				    1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1069) 	if (!cryptd_wq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1070) 		return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1071) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1072) 	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1073) 	if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1074) 		goto err_destroy_wq;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1075) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1076) 	err = crypto_register_template(&cryptd_tmpl);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1077) 	if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1078) 		goto err_fini_queue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1079) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1080) 	return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1081) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1082) err_fini_queue:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1083) 	cryptd_fini_queue(&queue);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1084) err_destroy_wq:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1085) 	destroy_workqueue(cryptd_wq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1086) 	return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1087) }
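
/*
 * Initialisation order matters: the workqueue and the per-CPU queues
 * must exist before the template is registered, because an instance
 * may start enqueueing work as soon as "cryptd" becomes resolvable.
 * The queue depth is tunable at load time, e.g. via the
 * cryptd.cryptd_max_cpu_qlen= boot parameter.
 */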
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1088) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1089) static void __exit cryptd_exit(void)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1090) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1091) 	destroy_workqueue(cryptd_wq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1092) 	cryptd_fini_queue(&queue);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1093) 	crypto_unregister_template(&cryptd_tmpl);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1094) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1095) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1096) subsys_initcall(cryptd_init);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1097) module_exit(cryptd_exit);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1098) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1099) MODULE_LICENSE("GPL");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1100) MODULE_DESCRIPTION("Software async crypto daemon");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1101) MODULE_ALIAS_CRYPTO("cryptd");