// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

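/*
 * crypto_mod_get - Take a reference on an algorithm
 * @alg: Algorithm to pin
 *
 * Grabs a reference on @alg and on the module that provides it. Returns
 * @alg on success, or NULL if the owning module is being unloaded.
 */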
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

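/*
 * crypto_mod_put - Release a reference taken by crypto_mod_get()
 * @alg: Algorithm to release
 *
 * The module pointer is read before dropping the algorithm reference,
 * since the final put may free @alg.
 */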
void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

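/*
 * Find the best match for @name on crypto_alg_list: an exact match on
 * cra_driver_name wins outright, otherwise the highest-priority algorithm
 * whose cra_name matches is chosen. Moribund entries are skipped, as are
 * request larvals registered with a different mask. Must be called with
 * crypto_alg_sem held; returns the algorithm with a reference held, or
 * NULL if nothing matches.
 */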
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

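/*
 * crypto_larval_alloc - Allocate a larval (placeholder) algorithm
 * @name: Algorithm name being requested
 * @type: Algorithm type flags
 * @mask: Mask applied when matching @type
 *
 * A larval stands in for an algorithm that is still being loaded or
 * constructed. Lookups block on its completion until the real ("adult")
 * algorithm is registered. Returns the larval or ERR_PTR(-ENOMEM).
 */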
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

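/*
 * Register a larval for @name unless a matching algorithm (or larval) is
 * already on the list. The new larval starts with a refcount of two: one
 * for the list and one returned to the caller. If somebody else won the
 * race, the duplicate is freed and, when the existing entry is itself a
 * larval, we wait for it to mature.
 */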
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

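/*
 * crypto_larval_kill - Retire a larval
 * @alg: Larval to remove
 *
 * Takes the larval off crypto_alg_list, wakes up all waiters and drops
 * the list's reference.
 */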
void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

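/*
 * Wait (killable, up to 60 seconds) for a larval to turn into a real
 * algorithm. On success the adult is returned with a fresh reference.
 * Otherwise an ERR_PTR is returned: -EINTR if the wait was killed,
 * -ETIMEDOUT on timeout, -ENOENT if no adult ever appeared and -EAGAIN
 * if the adult failed its self-tests or its module is going away. The
 * caller's reference on the larval is always dropped.
 */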
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

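/*
 * Look up @name under crypto_alg_sem. Unless the caller asked about
 * CRYPTO_ALG_TESTED explicitly, only algorithms that have passed their
 * self-tests are considered; if the sole match is an untested non-larval,
 * the tests failed and -ELIBBAD is returned.
 */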
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

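/*
 * Look up @name, autoloading "crypto-<name>" if nothing is found and
 * CRYPTO_NOLOAD is not set. "crypto-<name>-all" is requested as well,
 * unless the caller insists on an implementation that does not itself
 * need a fallback. If the algorithm is still missing afterwards, a
 * request larval is registered so that the crypto manager can try to
 * instantiate it; if a larval is found instead, wait for it to mature.
 */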
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

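/*
 * crypto_probing_notify - Notify the crypto manager chain
 * @val: Notification code (e.g. CRYPTO_MSG_ALG_REQUEST)
 * @v: Notification payload
 *
 * If nobody on crypto_chain handles the event, load "cryptomgr" and try
 * once more. Returns the notifier chain result.
 */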
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

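/*
 * crypto_alg_mod_lookup - Find an algorithm, loading modules as needed
 * @name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * Core lookup routine behind the crypto_alloc_*() helpers. If the
 * algorithm is not yet registered, module autoloading and the crypto
 * manager (template instantiation, self-tests) are given a chance to
 * provide it. Returns the algorithm with a reference held, or an
 * ERR_PTR on failure.
 */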
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require the caller to
	 * invoke the cipher with the internal flag in order to use it.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

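/*
 * crypto_shoot_alg - Mark an algorithm as dying
 * @alg: Algorithm that failed to initialise a transform
 *
 * Called when transform initialisation returns -EAGAIN so that the
 * failing implementation is not picked again on the retry.
 */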
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

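/*
 * __crypto_alloc_tfm - Allocate a transform for an algorithm (legacy path)
 * @alg: Algorithm to instantiate
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * Returns the new transform or an ERR_PTR. On success the caller's
 * reference on @alg is inherited by the transform; on failure the caller
 * keeps its reference and is expected to drop it.
 */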
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

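/*
 * crypto_create_tfm_node - Allocate a transform using a frontend type
 * @alg: Algorithm to instantiate
 * @frontend: Frontend algorithm type (e.g. skcipher, ahash)
 * @node: NUMA node to allocate the transform on
 *
 * Allocates the frontend object with the embedded struct crypto_tfm and
 * context area behind it, then runs the frontend and algorithm init
 * hooks. Returns a pointer to the frontend object or an ERR_PTR; as with
 * __crypto_alloc_tfm(), the caller's reference on @alg is inherited on
 * success.
 */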
void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

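/*
 * crypto_find_alg - Look up an algorithm on behalf of a frontend
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type, may be NULL
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * Adjusts @type and @mask according to the frontend's requirements and
 * then performs a full lookup via crypto_alg_mod_lookup().
 */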
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node on which the user would like requests to be processed;
 *	  NUMA_NO_NODE means there is no special requirement.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
	       const struct crypto_type *frontend, u32 type, u32 mask,
	       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

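/*
 * crypto_has_alg - Test whether an algorithm is available
 * @name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * Returns 1 if a matching algorithm can be found (loading modules if
 * necessary), 0 otherwise.
 */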
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

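/*
 * crypto_req_done - Completion callback for synchronous waiters
 * @req: Asynchronous request that has finished
 * @err: Final status of the request
 *
 * Used together with struct crypto_wait and crypto_wait_req() to drive
 * asynchronous crypto requests synchronously. -EINPROGRESS merely
 * signals that a backlogged request has been started and is ignored.
 */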
void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
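
/*
 * Illustrative sketch of the crypto_wait pattern that crypto_req_done()
 * serves (not part of this file; it assumes an skcipher request "req"
 * has already been allocated and set up elsewhere):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * crypto_wait_req() sleeps on wait.completion until crypto_req_done()
 * fires, then returns the error recorded in wait.err.
 */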

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");