// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CMAC: Cipher Block Mode for Authentication
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * Based on work by:
 *  Copyright © 2013 Tom St Denis <tstdenis@elliptictech.com>
 * Based on crypto/xcbc.c:
 *  Copyright © 2006 USAGI/WIDE Project,
 *  Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */
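
/*
 * Minimal usage sketch: computing an AES-CMAC from other kernel code through
 * the shash API.  The names key, data, data_len and mac are illustrative and
 * error handling is trimmed for brevity.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
 *	u8 mac[16];
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_shash_setkey(tfm, key, 16);
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, data_len, mac);
 *	}
 *	crypto_free_shash(tfm);
 */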

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | cmac_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
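/*
 * The ctx[] area holds the two CMAC subkeys, K1 followed by K2 (one cipher
 * block each, per NIST SP 800-38B), written by crypto_cmac_digest_setkey().
 */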
struct cmac_tfm_ctx {
	struct crypto_cipher *child;
	u8 ctx[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | cmac_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
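/*
 * odds buffers input that does not yet make up a full block; prev holds the
 * running CBC-MAC value that each completed block is XORed into and then
 * encrypted.
 */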
struct cmac_desc_ctx {
	unsigned int len;
	u8 ctx[];
};

static int crypto_cmac_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
	unsigned int bs = crypto_shash_blocksize(parent);
	__be64 *consts = PTR_ALIGN((void *)ctx->ctx,
				   (alignmask | (__alignof__(__be64) - 1)) + 1);
	u64 _const[2];
	int i, err = 0;
	u8 msb_mask, gfmask;

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	/* encrypt the zero block */
	memset(consts, 0, bs);
	crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);

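	/*
	 * Derive the subkeys from L = E_K(0^bs) as specified in NIST
	 * SP 800-38B: K1 = L * x and K2 = L * x^2 in GF(2^(8*bs)).
	 * Multiplying by x is a one-bit left shift followed by a conditional
	 * XOR with the reduction constant (0x87 for 128-bit blocks, 0x1b for
	 * 64-bit blocks) when the shifted-out bit was set.
	 */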
	switch (bs) {
	case 16:
		gfmask = 0x87;
		_const[0] = be64_to_cpu(consts[1]);
		_const[1] = be64_to_cpu(consts[0]);

		/* gf(2^128) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 4; i += 2) {
			msb_mask = ((s64)_const[1] >> 63) & gfmask;
			_const[1] = (_const[1] << 1) | (_const[0] >> 63);
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i + 0] = cpu_to_be64(_const[1]);
			consts[i + 1] = cpu_to_be64(_const[0]);
		}

		break;
	case 8:
		gfmask = 0x1B;
		_const[0] = be64_to_cpu(consts[0]);

		/* gf(2^64) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 2; i++) {
			msb_mask = ((s64)_const[0] >> 63) & gfmask;
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i] = cpu_to_be64(_const[0]);
		}

		break;
	}

	return 0;
}


static int crypto_cmac_digest_init(struct shash_desc *pdesc)
{
	unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = PTR_ALIGN((void *)ctx->ctx, alignmask + 1) + bs;

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

static int crypto_cmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1);
	u8 *prev = odds + bs;

	/* if the buffered and new data together fit in one block, just accumulate */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* top up odds to a full block and fold it into the MAC state */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* the block buffer is now empty */
	ctx->len = 0;

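	/*
	 * The loop below runs only while strictly more than one block is
	 * left: the final block, full or partial, always stays buffered in
	 * odds so that crypto_cmac_digest_final() can pad it and apply the
	 * proper subkey.
	 */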
	/* encrypting the rest of the data */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* keep the remaining 1..bs bytes buffered for the final block */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

static int crypto_cmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *consts = PTR_ALIGN((void *)tctx->ctx,
			       (alignmask | (__alignof__(__be64) - 1)) + 1);
	u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1);
	u8 *prev = odds + bs;
	unsigned int offset = 0;

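	/*
	 * An incomplete final block is padded with 10...0 and XORed with
	 * subkey K2 (consts + bs); a complete final block is XORed with K1
	 * (consts), as required by NIST SP 800-38B.
	 */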
	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

static int cmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
	struct cmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	unsigned long alignmask;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

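	/*
	 * Only 64- and 128-bit block ciphers are supported; setkey() derives
	 * the subkeys only for these block sizes.
	 */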
	switch (alg->cra_blocksize) {
	case 16:
	case 8:
		break;
	default:
		err = -EINVAL;
		goto err_free_inst;
	}

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	alignmask = alg->cra_alignmask;
	inst->alg.base.cra_alignmask = alignmask;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;

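	/*
	 * The MAC is as long as one cipher block.  descsize and cra_ctxsize
	 * reserve space for the structs above plus alignment slack plus two
	 * blocks (odds + prev, and K1 + K2 respectively) so the buffers can
	 * be aligned at runtime with PTR_ALIGN().
	 */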
	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize =
		ALIGN(sizeof(struct cmac_desc_ctx), crypto_tfm_ctx_alignment())
		+ (alignmask & ~(crypto_tfm_ctx_alignment() - 1))
		+ alg->cra_blocksize * 2;

	inst->alg.base.cra_ctxsize =
		ALIGN(sizeof(struct cmac_tfm_ctx), crypto_tfm_ctx_alignment())
		+ ((alignmask | (__alignof__(__be64) - 1)) &
		   ~(crypto_tfm_ctx_alignment() - 1))
		+ alg->cra_blocksize * 2;

	inst->alg.base.cra_init = cmac_init_tfm;
	inst->alg.base.cra_exit = cmac_exit_tfm;

	inst->alg.init = crypto_cmac_digest_init;
	inst->alg.update = crypto_cmac_digest_update;
	inst->alg.final = crypto_cmac_digest_final;
	inst->alg.setkey = crypto_cmac_digest_setkey;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_cmac_tmpl = {
	.name = "cmac",
	.create = cmac_create,
	.module = THIS_MODULE,
};

static int __init crypto_cmac_module_init(void)
{
	return crypto_register_template(&crypto_cmac_tmpl);
}

static void __exit crypto_cmac_module_exit(void)
{
	crypto_unregister_template(&crypto_cmac_tmpl);
}

subsys_initcall(crypto_cmac_module_init);
module_exit(crypto_cmac_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CMAC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("cmac");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);