// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
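
/*
 * Worked example of the formula above (illustrative only): AES-128 has
 * key_length == 16, giving 6 + 16/4 = 10 rounds; AES-192 (24 bytes)
 * gives 6 + 24/4 = 12; AES-256 (32 bytes) gives 6 + 32/4 = 14.
 */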

asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);
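
/*
 * The ce_aes_ccm_* routines above are implemented in assembly using the
 * ARMv8 Crypto Extensions (in mainline this lives alongside the glue as
 * aes-ce-ccm-core.S); as seen at every call site below, they must only
 * be called between kernel_neon_begin() and kernel_neon_end().
 */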

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}
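
/*
 * Note on the check above: CCM tag lengths are the even values 4, 6, 8,
 * 10, 12, 14 or 16 bytes (RFC 3610); the upper bound is enforced by the
 * AEAD API via .maxauthsize = AES_BLOCK_SIZE in the aead_alg below.
 */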

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}
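
/*
 * Example of the resulting B0 flags byte (illustrative): with L == 2
 * (req->iv[0] == 1), an 8 byte tag and associated data present, byte 0
 * becomes 0x40 | ((8 - 2) << 2) | 1 == 0x59, i.e. Adata == 1,
 * M' == (8 - 2)/2 == 3 and L' == L - 1 == 1, matching RFC 3610.
 */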

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (crypto_simd_usable()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			aes_encrypt(key, mac, mac);
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			aes_encrypt(key, mac, mac);
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}
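
/*
 * The scalar fallback above implements CBC-MAC with the encryption
 * deferred by one step: partial input is only XORed into mac, and each
 * full-block step computes mac = E_K(mac) ^ B_i, so mac always holds
 * the last chaining value with any not-yet-encrypted bytes folded in.
 * The pending encryption is completed by the next update or by the
 * finalization step (ce_aes_ccm_final() or ccm_crypt_fallback()'s tail).
 */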

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}
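
	/*
	 * Length-tag encoding per NIST SP 800-38C/RFC 3610 (example for
	 * illustration): an AAD length below 0xff00 is encoded as two
	 * big-endian bytes, anything larger as 0xff 0xfe followed by four
	 * big-endian bytes; e.g. assoclen == 0x10000 yields
	 * ff fe 00 01 00 00.
	 */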

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			aes_encrypt(ctx, buf, walk->iv);
			aes_encrypt(ctx, mac, mac);
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		aes_encrypt(ctx, buf, iv0);
		aes_encrypt(ctx, mac, mac);
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}
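
/*
 * Sketch of the per-block recurrence implemented by the fallback above
 * (A_i is the counter block, P_i/C_i plain/ciphertext, X_i the MAC state):
 *
 *	A_i = A_{i-1} + 1;	S_i = E_K(A_i);		C_i = P_i ^ S_i;
 *	X_i = E_K(X_{i-1}) ^ P_i;
 *
 * and finally T = E_K(X_n) ^ E_K(A_0), where A_0 is the preserved
 * original counter block (iv0). The MAC is always taken over the
 * plaintext, i.e. src when encrypting and dst after decryption, which
 * is why the crypto_xor() into mac is placed before or after the
 * keystream XOR depending on 'enc'.
 */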

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/*
	 * Preserve the original iv (counter == 0) for the final round:
	 * the walk increments the counter in place, but encrypting the
	 * tag requires S_0 and hence A_0.
	 */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv (counter == 0) for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;
	/*
	 * Compare the calculated auth tag with the one stored at the end
	 * of src; crypto_memneq() compares in constant time, so the check
	 * does not leak how many tag bytes matched.
	 */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};
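
/*
 * Illustrative sketch (not part of this driver) of how an in-kernel user
 * would reach this algorithm through the AEAD API; error handling is
 * omitted and the key/nonce values are placeholders:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	u8 key[16] = { ... };
 *	u8 iv[16] = { 3, ... };		// iv[0] = L - 1, i.e. L == 4,
 *					// followed by an 11 byte nonce
 *
 *	crypto_aead_setkey(tfm, key, sizeof(key));
 *	crypto_aead_setauthsize(tfm, 8);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	err = crypto_aead_encrypt(req);
 */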

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");