// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)-all");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");

MODULE_IMPORT_NS(CRYPTO_INTERNAL);

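/*
 * Prototypes for the low-level NEON routines (implemented in the
 * accompanying assembly file, aes-neonbs-core.S).  The bit-sliced code
 * processes up to eight AES blocks in parallel per call, which is why the
 * glue code below walks the data in multiples of eight blocks whenever it
 * can.
 */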
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 ctr[], u8 final[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[],
				  int reorder_last_tweak);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[],
				  int reorder_last_tweak);

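/*
 * rk[] holds the round keys after conversion into the bit-sliced layout
 * expected by the NEON code; the buffer is sized for the largest (AES-256)
 * key schedule.
 */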
struct aesbs_ctx {
	int	rounds;
	u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
};

struct aesbs_cbc_ctx {
	struct aesbs_ctx	key;
	struct crypto_skcipher	*enc_tfm;
};

struct aesbs_xts_ctx {
	struct aesbs_ctx	key;
	struct crypto_cipher	*cts_tfm;
	struct crypto_cipher	*tweak_tfm;
};

struct aesbs_ctr_ctx {
	struct aesbs_ctx	key;		/* must be first member */
	struct crypto_aes_ctx	fallback;
};

static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			unsigned int key_len)
{
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
	kernel_neon_end();
	/* wipe the temporary expanded key, as the CBC setkey path does */
	memzero_explicit(&rk, sizeof(rk));

	return 0;
}

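/*
 * Common ECB walk: process as many blocks as possible per
 * kernel_neon_begin()/kernel_neon_end() section, rounding down to a
 * multiple of the eight-block stride except for the final chunk of the
 * request.
 */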
static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_decrypt);
}

static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
	kernel_neon_end();
	memzero_explicit(&rk, sizeof(rk));

	return crypto_skcipher_setkey(ctx->enc_tfm, in_key, key_len);
}

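/*
 * CBC encryption is inherently serial (each block's input depends on the
 * previous ciphertext block), so it cannot use the 8-way bit-sliced NEON
 * code.  Encryption is handed off to the "cbc(aes)" fallback allocated in
 * cbc_init(); only decryption, which can process blocks in parallel, uses
 * the NEON implementation below.
 */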
static int cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_request *subreq = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	skcipher_request_set_tfm(subreq, ctx->enc_tfm);
	skcipher_request_set_callback(subreq,
				      skcipher_request_flags(req),
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return crypto_skcipher_encrypt(subreq);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key.rk, ctx->key.rounds, blocks,
				  walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static int cbc_init(struct crypto_skcipher *tfm)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int reqsize;

	ctx->enc_tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->enc_tfm))
		return PTR_ERR(ctx->enc_tfm);

	reqsize = sizeof(struct skcipher_request);
	reqsize += crypto_skcipher_reqsize(ctx->enc_tfm);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void cbc_exit(struct crypto_skcipher *tfm)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->enc_tfm);
}

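/*
 * The synchronous CTR variant keeps a full generic key schedule
 * (ctx->fallback) in addition to the bit-sliced one, so single blocks can
 * be encrypted with the scalar aes_encrypt() when NEON is not usable.
 */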
static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
				 unsigned int key_len)
{
	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->fallback, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
	kernel_neon_end();

	return 0;
}

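/*
 * CTR mode: whole blocks are handled by the NEON code.  If the request ends
 * in a partial block, the assembly emits one extra block of keystream into
 * 'buf' (passed as 'final'), and the C code below XORs it with the
 * remaining source bytes.
 */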
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;

		if (walk.nbytes < walk.total) {
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);
			final = NULL;
		}

		kernel_neon_begin();
		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
		kernel_neon_end();

		if (final) {
			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

			crypto_xor_cpy(dst, src, final,
				       walk.total % AES_BLOCK_SIZE);

			err = skcipher_walk_done(&walk, 0);
			break;
		}
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(&ctx->fallback, dst, src);
	local_irq_restore(flags);
}

static int ctr_encrypt_sync(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}

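/*
 * XTS uses two AES keys: the first half of the supplied key drives the
 * bit-sliced data cipher (and the scalar cipher used for ciphertext
 * stealing), while the second half drives the scalar cipher that encrypts
 * the initial tweak.
 */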
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, in_key, key_len);
	if (err)
		return err;

	key_len /= 2;
	err = crypto_cipher_setkey(ctx->cts_tfm, in_key, key_len);
	if (err)
		return err;
	err = crypto_cipher_setkey(ctx->tweak_tfm, in_key + key_len, key_len);
	if (err)
		return err;

	return aesbs_setkey(tfm, in_key, key_len);
}

static int xts_init(struct crypto_skcipher *tfm)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->cts_tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(ctx->cts_tfm))
		return PTR_ERR(ctx->cts_tfm);

	ctx->tweak_tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(ctx->tweak_tfm))
		crypto_free_cipher(ctx->cts_tfm);

	return PTR_ERR_OR_ZERO(ctx->tweak_tfm);
}

static void xts_exit(struct crypto_skcipher *tfm)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->tweak_tfm);
	crypto_free_cipher(ctx->cts_tfm);
}

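/*
 * If the request length is not a multiple of the block size, the bulk of
 * the data is processed first via a subrequest trimmed to a whole number of
 * blocks; the last full block plus the tail are then handled with
 * ciphertext stealing using the scalar cts_tfm.
 */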
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[], int))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct skcipher_request subreq;
	u8 buf[2 * AES_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	if (unlikely(tail)) {
		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   req->cryptlen - tail, req->iv);
		req = &subreq;
	}

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		int reorder_last_tweak = !encrypt && tail > 0;

		if (walk.nbytes < walk.total) {
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);
			reorder_last_tweak = 0;
		}

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
		   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	if (err || likely(!tail))
		return err;

	/* handle ciphertext stealing */
	scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
				 AES_BLOCK_SIZE, 0);
	memcpy(buf + AES_BLOCK_SIZE, buf, tail);
	scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0);

	crypto_xor(buf, req->iv, AES_BLOCK_SIZE);

	if (encrypt)
		crypto_cipher_encrypt_one(ctx->cts_tfm, buf, buf);
	else
		crypto_cipher_decrypt_one(ctx->cts_tfm, buf, buf);

	crypto_xor(buf, req->iv, AES_BLOCK_SIZE);

	scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
				 AES_BLOCK_SIZE + tail, 1);
	return 0;
}

static int xts_encrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, false, aesbs_xts_decrypt);
}

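/*
 * The "__" prefixed algorithms are marked CRYPTO_ALG_INTERNAL and are only
 * reachable through the SIMD wrappers created in aes_init().  The
 * "ctr-aes-neonbs-sync" variant is registered directly, at slightly lower
 * priority, because it handles the !crypto_simd_usable() case itself via
 * the scalar fallback.
 */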
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "__ecb(aes)",
	.base.cra_driver_name	= "__ecb-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "__cbc(aes)",
	.base.cra_driver_name	= "__cbc-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL |
				  CRYPTO_ALG_NEED_FALLBACK,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
	.init			= cbc_init,
	.exit			= cbc_exit,
}, {
	.base.cra_name		= "__ctr(aes)",
	.base.cra_driver_name	= "__ctr-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-neonbs-sync",
	.base.cra_priority	= 250 - 1,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_ctr_setkey_sync,
	.encrypt		= ctr_encrypt_sync,
	.decrypt		= ctr_encrypt_sync,
}, {
	.base.cra_name		= "__xts(aes)",
	.base.cra_driver_name	= "__xts-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_xts_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_xts_setkey,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
	.init			= xts_init,
	.exit			= xts_exit,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

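/*
 * Register the skciphers, then create a SIMD wrapper ("ecb(aes)" etc.,
 * i.e. the driver name without the "__" prefix) for each internal
 * algorithm.  The wrapper calls the NEON code directly when SIMD is usable
 * and otherwise defers to an asynchronous (cryptd-based) helper.
 */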
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}
	return 0;

unregister_simds:
	aes_exit();
	return err;
}

late_initcall(aes_init);
module_exit(aes_exit);