Orange Pi 5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

aes-ce-glue.c (shown from a git blame view; every line last changed in commit 8f3ce5b39, kx, 2023-10-28):

// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/module.h>
#include <crypto/xts.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int bytes, u8 const iv[]);
asmlinkage void ce_aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int bytes, u8 const iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
			    unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
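	 * (each entry doubles its predecessor in GF(2^8), i.e. rcon[i] = x^i;
	 * 0x80 * 2 reduces mod the AES polynomial to 0x1b, then 0x36)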
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->key_length = key_len;
	for (i = 0; i < kwords; i++)
		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));

	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ rcon[i];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}

static int ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

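/*
 * XTS takes two AES keys supplied as a single concatenated blob: key1
 * encrypts the data blocks, key2 encrypts the IV to produce the initial
 * tweak (xts_set_key() below splits the blob in half).
 */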
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				       key_len / 2);
	return ret;
}

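/*
 * Each mode below walks the request in virtually mapped chunks and brackets
 * only the assembler call with kernel_neon_begin()/kernel_neon_end(), so
 * preemption is disabled for at most one chunk at a time.
 */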
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

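/*
 * The CBC code is split into _walk helpers so the CTS routines further down
 * can reuse them on a partial sub-request.
 */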
static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}

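/*
 * CBC with ciphertext stealing: all but the last two blocks go through the
 * plain CBC helpers above; the final full block plus the partial block that
 * follows it are handled by the dedicated cts assembler routine.
 */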
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_enc, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_dec, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err, blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell ce_aes_ctr_encrypt() to process a tail block.
		 */
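		/*
		 * (A negative block count makes the core routine emit one
		 * block of keystream into 'tail'; crypto_xor_cpy() below
		 * then XORs the partial source block against it.)
		 */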
		blocks = -1;

		kernel_neon_begin();
		ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx),
				   blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}

static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}

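/*
 * Synchronous variant: when the NEON unit is unusable (e.g. when called
 * from interrupt context), fall back to the generic C implementation one
 * block at a time via ctr_encrypt_one() above.
 */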
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}

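/*
 * XTS with a length that is not a block multiple needs ciphertext stealing:
 * everything up to the last two blocks is processed first, then one final
 * pass handles the remaining full block plus the tail in a single call.
 */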
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_enc, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_dec, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_dec, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

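/*
 * The "__"-prefixed entries are internal-only (CRYPTO_ALG_INTERNAL): they
 * assume NEON is usable and are reached by users through the simd wrappers
 * created in aes_init() below.
 */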
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "__ecb(aes)",
	.base.cra_driver_name	= "__ecb-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "__cbc(aes)",
	.base.cra_driver_name	= "__cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "__cts(cbc(aes))",
	.base.cra_driver_name	= "__cts-cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cts_cbc_encrypt,
	.decrypt		= cts_cbc_decrypt,
}, {
	.base.cra_name		= "__ctr(aes)",
	.base.cra_driver_name	= "__ctr-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt_sync,
	.decrypt		= ctr_encrypt_sync,
}, {
	.base.cra_name		= "__xts(aes)",
	.base.cra_driver_name	= "__xts-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= xts_set_key,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs) && aes_simd_algs[i]; i++)
		simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
}

module_cpu_feature_match(AES, aes_init);
module_exit(aes_exit);
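
For context, this is how a kernel-side consumer reaches these implementations: asking the skcipher API for "ctr(aes)" resolves to the highest-priority registered provider, which on Crypto Extensions hardware should be this driver (through its simd wrapper or the sync variant). The following is a minimal sketch, not part of this tree; the ctr_demo_* names and buffer sizes are illustrative only.

#include <crypto/skcipher.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>

static int __init ctr_demo_init(void)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	struct scatterlist sg;
	/* scatterlists need addressable memory, so not a stack buffer */
	static u8 buf[64];
	u8 key[16], iv[16];
	int err;

	/* Picks the best "ctr(aes)" provider, e.g. ctr-aes-ce via its simd wrapper. */
	tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	get_random_bytes(key, sizeof(key));
	get_random_bytes(iv, sizeof(iv));

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_tfm;
	}

	sg_init_one(&sg, buf, sizeof(buf));
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);

	/* Encrypt in place; crypto_wait_req() blocks if the backend is async. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_tfm:
	crypto_free_skcipher(tfm);
	return err;
}

static void __exit ctr_demo_exit(void)
{
}

module_init(ctr_demo_init);
module_exit(ctr_demo_exit);
MODULE_LICENSE("GPL");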