// SPDX-License-Identifier: GPL-2.0-only
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/sha.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"

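/*
 * This file is compiled twice: once with USE_V8_CRYPTO_EXTENSIONS defined,
 * producing the "ce" variant backed by the ARMv8 Crypto Extensions, and once
 * without it, producing the plain NEON variant. MODE selects the driver name
 * suffix and PRIO the crypto API priority, so the CE implementation (300) is
 * preferred over the NEON one (200) when both are available.
 */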
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	ce_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	ce_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	ce_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	ce_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	neon_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	neon_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	neon_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	neon_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
#endif
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif
MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);

asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);
asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_essiv_cbc_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);
asmlinkage void aes_essiv_cbc_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);

asmlinkage int aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			      int blocks, u8 dg[], int enc_before,
			      int enc_after);

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct crypto_aes_essiv_cbc_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
	struct crypto_shash *hash;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	return aes_expandkey(ctx, in_key, key_len);
}

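/*
 * XTS takes a double-length key: the first half keys the data cipher (key1),
 * the second half keys the tweak cipher (key2). xts_verify_key() rejects
 * keys of invalid length before the two halves are expanded separately.
 */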
static int __maybe_unused xts_set_key(struct crypto_skcipher *tfm,
				      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	return ret;
}

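/*
 * ESSIV derives the second key by hashing the first with SHA-256, so the IV
 * generator is keyed from the same material as the cipher itself, as the
 * essiv(cbc(aes),sha256) construction requires.
 */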
static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
					    const u8 *in_key,
					    unsigned int key_len)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 digest[SHA256_DIGEST_SIZE];
	int ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len);
	if (ret)
		return ret;

	crypto_shash_tfm_digest(ctx->hash, in_key, key_len, digest);

	return aes_expandkey(&ctx->key2, digest, sizeof(digest));
}

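/*
 * The skcipher routines below all follow the same pattern: walk the
 * scatterlists in virtual address space and hand each run of full blocks to
 * the assembly code inside a kernel_neon_begin()/kernel_neon_end() pair, so
 * FP/SIMD state is only held across one walk chunk at a time.
 */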
static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_enc, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_dec, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}

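/*
 * CBC with ciphertext stealing: everything up to the last two blocks is
 * handled as regular CBC via a subrequest, and the final two (possibly
 * partial) blocks are passed to aes_cbc_cts_encrypt(). For example, a 40
 * byte request is split into one 16 byte CBC block followed by a 24 byte
 * CTS tail. Requests shorter than one block are rejected.
 */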
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->hash = crypto_alloc_shash("sha256", 0, 0);

	return PTR_ERR_OR_ZERO(ctx->hash);
}

static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_shash(ctx->hash);
}

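/*
 * The first walk chunk goes through aes_essiv_cbc_encrypt(), which derives
 * the actual IV from req->iv using key2 (the ESSIV step) before doing CBC
 * with key1; any remaining chunks continue as plain CBC, since by then the
 * chained IV has been set up.
 */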
static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_enc, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_encrypt_walk(req, &walk);
}

static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_dec, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_decrypt_walk(req, &walk);
}

static int __maybe_unused ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
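	/*
	 * A trailing partial block is handled below by generating one block
	 * of keystream into a stack buffer and XOR-copying only the last
	 * nbytes of it into the destination.
	 */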
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
				blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

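/*
 * XTS with ciphertext stealing: when the length is not a multiple of the
 * block size, the bulk of the request (all but the last full block and the
 * tail) is processed first via a subrequest, and the final
 * AES_BLOCK_SIZE + tail bytes are then handled in a separate pass so the
 * assembly code can steal ciphertext from the penultimate block.
 */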
static int __maybe_unused xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_enc, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int __maybe_unused xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_dec, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_dec, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

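/*
 * The skcipher algorithms. ecb/cbc/ctr/xts are only registered here when
 * this is the CE build or when the bit-sliced NEON implementation
 * (CONFIG_CRYPTO_AES_ARM64_BS) is not available to provide them;
 * cts(cbc(aes)) and essiv(cbc(aes),sha256) are always registered. The essiv
 * entry uses PRIO + 1 so it outranks an essiv template instantiated on top
 * of cbc-aes at PRIO.
 */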
static struct skcipher_alg aes_algs[] = { {
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
	.base = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "xts(aes)",
		.cra_driver_name	= "xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
}, {
#endif
	.base = {
		.cra_name		= "cts(cbc(aes))",
		.cra_driver_name	= "cts-cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cts_cbc_encrypt,
	.decrypt	= cts_cbc_decrypt,
}, {
	.base = {
		.cra_name		= "essiv(cbc(aes),sha256)",
		.cra_driver_name	= "essiv-cbc-aes-sha256-" MODE,
		.cra_priority		= PRIO + 1,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_essiv_cbc_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= essiv_cbc_set_key,
	.encrypt	= essiv_cbc_encrypt,
	.decrypt	= essiv_cbc_decrypt,
	.init		= essiv_cbc_init_tfm,
	.exit		= essiv_cbc_exit_tfm,
} };

static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);

	return aes_expandkey(&ctx->key, in_key, key_len);
}

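/*
 * Multiply by x in GF(2^128): a left shift of the 128-bit value with
 * conditional reduction by the field polynomial (0x87 in the low byte).
 * This is the subkey derivation step from NIST SP 800-38B (CMAC).
 */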
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
			rounds, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}

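/*
 * XCBC (RFC 3566) derives three subkeys by encrypting the constants
 * 0x01.., 0x02.. and 0x03.. under the user key: K1 replaces the CBC-MAC
 * key, while K2 and K3 land in ctx->consts for use at finalization.
 */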
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}

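/* Reset the running digest and partial-block count for a new message. */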
static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

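/*
 * Core CBC-MAC block walker. On the SIMD path the NEON unit is claimed
 * around each aes_mac_update() call; the assembler routine may return early
 * with 'rem' blocks still pending so that kernel_neon_end() can run between
 * bursts, and the loop then resumes where it left off. Without usable SIMD,
 * fall back to the generic scalar aes_encrypt() in a plain CBC-MAC loop.
 *
 * enc_before requests that dg be encrypted before absorbing the first block
 * (a partial block was already XORed in); enc_after requests an encryption
 * after the last block as well, leaving dg as a finished MAC state.
 */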
static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (crypto_simd_usable()) {
		int rem;

		do {
			kernel_neon_begin();
			rem = aes_mac_update(in, ctx->key_enc, rounds, blocks,
					     dg, enc_before, enc_after);
			kernel_neon_end();
			/* the asm may yield early; resume with what is left */
			in += (blocks - rem) * AES_BLOCK_SIZE;
			blocks = rem;
			enc_before = 0;
		} while (blocks);
	} else {
		if (enc_before)
			aes_encrypt(ctx, dg, dg);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				aes_encrypt(ctx, dg, dg);
		}
	}
}

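/*
 * shash .update hook shared by cmac/xcbc/cbcmac: buffer partial blocks in
 * dg via XOR and hand full blocks to mac_do_update(). When the message ends
 * on a block boundary, the last block is XORed in but its encryption is
 * deferred (signalled by ctx->len == AES_BLOCK_SIZE) so that cmac_final()
 * can still fold the K1 subkey in before the final encryption.
 */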
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				/* defer the last encryption until final() */
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {	/* always true */
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}

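/*
 * cbcmac zero-pads a partial final block implicitly: the tail bytes were
 * already XORed into dg by mac_update(), so finishing just means one more
 * encryption if anything is pending. This is the plain CBC-MAC consumed by
 * the CCM AEAD template.
 */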
static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

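/*
 * CMAC finalisation per NIST SP 800-38B: a complete final block is XORed
 * with K1 (consts[0]); an incomplete one gets 10* padding (the 0x80 byte,
 * the rest already zero) and is XORed with K2 (consts + AES_BLOCK_SIZE).
 * The same routine finishes XCBC, where the constants are K2/K3 instead.
 */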
static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

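/*
 * The three MAC shashes registered by this driver. cbcmac(aes) advertises
 * cra_blocksize = 1 since it serves as a building block for templates such
 * as ccm(aes), which feed it arbitrary-length associated data.
 *
 * Illustrative use through the shash API (a sketch only: error handling is
 * elided and "key"/"msg"/"msg_len" are hypothetical caller-provided values):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
 *	u8 mac[AES_BLOCK_SIZE];
 *
 *	crypto_shash_setkey(tfm, key, AES_KEYSIZE_128);
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, msg, msg_len, mac);
 *	}
 *	crypto_free_shash(tfm);
 */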
static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };

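/* Tear down in the reverse order of registration. */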
static void aes_exit(void)
{
	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

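/*
 * Register the skciphers first and unwind them if registering the MAC
 * shashes fails, so a partial failure never leaves stale algorithms behind.
 */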
static int __init aes_init(void)
{
	int err;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	return 0;

unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}

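/*
 * The Crypto Extensions build only loads on CPUs advertising the AES hwcap;
 * the plain NEON build loads unconditionally and additionally exports its
 * helpers for reuse by the bit-sliced NEON AES driver.
 */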
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
EXPORT_SYMBOL(neon_aes_xts_encrypt);
EXPORT_SYMBOL(neon_aes_xts_decrypt);
#endif
module_exit(aes_exit);