Orange Pi5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

3 Commits   0 Branches   0 Tags
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   1) // SPDX-License-Identifier: GPL-2.0-only
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   2) /**
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   3)  * AES CTR routines supporting VMX instructions on the Power 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   4)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   5)  * Copyright (C) 2015 International Business Machines Inc.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   6)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   7)  * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   8)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   9) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  10) #include <asm/simd.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  11) #include <asm/switch_to.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  12) #include <crypto/aes.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  13) #include <crypto/internal/simd.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  14) #include <crypto/internal/skcipher.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  15) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  16) #include "aesp8-ppc.h"
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  17) 
/*
 * Per-transform context: the expanded key for the P8 (VMX/VSX) code
 * paths plus a generic software fallback used when SIMD is unusable.
 */
struct p8_aes_ctr_ctx {
	struct crypto_skcipher *fallback;	/* generic ctr(aes), allocated in init */
	struct aes_key enc_key;			/* key schedule consumed by aes_p8_* */
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  22) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  23) static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  24) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  25) 	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  26) 	struct crypto_skcipher *fallback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  27) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  28) 	fallback = crypto_alloc_skcipher("ctr(aes)", 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  29) 					 CRYPTO_ALG_NEED_FALLBACK |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  30) 					 CRYPTO_ALG_ASYNC);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  31) 	if (IS_ERR(fallback)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  32) 		pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  33) 		       PTR_ERR(fallback));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  34) 		return PTR_ERR(fallback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  35) 	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  36) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  37) 	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  38) 				    crypto_skcipher_reqsize(fallback));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  39) 	ctx->fallback = fallback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  40) 	return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  41) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  42) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  43) static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  44) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  45) 	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  46) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  47) 	crypto_free_skcipher(ctx->fallback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  48) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  49) 
/*
 * Install the AES key in both the P8 key schedule and the software
 * fallback, so either path can be used per-request.
 *
 * Returns 0 on success, -EINVAL if either the P8 key expansion or the
 * fallback's setkey rejects the key.
 */
static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	/*
	 * aes_p8_set_encrypt_key() touches VSX registers: take ownership
	 * of the vector unit with preemption and pagefaults disabled for
	 * the duration, then release it in strict reverse order.
	 */
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	/* OR the results so a failure in either setkey is reported. */
	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  68) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  69) static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  70) 			     struct skcipher_walk *walk)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  71) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  72) 	u8 *ctrblk = walk->iv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  73) 	u8 keystream[AES_BLOCK_SIZE];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  74) 	u8 *src = walk->src.virt.addr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  75) 	u8 *dst = walk->dst.virt.addr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  76) 	unsigned int nbytes = walk->nbytes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  77) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  78) 	preempt_disable();
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  79) 	pagefault_disable();
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  80) 	enable_kernel_vsx();
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  81) 	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  82) 	disable_kernel_vsx();
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  83) 	pagefault_enable();
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  84) 	preempt_enable();
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  85) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  86) 	crypto_xor_cpy(dst, keystream, src, nbytes);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  87) 	crypto_inc(ctrblk, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  88) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  89) 
/*
 * Encrypt/decrypt a request in CTR mode (the two are identical for a
 * stream cipher). If the SIMD unit is unusable in this context, the
 * whole request is redirected to the software fallback via a
 * sub-request stored in the request context reserved by init.
 *
 * Otherwise walk the scatterlists, processing full blocks with the
 * VSX bulk routine and any trailing partial block in
 * p8_aes_ctr_final().
 */
static int p8_aes_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int ret;

	if (!crypto_simd_usable()) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		/* Clone the request, retargeting it at the fallback tfm. */
		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return crypto_skcipher_encrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		/*
		 * Bulk-encrypt all whole blocks in this walk chunk with
		 * the VSX unit held (no preemption, no pagefaults).
		 */
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
					    walk.dst.virt.addr,
					    nbytes / AES_BLOCK_SIZE,
					    &ctx->enc_key, walk.iv);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		/*
		 * The asm routine does not write the counter back, so
		 * bump walk.iv once per block processed; the loop also
		 * reduces nbytes to the sub-block remainder handed to
		 * skcipher_walk_done().
		 */
		do {
			crypto_inc(walk.iv, AES_BLOCK_SIZE);
		} while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);

		ret = skcipher_walk_done(&walk, nbytes);
	}
	if (nbytes) {
		/* Trailing partial block: one keystream block, partial XOR. */
		p8_aes_ctr_final(ctx, &walk);
		ret = skcipher_walk_done(&walk, 0);
	}
	return ret;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 131) 
/* Algorithm descriptor registered by the vmx_crypto module. */
struct skcipher_alg p8_aes_ctr_alg = {
	.base.cra_name = "ctr(aes)",
	.base.cra_driver_name = "p8_aes_ctr",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,	/* outranks the generic implementation */
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,	/* uses ctx->fallback */
	.base.cra_blocksize = 1,	/* CTR is a stream cipher */
	.base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.setkey = p8_aes_ctr_setkey,
	.encrypt = p8_aes_ctr_crypt,	/* encrypt == decrypt in CTR mode */
	.decrypt = p8_aes_ctr_crypt,
	.init = p8_aes_ctr_init,
	.exit = p8_aes_ctr_exit,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.chunksize = AES_BLOCK_SIZE,	/* counter advances per full block */
};