Orange Pi5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

// SPDX-License-Identifier: GPL-2.0-or-later
/**
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

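/*
 * Note on the two helpers below: the packet engine is driven by per-request
 * "security association" (SA) records.  sa_command_0 and sa_command_1 are
 * the two SA control words; the set_dynamic_sa_command_*() helpers simply
 * spread the caller's parameters over the individual bit fields.
 */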
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}
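
/*
 * Common skcipher path: copy the request IV (if any) into little-endian
 * words and queue the request on the packet engine via crypto4xx_build_pd().
 * Decrypt requests use the inbound SA (sa_in), encrypt requests the outbound
 * SA (sa_out).  check_blocksize rejects lengths that are not a multiple of
 * the AES block size (used by the *_block entry points below).
 */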
static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt,
				  bool check_blocksize)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
		return -EINVAL;

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}

/**
 * AES Functions
 */
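/*
 * crypto4xx_setkey_aes() builds both directions up front: the inbound SA is
 * filled in first, then copied verbatim to the outbound SA with only the
 * direction bit (and, nominally, the opcode) flipped.  The SA is sized for
 * the actual key: SA_AES128_LEN plus one 32-bit word per extra 4 bytes of
 * key material.
 */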
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int    rc;

	if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128)
		return -EINVAL;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen-16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT.
	 * it's the DIR_(IN|OUT)BOUND that matters
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

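/*
 * RFC 3686 (CTR for IPsec) keys carry a trailing 4-byte nonce.  Key the
 * engine with the AES key material only and keep the nonce in the context;
 * the request IV is then assembled as nonce || IV || counter, with the
 * per-request block counter fixed to 1 in the last word.
 */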
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32 bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter.  So fall back if the counter is going to
	 * overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
			NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
			req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			: crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv_stream(req)
		       : crypto4xx_decrypt_iv_stream(req);
}

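/*
 * Keep the software fallback tfm in sync with the hardware tfm: mirror the
 * CRYPTO_TFM_REQ_* flags and give it the same key, so it can transparently
 * take over requests the engine cannot handle (e.g. the CTR counter
 * rollover case above).
 */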
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}

static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
}

/**
 * AES-CCM Functions
 */

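/*
 * CCM on this engine is AES-CTR for the payload plus AES-CBC-MAC for the
 * tag.  Both SA directions are built here: inbound decrypts and verifies
 * (SA_OPCODE_HASH_DECRYPT), outbound encrypts and appends the tag
 * (SA_OPCODE_ENCRYPT_HASH); beyond direction and opcode the two differ only
 * in the copy-pad bit.
 */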
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}

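/*
 * For CCM, req->iv[0] is the flags byte, i.e. L' = L - 1 where L is the
 * size of the counter field.  crypto4xx_aead_need_fallback() only admits
 * L = 2 or L = 4, and L = 2 (iv[0] == 1) switches the engine to its ICM
 * counter variant below.
 */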
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx  = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/**
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

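/*
 * The engine's GHASH needs the hash key H = AES_K(0^128) preloaded into the
 * SA's inner digest.  Compute it here in software with the generic AES
 * library and store it in the engine's little-endian word order.
 */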
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_aes_ctx ctx;
	uint8_t src[16] = { 0 };
	int rc;

	rc = aes_expandkey(&ctx, key, keylen);
	if (rc) {
		pr_err("aes_expandkey() failed: %d\n", rc);
		return rc;
	}

	aes_encrypt(&ctx, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}

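/*
 * GCM keying mirrors the CCM path: AES-CTR for the payload, GHASH for the
 * tag.  The inbound SA is built first (hash + decrypt), the precomputed H
 * is written into its inner digest, and the outbound SA is a copy with the
 * direction and opcode switched to encrypt + hash.
 */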
int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int    rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
		return -EINVAL;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa  = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/**
 * HASH SHA1 Functions
 */
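/*
 * Common ahash tfm init: allocate an SA of the requested size, program it
 * as a plain inbound hash operation (null cipher, SA_OPCODE_HASH) with the
 * given hash algorithm, and zero the digest fields.  The 'hm' (hash mode)
 * argument is not used by this setup.
 */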
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev   = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

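/*
 * Note: the ahash entry points below do not implement true incremental
 * hashing.  ->update pushes the whole request through the engine and writes
 * the digest straight to req->result, ->final is a no-op, and ->digest
 * behaves the same as ->update.
 */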
int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

/**
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}