Orange Pi 5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

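The listing below is the OMAP AES-GCM hardware-acceleration driver as shipped in this tree (it appears to correspond to drivers/crypto/omap-aes-gcm.c in the upstream kernel).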
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Cryptographic API.
 *
 * Support for OMAP AES GCM HW acceleration.
 *
 * Copyright (c) 2016 Texas Instruments Incorporated
 */

#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/omap-dma.h>
#include <linux/interrupt.h>
#include <linux/pm_runtime.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>

#include "omap-crypto.h"
#include "omap-aes.h"

static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
				     struct aead_request *req);

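/*
 * Complete the current AEAD request on the crypto engine and let runtime PM
 * autosuspend the device.
 */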
static void omap_aes_gcm_finish_req(struct omap_aes_dev *dd, int ret)
{
	struct aead_request *req = dd->aead_req;

	dd->in_sg = NULL;
	dd->out_sg = NULL;

	crypto_finalize_aead_request(dd->engine, req, ret);

	pm_runtime_mark_last_busy(dd->dev);
	pm_runtime_put_autosuspend(dd->dev);
}

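/*
 * Post-DMA completion work: tear down the DMA mappings, undo any aligned
 * bounce buffers, copy the computed tag out to the destination on encryption,
 * and on decryption check the XOR-combined tag (any non-zero byte means the
 * authentication tag did not match, hence -EBADMSG).
 */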
static void omap_aes_gcm_done_task(struct omap_aes_dev *dd)
{
	u8 *tag;
	int alen, clen, i, ret = 0, nsg;
	struct omap_aes_reqctx *rctx;

	alen = ALIGN(dd->assoc_len, AES_BLOCK_SIZE);
	clen = ALIGN(dd->total, AES_BLOCK_SIZE);
	rctx = aead_request_ctx(dd->aead_req);

	nsg = !!(dd->assoc_len && dd->total);

	dma_sync_sg_for_device(dd->dev, dd->out_sg, dd->out_sg_len,
			       DMA_FROM_DEVICE);
	dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
	dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, DMA_FROM_DEVICE);
	omap_aes_crypt_dma_stop(dd);

	omap_crypto_cleanup(dd->out_sg, dd->orig_out,
			    dd->aead_req->assoclen, dd->total,
			    FLAGS_OUT_DATA_ST_SHIFT, dd->flags);

	if (dd->flags & FLAGS_ENCRYPT)
		scatterwalk_map_and_copy(rctx->auth_tag,
					 dd->aead_req->dst,
					 dd->total + dd->aead_req->assoclen,
					 dd->authsize, 1);

	omap_crypto_cleanup(&dd->in_sgl[0], NULL, 0, alen,
			    FLAGS_ASSOC_DATA_ST_SHIFT, dd->flags);

	omap_crypto_cleanup(&dd->in_sgl[nsg], NULL, 0, clen,
			    FLAGS_IN_DATA_ST_SHIFT, dd->flags);

	if (!(dd->flags & FLAGS_ENCRYPT)) {
		tag = (u8 *)rctx->auth_tag;
		for (i = 0; i < dd->authsize; i++) {
			if (tag[i]) {
				ret = -EBADMSG;
			}
		}
	}

	omap_aes_gcm_finish_req(dd, ret);
}

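/*
 * Prepare the scatterlists for the hardware: the associated data and the
 * plain/ciphertext are copied into block-aligned, zero-padded buffers where
 * necessary (omap_crypto_align_sg()), and dd->in_sg/out_sg plus the related
 * lengths are set up for the DMA transfer.
 */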
static int omap_aes_gcm_copy_buffers(struct omap_aes_dev *dd,
				     struct aead_request *req)
{
	int alen, clen, cryptlen, assoclen, ret;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authlen = crypto_aead_authsize(aead);
	struct scatterlist *tmp, sg_arr[2];
	int nsg;
	u16 flags;

	assoclen = req->assoclen;
	cryptlen = req->cryptlen;

	if (dd->flags & FLAGS_RFC4106_GCM)
		assoclen -= 8;

	if (!(dd->flags & FLAGS_ENCRYPT))
		cryptlen -= authlen;

	alen = ALIGN(assoclen, AES_BLOCK_SIZE);
	clen = ALIGN(cryptlen, AES_BLOCK_SIZE);

	nsg = !!(assoclen && cryptlen);

	omap_aes_clear_copy_flags(dd);

	sg_init_table(dd->in_sgl, nsg + 1);
	if (assoclen) {
		tmp = req->src;
		ret = omap_crypto_align_sg(&tmp, assoclen,
					   AES_BLOCK_SIZE, dd->in_sgl,
					   OMAP_CRYPTO_COPY_DATA |
					   OMAP_CRYPTO_ZERO_BUF |
					   OMAP_CRYPTO_FORCE_SINGLE_ENTRY,
					   FLAGS_ASSOC_DATA_ST_SHIFT,
					   &dd->flags);
		if (ret)
			return ret;
	}

	if (cryptlen) {
		tmp = scatterwalk_ffwd(sg_arr, req->src, req->assoclen);

		if (nsg)
			sg_unmark_end(dd->in_sgl);

		ret = omap_crypto_align_sg(&tmp, cryptlen,
					   AES_BLOCK_SIZE, &dd->in_sgl[nsg],
					   OMAP_CRYPTO_COPY_DATA |
					   OMAP_CRYPTO_ZERO_BUF |
					   OMAP_CRYPTO_FORCE_SINGLE_ENTRY,
					   FLAGS_IN_DATA_ST_SHIFT,
					   &dd->flags);
		if (ret)
			return ret;
	}

	dd->in_sg = dd->in_sgl;
	dd->total = cryptlen;
	dd->assoc_len = assoclen;
	dd->authsize = authlen;

	dd->out_sg = req->dst;
	dd->orig_out = req->dst;

	dd->out_sg = scatterwalk_ffwd(sg_arr, req->dst, req->assoclen);

	flags = 0;
	if (req->src == req->dst || dd->out_sg == sg_arr)
		flags |= OMAP_CRYPTO_FORCE_COPY;

	if (cryptlen) {
		ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,
					   AES_BLOCK_SIZE, &dd->out_sgl,
					   flags,
					   FLAGS_OUT_DATA_ST_SHIFT, &dd->flags);
		if (ret)
			return ret;
	}

	dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen);
	dd->out_sg_len = sg_nents_for_len(dd->out_sg, clen);

	return 0;
}

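/*
 * Encrypt the initial counter block (IV || counter) with the software AES
 * key schedule; the result is later XORed with the hardware tag registers to
 * form the final GCM authentication tag.
 */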
static int do_encrypt_iv(struct aead_request *req, u32 *tag, u32 *iv)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	aes_encrypt(&ctx->actx, (u8 *)tag, (u8 *)iv);
	return 0;
}

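/*
 * DMA completion callback: combine the tag read back from the AES_REG_TAG_N
 * registers with the pre-encrypted counter block stored in rctx->auth_tag.
 * On decryption the tag received with the message is XORed in as well, so a
 * successful verification leaves rctx->auth_tag all zero.
 */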
void omap_aes_gcm_dma_out_callback(void *data)
{
	struct omap_aes_dev *dd = data;
	struct omap_aes_reqctx *rctx;
	int i, val;
	u32 *auth_tag, tag[4];

	if (!(dd->flags & FLAGS_ENCRYPT))
		scatterwalk_map_and_copy(tag, dd->aead_req->src,
					 dd->total + dd->aead_req->assoclen,
					 dd->authsize, 0);

	rctx = aead_request_ctx(dd->aead_req);
	auth_tag = (u32 *)rctx->auth_tag;
	for (i = 0; i < 4; i++) {
		val = omap_aes_read(dd, AES_REG_TAG_N(dd, i));
		auth_tag[i] = val ^ auth_tag[i];
		if (!(dd->flags & FLAGS_ENCRYPT))
			auth_tag[i] = auth_tag[i] ^ tag[i];
	}

	omap_aes_gcm_done_task(dd);
}

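/* Hand the request over to the crypto engine queue for asynchronous processing. */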
static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
				     struct aead_request *req)
{
	if (req)
		return crypto_transfer_aead_request_to_engine(dd->engine, req);

	return 0;
}

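/*
 * Crypto engine ->prepare_request() callback: latch the request on the
 * device, merge the requested mode into dd->flags, build the aligned
 * scatterlists and write the control setup via omap_aes_write_ctrl().
 */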
static int omap_aes_gcm_prepare_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
	struct omap_aes_dev *dd = rctx->dd;
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	int err;

	dd->aead_req = req;

	rctx->mode &= FLAGS_MODE_MASK;
	dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;

	err = omap_aes_gcm_copy_buffers(dd, req);
	if (err)
		return err;

	dd->ctx = &ctx->octx;

	return omap_aes_write_ctrl(dd);
}

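/*
 * Common GCM entry point: append the initial counter value (1) to the IV,
 * pre-encrypt that counter block into rctx->auth_tag, then queue the request.
 * If there is no associated data and no payload, the tag is simply the
 * encrypted counter block, so it is written out directly and the hardware is
 * skipped.
 */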
static int omap_aes_gcm_crypt(struct aead_request *req, unsigned long mode)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authlen = crypto_aead_authsize(aead);
	struct omap_aes_dev *dd;
	__be32 counter = cpu_to_be32(1);
	int err, assoclen;

	memset(rctx->auth_tag, 0, sizeof(rctx->auth_tag));
	memcpy(rctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	err = do_encrypt_iv(req, (u32 *)rctx->auth_tag, (u32 *)rctx->iv);
	if (err)
		return err;

	if (mode & FLAGS_RFC4106_GCM)
		assoclen = req->assoclen - 8;
	else
		assoclen = req->assoclen;
	if (assoclen + req->cryptlen == 0) {
		scatterwalk_map_and_copy(rctx->auth_tag, req->dst, 0, authlen,
					 1);
		return 0;
	}

	dd = omap_aes_find_dev(rctx);
	if (!dd)
		return -ENODEV;
	rctx->mode = mode;

	return omap_aes_gcm_handle_queue(dd, req);
}

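/* Plain GCM encrypt/decrypt: copy the caller's 12-byte IV and dispatch. */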
int omap_aes_gcm_encrypt(struct aead_request *req)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	return omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM);
}

int omap_aes_gcm_decrypt(struct aead_request *req)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	return omap_aes_gcm_crypt(req, FLAGS_GCM);
}

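/*
 * RFC 4106 (IPsec ESP) variants: the IV is the 4-byte nonce stored at setkey
 * time followed by the 8-byte per-request IV, and the associated data length
 * is validated before dispatching.
 */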
int omap_aes_4106gcm_encrypt(struct aead_request *req)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, ctx->octx.nonce, 4);
	memcpy(rctx->iv + 4, req->iv, 8);
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM |
				  FLAGS_RFC4106_GCM);
}

int omap_aes_4106gcm_decrypt(struct aead_request *req)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, ctx->octx.nonce, 4);
	memcpy(rctx->iv + 4, req->iv, 8);
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       omap_aes_gcm_crypt(req, FLAGS_GCM | FLAGS_RFC4106_GCM);
}

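/*
 * Expand the key for the software AES path (used to encrypt the counter
 * block) and keep a copy of the raw key for the hardware.
 */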
int omap_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			unsigned int keylen)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = aes_expandkey(&ctx->actx, key, keylen);
	if (ret)
		return ret;

	memcpy(ctx->octx.key, key, keylen);
	ctx->octx.keylen = keylen;

	return 0;
}

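/*
 * RFC 4106 setkey: the last four bytes of the key material are the nonce
 * (salt); the remainder is handled like a plain GCM key.
 */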
int omap_aes_4106gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	if (keylen < 4)
		return -EINVAL;
	keylen -= 4;

	ret = aes_expandkey(&ctx->actx, key, keylen);
	if (ret)
		return ret;

	memcpy(ctx->octx.key, key, keylen);
	memcpy(ctx->octx.nonce, key + keylen, 4);
	ctx->octx.keylen = keylen;

	return 0;
}

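/* Tag length validation is delegated to the generic GCM and RFC 4106 helpers. */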
int omap_aes_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

int omap_aes_4106gcm_setauthsize(struct crypto_aead *parent,
				 unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

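/*
 * Crypto engine ->do_one_request() callback: start the DMA transfer, or, if
 * there is nothing to transfer, go straight to the completion callback.
 */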
static int omap_aes_gcm_crypt_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
	struct omap_aes_dev *dd = rctx->dd;
	int ret = 0;

	if (!dd)
		return -ENODEV;

	if (dd->in_sg_len)
		ret = omap_aes_crypt_dma_start(dd);
	else
		omap_aes_gcm_dma_out_callback(dd);

	return ret;
}

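/*
 * AEAD init: wire up the crypto engine callbacks and reserve room for the
 * per-request context.
 */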
int omap_aes_gcm_cra_init(struct crypto_aead *tfm)
{
	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->enginectx.op.prepare_request = omap_aes_gcm_prepare_req;
	ctx->enginectx.op.unprepare_request = NULL;
	ctx->enginectx.op.do_one_request = omap_aes_gcm_crypt_req;

	crypto_aead_set_reqsize(tfm, sizeof(struct omap_aes_reqctx));

	return 0;
}