// SPDX-License-Identifier: GPL-2.0-only
/*
 * Cipher algorithms supported by the CESA: DES, 3DES and AES.
 *
 * Author: Boris Brezillon <boris.brezillon@free-electrons.com>
 * Author: Arnaud Ebalard <arno@natisbad.org>
 *
 * This work is based on an initial version written by
 * Sebastian Andrzej Siewior < sebastian at breakpoint dot cc >
 */

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>

#include "cesa.h"

struct mv_cesa_des_ctx {
	struct mv_cesa_ctx base;
	u8 key[DES_KEY_SIZE];
};

struct mv_cesa_des3_ctx {
	struct mv_cesa_ctx base;
	u8 key[DES3_EDE_KEY_SIZE];
};

struct mv_cesa_aes_ctx {
	struct mv_cesa_ctx base;
	struct crypto_aes_ctx aes;
};

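/*
 * Iterator bundle used when building a TDMA descriptor chain: ->base walks
 * the request payload in SRAM-sized chunks while ->src and ->dst track the
 * current position in the source and destination scatterlists.
 */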
struct mv_cesa_skcipher_dma_iter {
	struct mv_cesa_dma_iter base;
	struct mv_cesa_sg_dma_iter src;
	struct mv_cesa_sg_dma_iter dst;
};

static inline void
mv_cesa_skcipher_req_iter_init(struct mv_cesa_skcipher_dma_iter *iter,
			       struct skcipher_request *req)
{
	mv_cesa_req_dma_iter_init(&iter->base, req->cryptlen);
	mv_cesa_sg_dma_iter_init(&iter->src, req->src, DMA_TO_DEVICE);
	mv_cesa_sg_dma_iter_init(&iter->dst, req->dst, DMA_FROM_DEVICE);
}

static inline bool
mv_cesa_skcipher_req_iter_next_op(struct mv_cesa_skcipher_dma_iter *iter)
{
	iter->src.op_offset = 0;
	iter->dst.op_offset = 0;

	return mv_cesa_req_dma_iter_next_op(&iter->base);
}

static inline void
mv_cesa_skcipher_dma_cleanup(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);

	if (req->dst != req->src) {
		dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
			     DMA_FROM_DEVICE);
		dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
			     DMA_TO_DEVICE);
	} else {
		dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
			     DMA_BIDIRECTIONAL);
	}
	mv_cesa_dma_cleanup(&creq->base);
}

static inline void mv_cesa_skcipher_cleanup(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
		mv_cesa_skcipher_dma_cleanup(req);
}

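/*
 * Standard (CPU-driven) mode: copy the operation descriptor and the next
 * chunk of input data (at most CESA_SA_SRAM_PAYLOAD_SIZE bytes) into the
 * engine SRAM, update the crypt length in the descriptor, and start the
 * accelerator. The processed chunk is read back in
 * mv_cesa_skcipher_std_process().
 */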
static void mv_cesa_skcipher_std_step(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
	struct mv_cesa_engine *engine = creq->base.engine;
	size_t len = min_t(size_t, req->cryptlen - sreq->offset,
			   CESA_SA_SRAM_PAYLOAD_SIZE);

	mv_cesa_adjust_op(engine, &sreq->op);
	memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));

	len = sg_pcopy_to_buffer(req->src, creq->src_nents,
				 engine->sram + CESA_SA_DATA_SRAM_OFFSET,
				 len, sreq->offset);

	sreq->size = len;
	mv_cesa_set_crypt_op_len(&sreq->op, len);

	/* FIXME: only update enc_len field */
	if (!sreq->skip_ctx) {
		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
		sreq->skip_ctx = true;
	} else {
		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op.desc));
	}

	mv_cesa_set_int_mask(engine, CESA_SA_INT_ACCEL0_DONE);
	writel_relaxed(CESA_SA_CFG_PARA_DIS, engine->regs + CESA_SA_CFG);
	WARN_ON(readl(engine->regs + CESA_SA_CMD) &
		CESA_SA_CMD_EN_CESA_SA_ACCL0);
	writel(CESA_SA_CMD_EN_CESA_SA_ACCL0, engine->regs + CESA_SA_CMD);
}

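/*
 * Called once the engine has finished a chunk: copy the processed data from
 * SRAM back into the destination scatterlist and advance the offset.
 * Returns -EINPROGRESS while more data remains (the request will be stepped
 * again) and 0 once the whole request has been handled.
 */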
static int mv_cesa_skcipher_std_process(struct skcipher_request *req,
					u32 status)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
	struct mv_cesa_engine *engine = creq->base.engine;
	size_t len;

	len = sg_pcopy_from_buffer(req->dst, creq->dst_nents,
				   engine->sram + CESA_SA_DATA_SRAM_OFFSET,
				   sreq->size, sreq->offset);

	sreq->offset += len;
	if (sreq->offset < req->cryptlen)
		return -EINPROGRESS;

	return 0;
}

static int mv_cesa_skcipher_process(struct crypto_async_request *req,
				    u32 status)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
	struct mv_cesa_req *basereq = &creq->base;

	if (mv_cesa_req_get_type(basereq) == CESA_STD_REQ)
		return mv_cesa_skcipher_std_process(skreq, status);

	return mv_cesa_dma_process(basereq, status);
}

static void mv_cesa_skcipher_step(struct crypto_async_request *req)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
		mv_cesa_dma_step(&creq->base);
	else
		mv_cesa_skcipher_std_step(skreq);
}

static inline void
mv_cesa_skcipher_dma_prepare(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_req *basereq = &creq->base;

	mv_cesa_dma_prepare(basereq, basereq->engine);
}

static inline void
mv_cesa_skcipher_std_prepare(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;

	sreq->size = 0;
	sreq->offset = 0;
}

static inline void mv_cesa_skcipher_prepare(struct crypto_async_request *req,
					    struct mv_cesa_engine *engine)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);

	creq->base.engine = engine;

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
		mv_cesa_skcipher_dma_prepare(skreq);
	else
		mv_cesa_skcipher_std_prepare(skreq);
}

static inline void
mv_cesa_skcipher_req_cleanup(struct crypto_async_request *req)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);

	mv_cesa_skcipher_cleanup(skreq);
}

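/*
 * Final completion hook: drop this request's weight from the engine load
 * counter and copy the output IV back into the request. In DMA mode the IV
 * comes from the last operation descriptor of the TDMA chain; in standard
 * mode it is read back from the IV area of the engine SRAM.
 */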
static void
mv_cesa_skcipher_complete(struct crypto_async_request *req)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
	struct mv_cesa_engine *engine = creq->base.engine;
	unsigned int ivsize;

	atomic_sub(skreq->cryptlen, &engine->load);
	ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(skreq));

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) {
		struct mv_cesa_req *basereq;

		basereq = &creq->base;
		memcpy(skreq->iv, basereq->chain.last->op->ctx.skcipher.iv,
		       ivsize);
	} else {
		memcpy_fromio(skreq->iv,
			      engine->sram + CESA_SA_CRYPT_IV_SRAM_OFFSET,
			      ivsize);
	}
}

static const struct mv_cesa_req_ops mv_cesa_skcipher_req_ops = {
	.step = mv_cesa_skcipher_step,
	.process = mv_cesa_skcipher_process,
	.cleanup = mv_cesa_skcipher_req_cleanup,
	.complete = mv_cesa_skcipher_complete,
};

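/* Wipe key material (including any expanded AES schedule) on tfm teardown. */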
static void mv_cesa_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	void *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx, tfm->__crt_alg->cra_ctxsize);
}

static int mv_cesa_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct mv_cesa_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops = &mv_cesa_skcipher_req_ops;

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct mv_cesa_skcipher_req));

	return 0;
}

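/*
 * key_dec[] is repurposed here to hold the tail of the expanded encryption
 * schedule: aes_expandkey() already stores the last round key in
 * key_dec[0..3], and for 192- and 256-bit keys the loop below appends the
 * remaining words of the schedule tail. mv_cesa_aes_op() programs these
 * words as the hardware key for decryption operations, presumably because
 * the engine expands its own decryption schedule from the final round keys.
 */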
static int mv_cesa_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			      unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int remaining;
	int offset;
	int ret;
	int i;

	ret = aes_expandkey(&ctx->aes, key, len);
	if (ret)
		return ret;

	remaining = (ctx->aes.key_length - 16) / 4;
	offset = ctx->aes.key_length + 24 - remaining;
	for (i = 0; i < remaining; i++)
		ctx->aes.key_dec[4 + i] = ctx->aes.key_enc[offset + i];

	return 0;
}

static int mv_cesa_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
			      unsigned int len)
{
	struct mv_cesa_des_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, DES_KEY_SIZE);

	return 0;
}

static int mv_cesa_des3_ede_setkey(struct crypto_skcipher *cipher,
				   const u8 *key, unsigned int len)
{
	struct mv_cesa_des3_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, DES3_EDE_KEY_SIZE);

	return 0;
}

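/*
 * DMA (TDMA) mode: build a descriptor chain that, for each SRAM-sized chunk,
 * copies the operation descriptor (context skipped after the first chunk)
 * and the input data into SRAM, inserts a dummy descriptor to launch the
 * crypto operation, and copies the result back out. A final "result"
 * descriptor fetches the updated IV from SRAM, and the last descriptor is
 * flagged with CESA_TDMA_END_OF_REQ.
 */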
static int mv_cesa_skcipher_dma_req_init(struct skcipher_request *req,
					 const struct mv_cesa_op_ctx *op_templ)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	struct mv_cesa_req *basereq = &creq->base;
	struct mv_cesa_skcipher_dma_iter iter;
	bool skip_ctx = false;
	int ret;

	basereq->chain.first = NULL;
	basereq->chain.last = NULL;

	if (req->src != req->dst) {
		ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
				 DMA_TO_DEVICE);
		if (!ret)
			return -ENOMEM;

		ret = dma_map_sg(cesa_dev->dev, req->dst, creq->dst_nents,
				 DMA_FROM_DEVICE);
		if (!ret) {
			ret = -ENOMEM;
			goto err_unmap_src;
		}
	} else {
		ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
				 DMA_BIDIRECTIONAL);
		if (!ret)
			return -ENOMEM;
	}

	mv_cesa_tdma_desc_iter_init(&basereq->chain);
	mv_cesa_skcipher_req_iter_init(&iter, req);

	do {
		struct mv_cesa_op_ctx *op;

		op = mv_cesa_dma_add_op(&basereq->chain, op_templ, skip_ctx,
					flags);
		if (IS_ERR(op)) {
			ret = PTR_ERR(op);
			goto err_free_tdma;
		}
		skip_ctx = true;

		mv_cesa_set_crypt_op_len(op, iter.base.op_len);

		/* Add input transfers */
		ret = mv_cesa_dma_add_op_transfers(&basereq->chain, &iter.base,
						   &iter.src, flags);
		if (ret)
			goto err_free_tdma;

		/* Add dummy desc to launch the crypto operation */
		ret = mv_cesa_dma_add_dummy_launch(&basereq->chain, flags);
		if (ret)
			goto err_free_tdma;

		/* Add output transfers */
		ret = mv_cesa_dma_add_op_transfers(&basereq->chain, &iter.base,
						   &iter.dst, flags);
		if (ret)
			goto err_free_tdma;

	} while (mv_cesa_skcipher_req_iter_next_op(&iter));

	/* Add output data for IV */
	ret = mv_cesa_dma_add_result_op(&basereq->chain,
					CESA_SA_CFG_SRAM_OFFSET,
					CESA_SA_DATA_SRAM_OFFSET,
					CESA_TDMA_SRC_IN_SRAM, flags);

	if (ret)
		goto err_free_tdma;

	basereq->chain.last->flags |= CESA_TDMA_END_OF_REQ;

	return 0;

err_free_tdma:
	mv_cesa_dma_cleanup(basereq);
	if (req->dst != req->src)
		dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
			     DMA_FROM_DEVICE);

err_unmap_src:
	dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
		     req->dst != req->src ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL);

	return ret;
}

static inline int
mv_cesa_skcipher_std_req_init(struct skcipher_request *req,
			      const struct mv_cesa_op_ctx *op_templ)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
	struct mv_cesa_req *basereq = &creq->base;

	sreq->op = *op_templ;
	sreq->skip_ctx = false;
	basereq->chain.first = NULL;
	basereq->chain.last = NULL;

	return 0;
}

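/*
 * Common request setup: reject lengths that are not a multiple of the cipher
 * block size, count the source/destination scatterlist entries, force a
 * "crypto only" operation, and pick DMA or standard mode depending on
 * whether the engine has a TDMA unit.
 */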
static int mv_cesa_skcipher_req_init(struct skcipher_request *req,
				     struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int blksize = crypto_skcipher_blocksize(tfm);
	int ret;

	if (!IS_ALIGNED(req->cryptlen, blksize))
		return -EINVAL;

	creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (creq->src_nents < 0) {
		dev_err(cesa_dev->dev, "Invalid number of src SG");
		return creq->src_nents;
	}
	creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
	if (creq->dst_nents < 0) {
		dev_err(cesa_dev->dev, "Invalid number of dst SG");
		return creq->dst_nents;
	}

	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_OP_CRYPT_ONLY,
			      CESA_SA_DESC_CFG_OP_MSK);

	if (cesa_dev->caps->has_tdma)
		ret = mv_cesa_skcipher_dma_req_init(req, tmpl);
	else
		ret = mv_cesa_skcipher_std_req_init(req, tmpl);

	return ret;
}

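/*
 * Hand the request over to the CESA core: pick the least loaded engine
 * (weighted by the request length), prepare the request for it, and queue
 * it. If queueing fails in a way that means the request will never be
 * processed, release the resources allocated by mv_cesa_skcipher_req_init().
 */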
static int mv_cesa_skcipher_queue_req(struct skcipher_request *req,
				      struct mv_cesa_op_ctx *tmpl)
{
	int ret;
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_engine *engine;

	ret = mv_cesa_skcipher_req_init(req, tmpl);
	if (ret)
		return ret;

	engine = mv_cesa_select_engine(req->cryptlen);
	mv_cesa_skcipher_prepare(&req->base, engine);

	ret = mv_cesa_queue_req(&req->base, &creq->base);

	if (mv_cesa_req_needs_cleanup(&req->base, ret))
		mv_cesa_skcipher_cleanup(req);

	return ret;
}

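/*
 * The algorithm entry points below all follow the same pattern: build an
 * operation template on the stack with the direction and chaining-mode bits,
 * let the cipher-specific helper add the algorithm selection, the key (and
 * the IV for CBC), and then queue the request.
 */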
static int mv_cesa_des_op(struct skcipher_request *req,
			  struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_des_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTM_DES,
			      CESA_SA_DESC_CFG_CRYPTM_MSK);

	memcpy(tmpl->ctx.skcipher.key, ctx->key, DES_KEY_SIZE);

	return mv_cesa_skcipher_queue_req(req, tmpl);
}

static int mv_cesa_ecb_des_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_des_op(req, &tmpl);
}

static int mv_cesa_ecb_des_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_des_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_ecb_des_alg = {
	.setkey = mv_cesa_des_setkey,
	.encrypt = mv_cesa_ecb_des_encrypt,
	.decrypt = mv_cesa_ecb_des_decrypt,
	.min_keysize = DES_KEY_SIZE,
	.max_keysize = DES_KEY_SIZE,
	.base = {
		.cra_name = "ecb(des)",
		.cra_driver_name = "mv-ecb-des",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_cbc_des_op(struct skcipher_request *req,
			      struct mv_cesa_op_ctx *tmpl)
{
	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTCM_CBC,
			      CESA_SA_DESC_CFG_CRYPTCM_MSK);

	memcpy(tmpl->ctx.skcipher.iv, req->iv, DES_BLOCK_SIZE);

	return mv_cesa_des_op(req, tmpl);
}

static int mv_cesa_cbc_des_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_cbc_des_op(req, &tmpl);
}

static int mv_cesa_cbc_des_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_cbc_des_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_cbc_des_alg = {
	.setkey = mv_cesa_des_setkey,
	.encrypt = mv_cesa_cbc_des_encrypt,
	.decrypt = mv_cesa_cbc_des_decrypt,
	.min_keysize = DES_KEY_SIZE,
	.max_keysize = DES_KEY_SIZE,
	.ivsize = DES_BLOCK_SIZE,
	.base = {
		.cra_name = "cbc(des)",
		.cra_driver_name = "mv-cbc-des",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_des3_op(struct skcipher_request *req,
			   struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_des3_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTM_3DES,
			      CESA_SA_DESC_CFG_CRYPTM_MSK);

	memcpy(tmpl->ctx.skcipher.key, ctx->key, DES3_EDE_KEY_SIZE);

	return mv_cesa_skcipher_queue_req(req, tmpl);
}

static int mv_cesa_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_des3_op(req, &tmpl);
}

static int mv_cesa_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_des3_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_ecb_des3_ede_alg = {
	.setkey = mv_cesa_des3_ede_setkey,
	.encrypt = mv_cesa_ecb_des3_ede_encrypt,
	.decrypt = mv_cesa_ecb_des3_ede_decrypt,
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "mv-ecb-des3-ede",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des3_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_cbc_des3_op(struct skcipher_request *req,
			       struct mv_cesa_op_ctx *tmpl)
{
	memcpy(tmpl->ctx.skcipher.iv, req->iv, DES3_EDE_BLOCK_SIZE);

	return mv_cesa_des3_op(req, tmpl);
}

static int mv_cesa_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_CBC |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_cbc_des3_op(req, &tmpl);
}

static int mv_cesa_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_CBC |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_cbc_des3_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_cbc_des3_ede_alg = {
	.setkey = mv_cesa_des3_ede_setkey,
	.encrypt = mv_cesa_cbc_des3_ede_encrypt,
	.decrypt = mv_cesa_cbc_des3_ede_decrypt,
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "mv-cbc-des3-ede",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des3_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

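/*
 * AES: pick the encryption or (pre-computed) decryption key words depending
 * on the direction bit, copy them little-endian into the operation context,
 * and set the key-length configuration bits for AES-192/256.
 */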
static int mv_cesa_aes_op(struct skcipher_request *req,
			  struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int i;
	u32 *key;
	u32 cfg;

	cfg = CESA_SA_DESC_CFG_CRYPTM_AES;

	if (mv_cesa_get_op_cfg(tmpl) & CESA_SA_DESC_CFG_DIR_DEC)
		key = ctx->aes.key_dec;
	else
		key = ctx->aes.key_enc;

	for (i = 0; i < ctx->aes.key_length / sizeof(u32); i++)
		tmpl->ctx.skcipher.key[i] = cpu_to_le32(key[i]);

	if (ctx->aes.key_length == 24)
		cfg |= CESA_SA_DESC_CFG_AES_LEN_192;
	else if (ctx->aes.key_length == 32)
		cfg |= CESA_SA_DESC_CFG_AES_LEN_256;

	mv_cesa_update_op_cfg(tmpl, cfg,
			      CESA_SA_DESC_CFG_CRYPTM_MSK |
			      CESA_SA_DESC_CFG_AES_LEN_MSK);

	return mv_cesa_skcipher_queue_req(req, tmpl);
}

static int mv_cesa_ecb_aes_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_aes_op(req, &tmpl);
}

static int mv_cesa_ecb_aes_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_aes_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_ecb_aes_alg = {
	.setkey = mv_cesa_aes_setkey,
	.encrypt = mv_cesa_ecb_aes_encrypt,
	.decrypt = mv_cesa_ecb_aes_decrypt,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "mv-ecb-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_aes_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_cbc_aes_op(struct skcipher_request *req,
			      struct mv_cesa_op_ctx *tmpl)
{
	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTCM_CBC,
			      CESA_SA_DESC_CFG_CRYPTCM_MSK);
	memcpy(tmpl->ctx.skcipher.iv, req->iv, AES_BLOCK_SIZE);

	return mv_cesa_aes_op(req, tmpl);
}

static int mv_cesa_cbc_aes_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_cbc_aes_op(req, &tmpl);
}

static int mv_cesa_cbc_aes_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_cbc_aes_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_cbc_aes_alg = {
	.setkey = mv_cesa_aes_setkey,
	.encrypt = mv_cesa_cbc_aes_encrypt,
	.decrypt = mv_cesa_cbc_aes_decrypt,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "mv-cbc-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_aes_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};
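
/*
 * Usage note (illustrative sketch, not part of the driver): once the CESA
 * core registers the skcipher_alg instances above, kernel users reach them
 * through the generic crypto API rather than by calling this file directly.
 * A minimal synchronous example for "cbc(aes)" might look like the
 * following, assuming key/iv buffers and src_sg/dst_sg scatterlists are set
 * up by the caller:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	err = crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * Error handling, skcipher_request_free() and crypto_free_skcipher() are
 * omitted for brevity.
 */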