// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * Support for ATMEL AES HW acceleration.
 *
 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
 * Author: Nicolas Royer <nicolas@eukrea.com>
 *
 * Some ideas are from omap-aes.c driver.
 */


#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/of_device.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include "atmel-aes-regs.h"
#include "atmel-authenc.h"

#define ATMEL_AES_PRIORITY	300

#define ATMEL_AES_BUFFER_ORDER	2
#define ATMEL_AES_BUFFER_SIZE	(PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)

#define CFB8_BLOCK_SIZE		1
#define CFB16_BLOCK_SIZE	2
#define CFB32_BLOCK_SIZE	4
#define CFB64_BLOCK_SIZE	8

#define SIZE_IN_WORDS(x)	((x) >> 2)
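
/*
 * Example (illustrative): the peripheral's registers are 32 bits wide,
 * so byte counts are converted to word counts before the register
 * access loops, e.g. SIZE_IN_WORDS(AES_BLOCK_SIZE) == 4 and
 * SIZE_IN_WORDS(AES_KEYSIZE_256) == 8.
 */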

/* AES flags */
/* Reserve bits [18:16] [14:12] [1:0] for mode (same as for AES_MR) */
#define AES_FLAGS_ENCRYPT	AES_MR_CYPHER_ENC
#define AES_FLAGS_GTAGEN	AES_MR_GTAGEN
#define AES_FLAGS_OPMODE_MASK	(AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
#define AES_FLAGS_ECB		AES_MR_OPMOD_ECB
#define AES_FLAGS_CBC		AES_MR_OPMOD_CBC
#define AES_FLAGS_OFB		AES_MR_OPMOD_OFB
#define AES_FLAGS_CFB128	(AES_MR_OPMOD_CFB | AES_MR_CFBS_128b)
#define AES_FLAGS_CFB64		(AES_MR_OPMOD_CFB | AES_MR_CFBS_64b)
#define AES_FLAGS_CFB32		(AES_MR_OPMOD_CFB | AES_MR_CFBS_32b)
#define AES_FLAGS_CFB16		(AES_MR_OPMOD_CFB | AES_MR_CFBS_16b)
#define AES_FLAGS_CFB8		(AES_MR_OPMOD_CFB | AES_MR_CFBS_8b)
#define AES_FLAGS_CTR		AES_MR_OPMOD_CTR
#define AES_FLAGS_GCM		AES_MR_OPMOD_GCM
#define AES_FLAGS_XTS		AES_MR_OPMOD_XTS

#define AES_FLAGS_MODE_MASK	(AES_FLAGS_OPMODE_MASK |	\
				 AES_FLAGS_ENCRYPT |		\
				 AES_FLAGS_GTAGEN)
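
/*
 * Example (illustrative): since the flags above reuse the AES_MR bit
 * layout, the mode bits collected in dd->flags can be OR'ed directly
 * into the Mode Register, as atmel_aes_write_ctrl_key() does below with
 * "valmr |= dd->flags & AES_FLAGS_MODE_MASK". A CBC encryption request,
 * for instance, carries (AES_FLAGS_CBC | AES_FLAGS_ENCRYPT) in
 * rctx->mode.
 */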

#define AES_FLAGS_BUSY		BIT(3)
#define AES_FLAGS_DUMP_REG	BIT(4)
#define AES_FLAGS_OWN_SHA	BIT(5)

#define AES_FLAGS_PERSISTENT	AES_FLAGS_BUSY

#define ATMEL_AES_QUEUE_LENGTH	50

#define ATMEL_AES_DMA_THRESHOLD	256


struct atmel_aes_caps {
	bool	has_dualbuff;
	bool	has_cfb64;
	bool	has_gcm;
	bool	has_xts;
	bool	has_authenc;
	u32	max_burst_size;
};

struct atmel_aes_dev;


typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);


struct atmel_aes_base_ctx {
	struct atmel_aes_dev	*dd;
	atmel_aes_fn_t		start;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	u16			block_size;
	bool			is_aead;
};

struct atmel_aes_ctx {
	struct atmel_aes_base_ctx	base;
};

struct atmel_aes_ctr_ctx {
	struct atmel_aes_base_ctx	base;

	__be32			iv[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			offset;
	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	u32			blocks;
};

struct atmel_aes_gcm_ctx {
	struct atmel_aes_base_ctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];

	__be32			j0[AES_BLOCK_SIZE / sizeof(u32)];
	u32			tag[AES_BLOCK_SIZE / sizeof(u32)];
	__be32			ghash[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			textlen;

	const __be32		*ghash_in;
	__be32			*ghash_out;
	atmel_aes_fn_t		ghash_resume;
};

struct atmel_aes_xts_ctx {
	struct atmel_aes_base_ctx	base;

	u32			key2[AES_KEYSIZE_256 / sizeof(u32)];
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_ctx {
	struct atmel_aes_base_ctx	base;
	struct atmel_sha_authenc_ctx	*auth;
};
#endif

struct atmel_aes_reqctx {
	unsigned long		mode;
	u8			lastc[AES_BLOCK_SIZE];
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_reqctx {
	struct atmel_aes_reqctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	size_t			textlen;
	u32			digest[SHA512_DIGEST_SIZE / sizeof(u32)];

	/* auth_req MUST be placed last. */
	struct ahash_request	auth_req;
};
#endif

struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct scatterlist	*sg;
	int			nents;
	unsigned int		remainder;
	unsigned int		sg_len;
};

struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct crypto_async_request	*areq;
	struct atmel_aes_base_ctx	*ctx;

	bool			is_async;
	atmel_aes_fn_t		resume;
	atmel_aes_fn_t		cpu_transfer_complete;

	struct device		*dev;
	struct clk		*iclk;
	int			irq;

	unsigned long		flags;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	size_t			total;
	size_t			datalen;
	u32			*data;

	struct atmel_aes_dma	src;
	struct atmel_aes_dma	dst;

	size_t			buflen;
	void			*buf;
	struct scatterlist	aligned_sg;
	struct scatterlist	*real_dst;

	struct atmel_aes_caps	caps;

	u32			hw_version;
};

struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};

#ifdef VERBOSE_DEBUG
static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
{
	switch (offset) {
	case AES_CR:
		return "CR";

	case AES_MR:
		return "MR";

	case AES_ISR:
		return "ISR";

	case AES_IMR:
		return "IMR";

	case AES_IER:
		return "IER";

	case AES_IDR:
		return "IDR";

	case AES_KEYWR(0):
	case AES_KEYWR(1):
	case AES_KEYWR(2):
	case AES_KEYWR(3):
	case AES_KEYWR(4):
	case AES_KEYWR(5):
	case AES_KEYWR(6):
	case AES_KEYWR(7):
		snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
		break;

	case AES_IDATAR(0):
	case AES_IDATAR(1):
	case AES_IDATAR(2):
	case AES_IDATAR(3):
		snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
		break;

	case AES_ODATAR(0):
	case AES_ODATAR(1):
	case AES_ODATAR(2):
	case AES_ODATAR(3):
		snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
		break;

	case AES_IVR(0):
	case AES_IVR(1):
	case AES_IVR(2):
	case AES_IVR(3):
		snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
		break;

	case AES_AADLENR:
		return "AADLENR";

	case AES_CLENR:
		return "CLENR";

	case AES_GHASHR(0):
	case AES_GHASHR(1):
	case AES_GHASHR(2):
	case AES_GHASHR(3):
		snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
		break;

	case AES_TAGR(0):
	case AES_TAGR(1):
	case AES_TAGR(2):
	case AES_TAGR(3):
		snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
		break;

	case AES_CTRR:
		return "CTRR";

	case AES_GCMHR(0):
	case AES_GCMHR(1):
	case AES_GCMHR(2):
	case AES_GCMHR(3):
		snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
		break;

	case AES_EMR:
		return "EMR";

	case AES_TWR(0):
	case AES_TWR(1):
	case AES_TWR(2):
	case AES_TWR(3):
		snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2);
		break;

	case AES_ALPHAR(0):
	case AES_ALPHAR(1):
	case AES_ALPHAR(2):
	case AES_ALPHAR(3):
		snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2);
		break;

	default:
		snprintf(tmp, sz, "0x%02x", offset);
		break;
	}

	return tmp;
}
#endif /* VERBOSE_DEBUG */

/* Shared functions */

static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	u32 value = readl_relaxed(dd->io_base + offset);

#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	return value;
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
				   u32 offset, u32 value)
{
#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	writel_relaxed(value, dd->io_base + offset);
}

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      const u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
					void *value)
{
	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
					 const void *value)
{
	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
						atmel_aes_fn_t resume)
{
	u32 isr = atmel_aes_read(dd, AES_ISR);

	if (unlikely(isr & AES_INT_DATARDY))
		return resume(dd);

	dd->resume = resume;
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	return -EINPROGRESS;
}
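
/*
 * Note (illustrative): this is the driver's core asynchronous pattern.
 * If the hardware is already done, the resume callback runs
 * immediately; otherwise the callback is parked in dd->resume, the
 * Data Ready interrupt is enabled, and -EINPROGRESS tells the caller
 * that the interrupt handler will continue the operation later.
 */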

static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
{
	len &= block_size - 1;
	return len ? block_size - len : 0;
}
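
/*
 * Example (illustrative, assuming a power-of-two block_size as used
 * throughout this driver): atmel_aes_padlen(20, AES_BLOCK_SIZE)
 * returns 12, the padding needed to round 20 bytes up to the next
 * 16-byte boundary, while atmel_aes_padlen(32, AES_BLOCK_SIZE)
 * returns 0.
 */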

static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		aes_dd = ctx->dd;
	}

	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}

static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_enable(dd->iclk);
	if (err)
		return err;

	atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
	atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);

	return 0;
}

static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
	return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	int err;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable(dd->iclk);
	return 0;
}

static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
				      const struct atmel_aes_reqctx *rctx)
{
	/* Clear all but persistent flags and set request flags. */
	dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
}

static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
{
	return (dd->flags & AES_FLAGS_ENCRYPT);
}

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
#endif

static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	if (req->cryptlen < ivsize)
		return;

	if (rctx->mode & AES_FLAGS_ENCRYPT) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->cryptlen - ivsize, ivsize, 0);
	} else {
		if (req->src == req->dst)
			memcpy(req->iv, rctx->lastc, ivsize);
		else
			scatterwalk_map_and_copy(req->iv, req->src,
						 req->cryptlen - ivsize,
						 ivsize, 0);
	}
}
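
/*
 * Note (illustrative): in CBC-like modes the next IV is the last
 * ciphertext block. On encryption it can be read back from req->dst;
 * on in-place decryption (req->src == req->dst) the ciphertext has
 * already been overwritten with plaintext, which is why the block
 * saved in rctx->lastc before the operation started is used instead.
 */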

static inline struct atmel_aes_ctr_ctx *
atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
}

static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	int i;

	/*
	 * The CTR transfer works in fragments of data of maximum 1 MByte
	 * because of the 16 bit CTR counter embedded in the IP. When we
	 * reach this point, ctx->blocks contains the number of blocks of
	 * the last fragment processed, so there is no need to explicitly
	 * cast it to u16.
	 */
	for (i = 0; i < ctx->blocks; i++)
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);

	memcpy(req->iv, ctx->iv, ivsize);
}
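
/*
 * Example (illustrative): crypto_inc() increments the 16-byte counter
 * block as a big-endian integer, once per processed block; an IV whose
 * counter field ends in ...00 01 ends in ...00 04 after three blocks,
 * matching the value the hardware counter reached.
 */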

static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
{
	struct skcipher_request *req = skcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->ctx->is_aead)
		atmel_aes_authenc_complete(dd, err);
#endif

	clk_disable(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	if (!err && !dd->ctx->is_aead &&
	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) {
		if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR)
			atmel_aes_set_iv_as_last_ciphertext_block(dd);
		else
			atmel_aes_ctr_update_req_iv(dd);
	}

	if (dd->is_async)
		dd->areq->complete(dd->areq, err);

	tasklet_schedule(&dd->queue_task);

	return err;
}

static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
				     const __be32 *iv, const u32 *key,
				     int keylen)
{
	u32 valmr = 0;

	/* MR register must be set before IV registers */
	if (keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	valmr |= dd->flags & AES_FLAGS_MODE_MASK;

	if (use_dma) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen));

	if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
		atmel_aes_write_block(dd, AES_IVR(0), iv);
}

static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
					const __be32 *iv)
{
	atmel_aes_write_ctrl_key(dd, use_dma, iv,
				 dd->ctx->key, dd->ctx->keylen);
}

/* CPU transfer */

static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
{
	int err = 0;
	u32 isr;

	for (;;) {
		atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		if (dd->datalen < AES_BLOCK_SIZE)
			break;

		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_cpu_transfer;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				 dd->buf, dd->total))
		err = -EINVAL;

	if (err)
		return atmel_aes_complete(dd, err);

	return dd->cpu_transfer_complete(dd);
}

static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);

	if (unlikely(len == 0))
		return -EINVAL;

	sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

	dd->total = len;
	dd->real_dst = dst;
	dd->cpu_transfer_complete = resume;
	dd->datalen = len + padlen;
	dd->data = (u32 *)dd->buf;
	atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
}
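
/*
 * Note (illustrative): in this PIO path the whole request is first
 * linearized into dd->buf, the first block is written to IDATAR[0..3]
 * above, and atmel_aes_cpu_transfer() then reads one output block and
 * feeds the next input block each time AES_INT_DATARDY is set,
 * returning -EINPROGRESS whenever it has to wait for the interrupt.
 */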


/* DMA transfer */

static void atmel_aes_dma_callback(void *data);

static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
				    struct scatterlist *sg,
				    size_t len,
				    struct atmel_aes_dma *dma)
{
	int nents;

	if (!IS_ALIGNED(len, dd->ctx->block_size))
		return false;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, dd->ctx->block_size))
				return false;

			dma->nents = nents + 1;
			dma->remainder = sg->length - len;
			sg->length = len;
			return true;
		}

		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
			return false;

		len -= sg->length;
	}

	return false;
}
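
/*
 * Example (illustrative): a scatterlist qualifies for zero-copy DMA
 * only if every entry starts on a 32-bit boundary and every entry but
 * the last covers a whole number of blocks. With a 16-byte block size
 * and len == 80, entries of 48 + 32 bytes pass, while 40 + 40 bytes
 * fail and atmel_aes_map() falls back to the bounce buffer (dd->buf).
 */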

static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
{
	struct scatterlist *sg = dma->sg;
	int nents = dma->nents;

	if (!dma->remainder)
		return;

	while (--nents > 0 && sg)
		sg = sg_next(sg);

	if (!sg)
		return;

	sg->length += dma->remainder;
}

static int atmel_aes_map(struct atmel_aes_dev *dd,
			 struct scatterlist *src,
			 struct scatterlist *dst,
			 size_t len)
{
	bool src_aligned, dst_aligned;
	size_t padlen;

	dd->total = len;
	dd->src.sg = src;
	dd->dst.sg = dst;
	dd->real_dst = dst;

	src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
	if (src == dst)
		dst_aligned = src_aligned;
	else
		dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
	if (!src_aligned || !dst_aligned) {
		padlen = atmel_aes_padlen(len, dd->ctx->block_size);

		if (dd->buflen < len + padlen)
			return -ENOMEM;

		if (!src_aligned) {
			sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
			dd->src.sg = &dd->aligned_sg;
			dd->src.nents = 1;
			dd->src.remainder = 0;
		}

		if (!dst_aligned) {
			dd->dst.sg = &dd->aligned_sg;
			dd->dst.nents = 1;
			dd->dst.remainder = 0;
		}

		sg_init_table(&dd->aligned_sg, 1);
		sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
	}

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}

	return 0;
}

static void atmel_aes_unmap(struct atmel_aes_dev *dd)
{
	if (dd->src.sg == dd->dst.sg) {
		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_BIDIRECTIONAL);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	} else {
		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
			     DMA_FROM_DEVICE);

		if (dd->dst.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->dst);

		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_TO_DEVICE);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	}

	if (dd->dst.sg == &dd->aligned_sg)
		sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				    dd->buf, dd->total);
}

static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
					enum dma_slave_buswidth addr_width,
					enum dma_transfer_direction dir,
					u32 maxburst)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_slave_config config;
	dma_async_tx_callback callback;
	struct atmel_aes_dma *dma;
	int err;

	memset(&config, 0, sizeof(config));
	config.src_addr_width = addr_width;
	config.dst_addr_width = addr_width;
	config.src_maxburst = maxburst;
	config.dst_maxburst = maxburst;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		callback = NULL;
		config.dst_addr = dd->phys_base + AES_IDATAR(0);
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		callback = atmel_aes_dma_callback;
		config.src_addr = dd->phys_base + AES_ODATAR(0);
		break;

	default:
		return -EINVAL;
	}

	err = dmaengine_slave_config(dma->chan, &config);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = callback;
	desc->callback_param = dd;
	dmaengine_submit(desc);
	dma_async_issue_pending(dma->chan);

	return 0;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 864)
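/*
 * Map the scatterlists and submit both transfers; the function returns
 * -EINPROGRESS and completion is signalled through
 * atmel_aes_dma_callback(), which unmaps the buffers and invokes the
 * saved dd->resume() handler.
 */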
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 865) static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 866) struct scatterlist *src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 867) struct scatterlist *dst,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 868) size_t len,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 869) atmel_aes_fn_t resume)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 870) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 871) enum dma_slave_buswidth addr_width;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 872) u32 maxburst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 873) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 874)
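	/*
	 * Select a DMA bus width matching the operation's block size:
	 * the partial-block CFB variants move 1, 2 or 4 bytes per beat,
	 * while full 16-byte AES blocks use 32-bit words with the
	 * largest burst size the hardware supports.
	 */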
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 875) switch (dd->ctx->block_size) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 876) case CFB8_BLOCK_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 877) addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 878) maxburst = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 879) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 880)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 881) case CFB16_BLOCK_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 882) addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 883) maxburst = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 884) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 885)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 886) case CFB32_BLOCK_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 887) case CFB64_BLOCK_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 888) addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 889) maxburst = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 890) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 891)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 892) case AES_BLOCK_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 893) addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 894) maxburst = dd->caps.max_burst_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 895) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 896)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 897) default:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 898) err = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 899) goto exit;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 900) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 901)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 902) err = atmel_aes_map(dd, src, dst, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 903) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 904) goto exit;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 905)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 906) dd->resume = resume;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 907)
	/*
	 * Start the output (device-to-memory) transfer first, so the read
	 * channel is ready before input data makes the engine produce
	 * output.
	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 909) err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 910) maxburst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 911) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 912) goto unmap;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 913)
	/* Then start the input (memory-to-device) DMA transfer. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 915) err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 916) maxburst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 917) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 918) goto output_transfer_stop;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 919)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 920) return -EINPROGRESS;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 921)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 922) output_transfer_stop:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 923) dmaengine_terminate_sync(dd->dst.chan);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 924) unmap:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 925) atmel_aes_unmap(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 926) exit:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 927) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 928) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 929)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 930) static void atmel_aes_dma_callback(void *data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 931) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 932) struct atmel_aes_dev *dd = data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 933)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 934) atmel_aes_unmap(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 935) dd->is_async = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 936) (void)dd->resume(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 937) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 938)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 939) static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 940) struct crypto_async_request *new_areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 941) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 942) struct crypto_async_request *areq, *backlog;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 943) struct atmel_aes_base_ctx *ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 944) unsigned long flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 945) bool start_async;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 946) int err, ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 947)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 948) spin_lock_irqsave(&dd->lock, flags);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 949) if (new_areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 950) ret = crypto_enqueue_request(&dd->queue, new_areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 951) if (dd->flags & AES_FLAGS_BUSY) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 952) spin_unlock_irqrestore(&dd->lock, flags);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 953) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 954) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 955) backlog = crypto_get_backlog(&dd->queue);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 956) areq = crypto_dequeue_request(&dd->queue);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 957) if (areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 958) dd->flags |= AES_FLAGS_BUSY;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 959) spin_unlock_irqrestore(&dd->lock, flags);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 960)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 961) if (!areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 962) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 963)
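	/*
	 * Tell the owner of a backlogged request that it has moved from
	 * the backlog into the regular queue and is now in progress.
	 */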
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 964) if (backlog)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 965) backlog->complete(backlog, -EINPROGRESS);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 966)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 967) ctx = crypto_tfm_ctx(areq->tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 968)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 969) dd->areq = areq;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 970) dd->ctx = ctx;
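	/*
	 * If the dequeued request is not the one just submitted, the
	 * submitter has already returned and the request must complete
	 * asynchronously.
	 */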
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 971) start_async = (areq != new_areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 972) dd->is_async = start_async;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 973)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 974) /* WARNING: ctx->start() MAY change dd->is_async. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 975) err = ctx->start(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 976) return (start_async) ? ret : err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 977) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 978)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 979)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 980) /* AES async block ciphers */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 981)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 982) static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 983) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 984) return atmel_aes_complete(dd, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 985) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 986)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 987) static int atmel_aes_start(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 988) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 989) struct skcipher_request *req = skcipher_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 990) struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
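	/*
	 * Use DMA for large requests and for all partial-block
	 * (CFB8/16/32/64) modes; the CPU (PIO) path only moves full
	 * 16-byte AES blocks.
	 */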
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 991) bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 992) dd->ctx->block_size != AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 993) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 994)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 995) atmel_aes_set_mode(dd, rctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 996)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 997) err = atmel_aes_hw_init(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 998) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 999) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1000)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1001) atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1002) if (use_dma)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1003) return atmel_aes_dma_start(dd, req->src, req->dst,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1004) req->cryptlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1005) atmel_aes_transfer_complete);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1006)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1007) return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1008) atmel_aes_transfer_complete);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1009) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1010)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1011) static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1012) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1013) struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1014) struct skcipher_request *req = skcipher_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1015) struct scatterlist *src, *dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1016) size_t datalen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1017) u32 ctr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1018) u16 start, end;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1019) bool use_dma, fragmented = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1020)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1021) /* Check for transfer completion. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1022) ctx->offset += dd->total;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1023) if (ctx->offset >= req->cryptlen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1024) return atmel_aes_transfer_complete(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1025)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1026) /* Compute data length. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1027) datalen = req->cryptlen - ctx->offset;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1028) ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1029) ctr = be32_to_cpu(ctx->iv[3]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1030)
	/* Check for 16-bit counter overflow. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1032) start = ctr & 0xffff;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1033) end = start + ctx->blocks - 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1034)
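	/*
	 * Example: start = 0xfffe with ctx->blocks = 4 gives end = 0x0001,
	 * which wraps below start; only 0x10000 - 0xfffe = 2 blocks are
	 * processed in this round and the request continues fragmented.
	 */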
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1035) if (ctx->blocks >> 16 || end < start) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1036) ctr |= 0xffff;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1037) datalen = AES_BLOCK_SIZE * (0x10000 - start);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1038) fragmented = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1039) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1040)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1041) use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1042)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1043) /* Jump to offset. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1044) src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1045) dst = ((req->src == req->dst) ? src :
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1046) scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1047)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1048) /* Configure hardware. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1049) atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1050) if (unlikely(fragmented)) {
		/*
		 * Increment the counter manually to cope with the hardware
		 * counter overflow: ctr | 0xffff is the last counter value
		 * of this fragment, so crypto_inc() on the full 128-bit IV
		 * yields the first counter block of the next fragment,
		 * propagating the carry into the upper IV words.
		 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1055) ctx->iv[3] = cpu_to_be32(ctr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1056) crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1057) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1058)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1059) if (use_dma)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1060) return atmel_aes_dma_start(dd, src, dst, datalen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1061) atmel_aes_ctr_transfer);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1062)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1063) return atmel_aes_cpu_start(dd, src, dst, datalen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1064) atmel_aes_ctr_transfer);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1065) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1066)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1067) static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1068) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1069) struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1070) struct skcipher_request *req = skcipher_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1071) struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1072) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1073)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1074) atmel_aes_set_mode(dd, rctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1075)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1076) err = atmel_aes_hw_init(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1077) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1078) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1079)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1080) memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1081) ctx->offset = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1082) dd->total = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1083) return atmel_aes_ctr_transfer(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1084) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1085)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1086) static int atmel_aes_crypt(struct skcipher_request *req, unsigned long mode)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1087) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1088) struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1089) struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1090) struct atmel_aes_reqctx *rctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1091) struct atmel_aes_dev *dd;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1092)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1093) switch (mode & AES_FLAGS_OPMODE_MASK) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1094) case AES_FLAGS_CFB8:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1095) ctx->block_size = CFB8_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1096) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1097)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1098) case AES_FLAGS_CFB16:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1099) ctx->block_size = CFB16_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1100) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1101)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1102) case AES_FLAGS_CFB32:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1103) ctx->block_size = CFB32_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1104) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1105)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1106) case AES_FLAGS_CFB64:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1107) ctx->block_size = CFB64_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1108) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1109)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1110) default:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1111) ctx->block_size = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1112) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1113) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1114) ctx->is_aead = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1115)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1116) dd = atmel_aes_find_dev(ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1117) if (!dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1118) return -ENODEV;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1119)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1120) rctx = skcipher_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1121) rctx->mode = mode;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1122)
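	/*
	 * For in-place decryption in an IV-based mode, save the last
	 * ciphertext block now: it will be needed afterwards as the
	 * chaining IV, but decryption is about to overwrite it in
	 * req->src.
	 */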
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1123) if ((mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1124) !(mode & AES_FLAGS_ENCRYPT) && req->src == req->dst) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1125) unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1126)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1127) if (req->cryptlen >= ivsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1128) scatterwalk_map_and_copy(rctx->lastc, req->src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1129) req->cryptlen - ivsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1130) ivsize, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1131) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1132)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1133) return atmel_aes_handle_queue(dd, &req->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1134) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1135)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1136) static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1137) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1138) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1139) struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1140)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1141) if (keylen != AES_KEYSIZE_128 &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1142) keylen != AES_KEYSIZE_192 &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1143) keylen != AES_KEYSIZE_256)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1144) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1145)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1146) memcpy(ctx->key, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1147) ctx->keylen = keylen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1148)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1149) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1150) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1151)
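/*
 * Usage sketch (illustrative only, error handling elided): these
 * skciphers are reached through the generic crypto API rather than
 * called directly, e.g.:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_crypt(req, sg_src, sg_dst, len, iv);
 *	err = crypto_skcipher_encrypt(req);
 *
 * "key", "sg_src", "sg_dst", "len" and "iv" are caller-provided.
 */
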
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1152) static int atmel_aes_ecb_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1153) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1154) return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1155) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1156)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1157) static int atmel_aes_ecb_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1158) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1159) return atmel_aes_crypt(req, AES_FLAGS_ECB);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1160) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1161)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1162) static int atmel_aes_cbc_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1163) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1164) return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1165) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1166)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1167) static int atmel_aes_cbc_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1168) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1169) return atmel_aes_crypt(req, AES_FLAGS_CBC);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1170) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1171)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1172) static int atmel_aes_ofb_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1173) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1174) return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1175) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1176)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1177) static int atmel_aes_ofb_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1178) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1179) return atmel_aes_crypt(req, AES_FLAGS_OFB);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1180) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1181)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1182) static int atmel_aes_cfb_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1183) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1184) return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1185) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1186)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1187) static int atmel_aes_cfb_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1188) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1189) return atmel_aes_crypt(req, AES_FLAGS_CFB128);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1190) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1191)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1192) static int atmel_aes_cfb64_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1193) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1194) return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1195) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1196)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1197) static int atmel_aes_cfb64_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1198) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1199) return atmel_aes_crypt(req, AES_FLAGS_CFB64);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1200) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1201)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1202) static int atmel_aes_cfb32_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1203) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1204) return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1205) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1206)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1207) static int atmel_aes_cfb32_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1208) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1209) return atmel_aes_crypt(req, AES_FLAGS_CFB32);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1210) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1211)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1212) static int atmel_aes_cfb16_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1213) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1214) return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1215) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1216)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1217) static int atmel_aes_cfb16_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1218) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1219) return atmel_aes_crypt(req, AES_FLAGS_CFB16);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1220) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1221)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1222) static int atmel_aes_cfb8_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1223) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1224) return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1225) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1226)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1227) static int atmel_aes_cfb8_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1228) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1229) return atmel_aes_crypt(req, AES_FLAGS_CFB8);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1230) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1231)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1232) static int atmel_aes_ctr_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1233) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1234) return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1235) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1236)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1237) static int atmel_aes_ctr_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1238) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1239) return atmel_aes_crypt(req, AES_FLAGS_CTR);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1240) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1241)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1242) static int atmel_aes_init_tfm(struct crypto_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1243) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1244) struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1245)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1246) crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1247) ctx->base.start = atmel_aes_start;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1248)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1249) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1250) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1251)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1252) static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1253) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1254) struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1255)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1256) crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1257) ctx->base.start = atmel_aes_ctr_start;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1258)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1259) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1260) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1261)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1262) static struct skcipher_alg aes_algs[] = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1263) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1264) .base.cra_name = "ecb(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1265) .base.cra_driver_name = "atmel-ecb-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1266) .base.cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1267) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1268)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1269) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1270) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1271) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1272) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1273) .encrypt = atmel_aes_ecb_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1274) .decrypt = atmel_aes_ecb_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1275) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1276) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1277) .base.cra_name = "cbc(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1278) .base.cra_driver_name = "atmel-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1279) .base.cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1280) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1281)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1282) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1283) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1284) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1285) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1286) .encrypt = atmel_aes_cbc_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1287) .decrypt = atmel_aes_cbc_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1288) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1289) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1290) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1291) .base.cra_name = "ofb(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1292) .base.cra_driver_name = "atmel-ofb-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1293) .base.cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1294) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1295)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1296) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1297) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1298) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1299) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1300) .encrypt = atmel_aes_ofb_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1301) .decrypt = atmel_aes_ofb_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1302) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1303) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1304) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1305) .base.cra_name = "cfb(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1306) .base.cra_driver_name = "atmel-cfb-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1307) .base.cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1308) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1309)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1310) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1311) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1312) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1313) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1314) .encrypt = atmel_aes_cfb_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1315) .decrypt = atmel_aes_cfb_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1316) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1317) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1318) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1319) .base.cra_name = "cfb32(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1320) .base.cra_driver_name = "atmel-cfb32-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1321) .base.cra_blocksize = CFB32_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1322) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1323)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1324) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1325) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1326) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1327) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1328) .encrypt = atmel_aes_cfb32_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1329) .decrypt = atmel_aes_cfb32_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1330) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1331) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1332) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1333) .base.cra_name = "cfb16(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1334) .base.cra_driver_name = "atmel-cfb16-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1335) .base.cra_blocksize = CFB16_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1336) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1337)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1338) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1339) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1340) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1341) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1342) .encrypt = atmel_aes_cfb16_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1343) .decrypt = atmel_aes_cfb16_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1344) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1345) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1346) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1347) .base.cra_name = "cfb8(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1348) .base.cra_driver_name = "atmel-cfb8-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1349) .base.cra_blocksize = CFB8_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1350) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1351)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1352) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1353) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1354) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1355) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1356) .encrypt = atmel_aes_cfb8_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1357) .decrypt = atmel_aes_cfb8_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1358) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1359) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1360) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1361) .base.cra_name = "ctr(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1362) .base.cra_driver_name = "atmel-ctr-aes",
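	/* CTR behaves as a stream cipher, hence a blocksize of 1. */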
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1363) .base.cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1364) .base.cra_ctxsize = sizeof(struct atmel_aes_ctr_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1365)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1366) .init = atmel_aes_ctr_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1367) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1368) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1369) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1370) .encrypt = atmel_aes_ctr_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1371) .decrypt = atmel_aes_ctr_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1372) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1373) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1374) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1375)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1376) static struct skcipher_alg aes_cfb64_alg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1377) .base.cra_name = "cfb64(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1378) .base.cra_driver_name = "atmel-cfb64-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1379) .base.cra_blocksize = CFB64_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1380) .base.cra_ctxsize = sizeof(struct atmel_aes_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1381)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1382) .init = atmel_aes_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1383) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1384) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1385) .setkey = atmel_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1386) .encrypt = atmel_aes_cfb64_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1387) .decrypt = atmel_aes_cfb64_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1388) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1389) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1390)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1391)
/* GCM AEAD functions */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1393)
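/*
 * The GCM implementation is a chain of resume handlers, driven either
 * by the DATARDY/TAGRDY interrupts or by DMA completion:
 * start -> (ghash of a non-96-bit IV) -> process -> length -> data ->
 * tag_init -> (tag) -> finalize.
 */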
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1394) static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1395) const u32 *data, size_t datalen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1396) const __be32 *ghash_in, __be32 *ghash_out,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1397) atmel_aes_fn_t resume);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1398) static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1399) static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1400)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1401) static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1402) static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1403) static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1404) static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1405) static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1406) static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1407) static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1408)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1409) static inline struct atmel_aes_gcm_ctx *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1410) atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1411) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1412) return container_of(ctx, struct atmel_aes_gcm_ctx, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1413) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1414)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1415) static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1416) const u32 *data, size_t datalen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1417) const __be32 *ghash_in, __be32 *ghash_out,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1418) atmel_aes_fn_t resume)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1419) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1420) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1421)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1422) dd->data = (u32 *)data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1423) dd->datalen = datalen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1424) ctx->ghash_in = ghash_in;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1425) ctx->ghash_out = ghash_out;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1426) ctx->ghash_resume = resume;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1427)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1428) atmel_aes_write_ctrl(dd, false, NULL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1429) return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1430) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1431)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1432) static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1433) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1434) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1435)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1436) /* Set the data length. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1437) atmel_aes_write(dd, AES_AADLENR, dd->total);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1438) atmel_aes_write(dd, AES_CLENR, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1439)
	/* If needed, overwrite the GCM Intermediate Hash Word Registers. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1441) if (ctx->ghash_in)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1442) atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1443)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1444) return atmel_aes_gcm_ghash_finalize(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1445) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1446)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1447) static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1448) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1449) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1450) u32 isr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1451)
	/*
	 * Write data into the Input Data Registers, one 16-byte block at
	 * a time. If the hardware has not asserted DATARDY yet, enable
	 * the interrupt and resume from this function once it fires.
	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1453) while (dd->datalen > 0) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1454) atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1455) dd->data += 4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1456) dd->datalen -= AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1457)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1458) isr = atmel_aes_read(dd, AES_ISR);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1459) if (!(isr & AES_INT_DATARDY)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1460) dd->resume = atmel_aes_gcm_ghash_finalize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1461) atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1462) return -EINPROGRESS;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1463) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1464) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1465)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1466) /* Read the computed hash from GHASHRx. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1467) atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1468)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1469) return ctx->ghash_resume(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1470) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1471)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1472)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1473) static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1474) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1475) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1476) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1477) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1478) struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1479) size_t ivsize = crypto_aead_ivsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1480) size_t datalen, padlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1481) const void *iv = req->iv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1482) u8 *data = dd->buf;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1483) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1484)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1485) atmel_aes_set_mode(dd, rctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1486)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1487) err = atmel_aes_hw_init(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1488) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1489) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1490)
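	/*
	 * Compute J0 as per NIST SP 800-38D: for the usual 96-bit IV,
	 * J0 = IV || 0^31 || 1; for any other IV length,
	 * J0 = GHASH(IV || zero padding || [len(IV)]64).
	 */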
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1491) if (likely(ivsize == GCM_AES_IV_SIZE)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1492) memcpy(ctx->j0, iv, ivsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1493) ctx->j0[3] = cpu_to_be32(1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1494) return atmel_aes_gcm_process(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1495) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1496)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1497) padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1498) datalen = ivsize + padlen + AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1499) if (datalen > dd->buflen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1500) return atmel_aes_complete(dd, -EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1501)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1502) memcpy(data, iv, ivsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1503) memset(data + ivsize, 0, padlen + sizeof(u64));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1504) ((__be64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1505)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1506) return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1507) NULL, ctx->j0, atmel_aes_gcm_process);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1508) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1509)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1510) static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1511) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1512) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1513) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1514) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1515) bool enc = atmel_aes_is_encrypt(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1516) u32 authsize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1517)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1518) /* Compute text length. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1519) authsize = crypto_aead_authsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1520) ctx->textlen = req->cryptlen - (enc ? 0 : authsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1521)
	/*
	 * According to the tcrypt test suite, the GCM Automatic Tag
	 * Generation fails when both the message and its associated data
	 * are empty.
	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1526) if (likely(req->assoclen != 0 || ctx->textlen != 0))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1527) dd->flags |= AES_FLAGS_GTAGEN;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1528)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1529) atmel_aes_write_ctrl(dd, false, NULL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1530) return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1531) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1532)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1533) static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1534) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1535) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1536) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1537) __be32 j0_lsw, *j0 = ctx->j0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1538) size_t padlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1539)
	/*
	 * Write incr32(J0) into the IV: CTR encryption of the text starts
	 * at J0 + 1. J0 itself is restored right after, since it is
	 * needed again to compute the final tag.
	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1541) j0_lsw = j0[3];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1542) be32_add_cpu(&j0[3], 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1543) atmel_aes_write_block(dd, AES_IVR(0), j0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1544) j0[3] = j0_lsw;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1545)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1546) /* Set aad and text lengths. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1547) atmel_aes_write(dd, AES_AADLENR, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1548) atmel_aes_write(dd, AES_CLENR, ctx->textlen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1549)
	/* Check whether any associated data (AAD) is present. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1551) if (unlikely(req->assoclen == 0)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1552) dd->datalen = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1553) return atmel_aes_gcm_data(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1554) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1555)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1556) /* Copy assoc data and add padding. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1557) padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1558) if (unlikely(req->assoclen + padlen > dd->buflen))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1559) return atmel_aes_complete(dd, -EINVAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1560) sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1561)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1562) /* Write assoc data into the Input Data register. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1563) dd->data = (u32 *)dd->buf;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1564) dd->datalen = req->assoclen + padlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1565) return atmel_aes_gcm_data(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1566) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1567)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1568) static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1569) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1570) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1571) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1572) bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1573) struct scatterlist *src, *dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1574) u32 isr, mr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1575)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1576) /* Write AAD first. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1577) while (dd->datalen > 0) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1578) atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1579) dd->data += 4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1580) dd->datalen -= AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1581)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1582) isr = atmel_aes_read(dd, AES_ISR);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1583) if (!(isr & AES_INT_DATARDY)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1584) dd->resume = atmel_aes_gcm_data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1585) atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1586) return -EINPROGRESS;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1587) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1588) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1589)
	/* GMAC only: there is no plaintext/ciphertext to process. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1591) if (unlikely(ctx->textlen == 0))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1592) return atmel_aes_gcm_tag_init(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1593)
	/* Prepare the src and dst scatterlists for the plaintext/ciphertext. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1595) src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1596) dst = ((req->src == req->dst) ? src :
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1597) scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1598)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1599) if (use_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1600) /* Update the Mode Register for DMA transfers. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1601) mr = atmel_aes_read(dd, AES_MR);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1602) mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1603) mr |= AES_MR_SMOD_IDATAR0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1604) if (dd->caps.has_dualbuff)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1605) mr |= AES_MR_DUALBUFF;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1606) atmel_aes_write(dd, AES_MR, mr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1607)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1608) return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1609) atmel_aes_gcm_tag_init);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1610) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1611)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1612) return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1613) atmel_aes_gcm_tag_init);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1614) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1615)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1616) static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1617) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1618) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1619) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1620) __be64 *data = dd->buf;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1621)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1622) if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1623) if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1624) dd->resume = atmel_aes_gcm_tag_init;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1625) atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1626) return -EINPROGRESS;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1627) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1628)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1629) return atmel_aes_gcm_finalize(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1630) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1631)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1632) /* Read the GCM Intermediate Hash Word Registers. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1633) atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1634)
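	/* Build the final GHASH block: len(A) || len(C) as 64-bit bit counts. */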
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1635) data[0] = cpu_to_be64(req->assoclen * 8);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1636) data[1] = cpu_to_be64(ctx->textlen * 8);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1637)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1638) return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1639) ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1640) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1641)
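/*
 * Software tag path: GCM defines the tag as the GHASH output encrypted with
 * the pre-counter block J0, i.e. T = E(K, J0) ^ S. Reuse the CTR engine with
 * J0 as Initialization Vector to perform this last encryption.
 */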
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1642) static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1643) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1644) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1645) unsigned long flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1646)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1647) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1648) * Change mode to CTR to complete the tag generation.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1649) * Use J0 as Initialization Vector.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1650) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1651) flags = dd->flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1652) dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1653) dd->flags |= AES_FLAGS_CTR;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1654) atmel_aes_write_ctrl(dd, false, ctx->j0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1655) dd->flags = flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1656)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1657) atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1658) return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1659) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1660)
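/*
 * Last step: fetch the tag. On encryption, append it to the destination
 * buffer; on decryption, compare it against the tag carried at the end of
 * the source buffer with the constant-time crypto_memneq() so that the
 * comparison does not leak the position of a mismatch.
 */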
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1661) static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1662) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1663) struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1664) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1665) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1666) bool enc = atmel_aes_is_encrypt(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1667) u32 offset, authsize, itag[4], *otag = ctx->tag;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1668) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1669)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1670) /* Read the computed tag. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1671) if (likely(dd->flags & AES_FLAGS_GTAGEN))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1672) atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1673) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1674) atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1675)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1676) offset = req->assoclen + ctx->textlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1677) authsize = crypto_aead_authsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1678) if (enc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1679) scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1680) err = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1681) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1682) scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1683) err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1684) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1685)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1686) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1687) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1688)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1689) static int atmel_aes_gcm_crypt(struct aead_request *req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1690) unsigned long mode)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1691) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1692) struct atmel_aes_base_ctx *ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1693) struct atmel_aes_reqctx *rctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1694) struct atmel_aes_dev *dd;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1695)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1696) ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1697) ctx->block_size = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1698) ctx->is_aead = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1699)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1700) dd = atmel_aes_find_dev(ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1701) if (!dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1702) return -ENODEV;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1703)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1704) rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1705) rctx->mode = AES_FLAGS_GCM | mode;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1706)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1707) return atmel_aes_handle_queue(dd, &req->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1708) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1709)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1710) static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1711) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1712) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1713) struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1714)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1715) if (keylen != AES_KEYSIZE_256 &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1716) keylen != AES_KEYSIZE_192 &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1717) keylen != AES_KEYSIZE_128)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1718) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1719)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1720) memcpy(ctx->key, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1721) ctx->keylen = keylen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1722)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1723) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1724) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1725)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1726) static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1727) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1728) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1729) return crypto_gcm_check_authsize(authsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1730) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1731)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1732) static int atmel_aes_gcm_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1733) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1734) return atmel_aes_gcm_crypt(req, AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1735) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1736)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1737) static int atmel_aes_gcm_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1738) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1739) return atmel_aes_gcm_crypt(req, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1740) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1741)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1742) static int atmel_aes_gcm_init(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1743) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1744) struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1745)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1746) crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1747) ctx->base.start = atmel_aes_gcm_start;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1748)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1749) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1750) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1751)
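/*
 * "gcm(aes)" AEAD template. As a usage sketch (not part of this driver),
 * callers reach this implementation through the generic crypto API:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 * The crypto core picks this driver whenever its cra_priority, set to
 * ATMEL_AES_PRIORITY at registration time, makes it the best candidate.
 */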
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1752) static struct aead_alg aes_gcm_alg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1753) .setkey = atmel_aes_gcm_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1754) .setauthsize = atmel_aes_gcm_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1755) .encrypt = atmel_aes_gcm_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1756) .decrypt = atmel_aes_gcm_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1757) .init = atmel_aes_gcm_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1758) .ivsize = GCM_AES_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1759) .maxauthsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1760)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1761) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1762) .cra_name = "gcm(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1763) .cra_driver_name = "atmel-gcm-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1764) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1765) .cra_ctxsize = sizeof(struct atmel_aes_gcm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1766) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1767) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1768)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1769)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1770) /* xts functions */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1771)
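/*
 * XTS splits the supplied key in two halves: the second half (key2) encrypts
 * the IV in ECB mode to produce the initial tweak, while the first half
 * encrypts the data itself. The hardware then multiplies the tweak by the
 * primitive element alpha of GF(2^128) for every consecutive block, which is
 * why ALPHAR is loaded with 1 below.
 */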
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1772) static inline struct atmel_aes_xts_ctx *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1773) atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1774) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1775) return container_of(ctx, struct atmel_aes_xts_ctx, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1776) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1777)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1778) static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1779)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1780) static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1781) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1782) struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1783) struct skcipher_request *req = skcipher_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1784) struct atmel_aes_reqctx *rctx = skcipher_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1785) unsigned long flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1786) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1787)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1788) atmel_aes_set_mode(dd, rctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1789)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1790) err = atmel_aes_hw_init(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1791) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1792) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1793)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1794) /* Compute the tweak value from req->iv with ecb(aes). */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1795) flags = dd->flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1796) dd->flags &= ~AES_FLAGS_MODE_MASK;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1797) dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1798) atmel_aes_write_ctrl_key(dd, false, NULL,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1799) ctx->key2, ctx->base.keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1800) dd->flags = flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1801)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1802) atmel_aes_write_block(dd, AES_IDATAR(0), req->iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1803) return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1804) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1805)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1806) static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1807) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1808) struct skcipher_request *req = skcipher_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1809) bool use_dma = (req->cryptlen >= ATMEL_AES_DMA_THRESHOLD);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1810) u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1811) static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1812) u8 *tweak_bytes = (u8 *)tweak;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1813) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1814)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1815) /* Read the computed ciphered tweak value. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1816) atmel_aes_read_block(dd, AES_ODATAR(0), tweak);
	/*
	 * Hardware quirk:
	 * the order of the ciphered tweak bytes needs to be reversed before
	 * writing them into the TWRx registers.
	 */
	for (i = 0; i < AES_BLOCK_SIZE / 2; ++i) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1823) u8 tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1824)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1825) tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1826) tweak_bytes[i] = tmp;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1827) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1828)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1829) /* Process the data. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1830) atmel_aes_write_ctrl(dd, use_dma, NULL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1831) atmel_aes_write_block(dd, AES_TWR(0), tweak);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1832) atmel_aes_write_block(dd, AES_ALPHAR(0), one);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1833) if (use_dma)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1834) return atmel_aes_dma_start(dd, req->src, req->dst,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1835) req->cryptlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1836) atmel_aes_transfer_complete);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1837)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1838) return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1839) atmel_aes_transfer_complete);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1840) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1841)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1842) static int atmel_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1843) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1844) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1845) struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1846) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1847)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1848) err = xts_check_key(crypto_skcipher_tfm(tfm), key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1849) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1850) return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1851)
	memcpy(ctx->base.key, key, keylen / 2);
	memcpy(ctx->key2, key + keylen / 2, keylen / 2);
	ctx->base.keylen = keylen / 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1855)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1856) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1857) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1858)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1859) static int atmel_aes_xts_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1860) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1861) return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1862) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1863)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1864) static int atmel_aes_xts_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1865) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1866) return atmel_aes_crypt(req, AES_FLAGS_XTS);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1867) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1868)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1869) static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1870) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1871) struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1872)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1873) crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1874) ctx->base.start = atmel_aes_xts_start;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1875)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1876) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1877) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1878)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1879) static struct skcipher_alg aes_xts_alg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1880) .base.cra_name = "xts(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1881) .base.cra_driver_name = "atmel-xts-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1882) .base.cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1883) .base.cra_ctxsize = sizeof(struct atmel_aes_xts_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1884)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1885) .min_keysize = 2 * AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1886) .max_keysize = 2 * AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1887) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1888) .setkey = atmel_aes_xts_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1889) .encrypt = atmel_aes_xts_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1890) .decrypt = atmel_aes_xts_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1891) .init = atmel_aes_xts_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1892) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1893)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1894) #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1895) /* authenc aead functions */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1896)
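/*
 * The authenc(hmac(shaX),cbc(aes)) implementations below pair the AES block
 * with the companion SHA block: the AES data is pipelined into the SHA
 * peripheral (see AES_EMR_PLIPEN), so the HMAC covers the associated data
 * followed by the cipher text without an extra pass over memory, in the
 * IPsec encrypt-then-MAC fashion.
 */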
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1897) static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1898) static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1899) bool is_async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1900) static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1901) bool is_async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1902) static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1903) static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1904) bool is_async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1905)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1906) static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1907) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1908) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1909) struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1910)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1911) if (err && (dd->flags & AES_FLAGS_OWN_SHA))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1912) atmel_sha_authenc_abort(&rctx->auth_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1913) dd->flags &= ~AES_FLAGS_OWN_SHA;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1914) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1915)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1916) static int atmel_aes_authenc_start(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1917) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1918) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1919) struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1920) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1921) struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1922) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1923)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1924) atmel_aes_set_mode(dd, &rctx->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1925)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1926) err = atmel_aes_hw_init(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1927) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1928) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1929)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1930) return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1931) atmel_aes_authenc_init, dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1932) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1933)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1934) static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1935) bool is_async)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1936) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1937) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1938) struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1939)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1940) if (is_async)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1941) dd->is_async = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1942) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1943) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1944)
	/* If we reach this point, we own the SHA device. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1946) dd->flags |= AES_FLAGS_OWN_SHA;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1947)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1948) /* Configure the SHA device. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1949) return atmel_sha_authenc_init(&rctx->auth_req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1950) req->src, req->assoclen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1951) rctx->textlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1952) atmel_aes_authenc_transfer, dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1953) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1954)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1955) static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1956) bool is_async)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1957) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1958) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1959) struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1960) bool enc = atmel_aes_is_encrypt(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1961) struct scatterlist *src, *dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1962) __be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1963) u32 emr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1964)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1965) if (is_async)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1966) dd->is_async = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1967) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1968) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1969)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1970) /* Prepare src and dst scatter-lists to transfer cipher/plain texts. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1971) src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1972) dst = src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1973)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1974) if (req->src != req->dst)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1975) dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1976)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1977) /* Configure the AES device. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1978) memcpy(iv, req->iv, sizeof(iv));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1979)
	/*
	 * Always set the 2nd parameter of atmel_aes_write_ctrl() to 'true',
	 * even when the data transfer is actually performed by the CPU rather
	 * than by the DMA, because the AES_MR_SMOD bitfield must be forced to
	 * AES_MR_SMOD_IDATAR0: both AES_MR_SMOD and SHA_MR_SMOD have to be set
	 * to *_MR_SMOD_IDATAR0 for the AES->SHA pipeline to work.
	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1987) atmel_aes_write_ctrl(dd, true, iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1988) emr = AES_EMR_PLIPEN;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1989) if (!enc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1990) emr |= AES_EMR_PLIPD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1991) atmel_aes_write(dd, AES_EMR, emr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1992)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1993) /* Transfer data. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1994) return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1995) atmel_aes_authenc_digest);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1996) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1997)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1998) static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1999) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2000) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2001) struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2002)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2003) /* atmel_sha_authenc_final() releases the SHA device. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2004) dd->flags &= ~AES_FLAGS_OWN_SHA;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2005) return atmel_sha_authenc_final(&rctx->auth_req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2006) rctx->digest, sizeof(rctx->digest),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2007) atmel_aes_authenc_final, dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2008) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2009)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2010) static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2011) bool is_async)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2012) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2013) struct aead_request *req = aead_request_cast(dd->areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2014) struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2015) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2016) bool enc = atmel_aes_is_encrypt(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2017) u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2018) u32 offs, authsize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2019)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2020) if (is_async)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2021) dd->is_async = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2022) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2023) goto complete;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2024)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2025) offs = req->assoclen + rctx->textlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2026) authsize = crypto_aead_authsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2027) if (enc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2028) scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2029) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2030) scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2031) if (crypto_memneq(idigest, odigest, authsize))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2032) err = -EBADMSG;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2033) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2034)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2035) complete:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2036) return atmel_aes_complete(dd, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2037) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2038)
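/*
 * The authenc() key blob carries both keys: crypto_authenc_extractkeys()
 * splits it into the authentication key, handed over to the companion SHA
 * driver, and the encryption key, kept locally for the AES engine. The
 * parsed 'keys' structure is cleared with memzero_explicit() on every exit
 * path.
 */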
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2039) static int atmel_aes_authenc_setkey(struct crypto_aead *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2040) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2041) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2042) struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2043) struct crypto_authenc_keys keys;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2044) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2045)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2046) if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2047) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2048)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2049) if (keys.enckeylen > sizeof(ctx->base.key))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2050) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2051)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2052) /* Save auth key. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2053) err = atmel_sha_authenc_setkey(ctx->auth,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2054) keys.authkey, keys.authkeylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2055) crypto_aead_get_flags(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2056) if (err) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2057) memzero_explicit(&keys, sizeof(keys));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2058) return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2059) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2060)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2061) /* Save enc key. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2062) ctx->base.keylen = keys.enckeylen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2063) memcpy(ctx->base.key, keys.enckey, keys.enckeylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2064)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2065) memzero_explicit(&keys, sizeof(keys));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2066) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2067)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2068) badkey:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2069) memzero_explicit(&keys, sizeof(keys));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2070) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2071) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2072)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2073) static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2074) unsigned long auth_mode)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2075) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2076) struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2077) unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2078)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2079) ctx->auth = atmel_sha_authenc_spawn(auth_mode);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2080) if (IS_ERR(ctx->auth))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2081) return PTR_ERR(ctx->auth);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2082)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2083) crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2084) auth_reqsize));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2085) ctx->base.start = atmel_aes_authenc_start;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2086)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2087) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2088) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2089)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2090) static int atmel_aes_authenc_hmac_sha1_init_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2091) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2092) return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2093) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2094)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2095) static int atmel_aes_authenc_hmac_sha224_init_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2096) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2097) return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA224);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2098) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2099)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2100) static int atmel_aes_authenc_hmac_sha256_init_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2101) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2102) return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA256);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2103) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2104)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2105) static int atmel_aes_authenc_hmac_sha384_init_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2106) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2107) return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA384);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2108) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2109)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2110) static int atmel_aes_authenc_hmac_sha512_init_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2111) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2112) return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA512);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2113) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2114)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2115) static void atmel_aes_authenc_exit_tfm(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2116) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2117) struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2118)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2119) atmel_sha_authenc_free(ctx->auth);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2120) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2121)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2122) static int atmel_aes_authenc_crypt(struct aead_request *req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2123) unsigned long mode)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2124) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2125) struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2126) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2127) struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2128) u32 authsize = crypto_aead_authsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2129) bool enc = (mode & AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2130) struct atmel_aes_dev *dd;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2131)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2132) /* Compute text length. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2133) if (!enc && req->cryptlen < authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2134) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2135) rctx->textlen = req->cryptlen - (enc ? 0 : authsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2136)
	/*
	 * Empty messages are not supported yet: the SHA auto-padding can only
	 * be used on non-empty messages, so a special case would be needed to
	 * handle them.
	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2142) if (!rctx->textlen && !req->assoclen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2143) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2144)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2145) rctx->base.mode = mode;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2146) ctx->block_size = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2147) ctx->is_aead = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2148)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2149) dd = atmel_aes_find_dev(ctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2150) if (!dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2151) return -ENODEV;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2152)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2153) return atmel_aes_handle_queue(dd, &req->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2154) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2155)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2156) static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2157) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2158) return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2159) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2160)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2161) static int atmel_aes_authenc_cbc_aes_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2162) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2163) return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2164) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2165)
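/*
 * All five authenc variants share the same AES handling and differ only in
 * the underlying HMAC digest, so only .init, .maxauthsize and the algorithm
 * names change from one entry to the next.
 */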
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2166) static struct aead_alg aes_authenc_algs[] = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2167) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2168) .setkey = atmel_aes_authenc_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2169) .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2170) .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2171) .init = atmel_aes_authenc_hmac_sha1_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2172) .exit = atmel_aes_authenc_exit_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2173) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2174) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2175)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2176) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2177) .cra_name = "authenc(hmac(sha1),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2178) .cra_driver_name = "atmel-authenc-hmac-sha1-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2179) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2180) .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2181) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2182) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2183) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2184) .setkey = atmel_aes_authenc_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2185) .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2186) .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2187) .init = atmel_aes_authenc_hmac_sha224_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2188) .exit = atmel_aes_authenc_exit_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2189) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2190) .maxauthsize = SHA224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2191)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2192) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2193) .cra_name = "authenc(hmac(sha224),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2194) .cra_driver_name = "atmel-authenc-hmac-sha224-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2195) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2196) .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2197) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2198) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2199) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2200) .setkey = atmel_aes_authenc_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2201) .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2202) .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2203) .init = atmel_aes_authenc_hmac_sha256_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2204) .exit = atmel_aes_authenc_exit_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2205) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2206) .maxauthsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2207)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2208) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2209) .cra_name = "authenc(hmac(sha256),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2210) .cra_driver_name = "atmel-authenc-hmac-sha256-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2211) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2212) .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2213) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2214) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2215) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2216) .setkey = atmel_aes_authenc_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2217) .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2218) .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2219) .init = atmel_aes_authenc_hmac_sha384_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2220) .exit = atmel_aes_authenc_exit_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2221) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2222) .maxauthsize = SHA384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2223)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2224) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2225) .cra_name = "authenc(hmac(sha384),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2226) .cra_driver_name = "atmel-authenc-hmac-sha384-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2227) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2228) .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2229) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2230) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2231) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2232) .setkey = atmel_aes_authenc_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2233) .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2234) .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2235) .init = atmel_aes_authenc_hmac_sha512_init_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2236) .exit = atmel_aes_authenc_exit_tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2237) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2238) .maxauthsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2239)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2240) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2241) .cra_name = "authenc(hmac(sha512),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2242) .cra_driver_name = "atmel-authenc-hmac-sha512-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2243) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2244) .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2245) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2246) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2247) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2248) #endif /* CONFIG_CRYPTO_DEV_ATMEL_AUTHENC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2249)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2250) /* Probe functions */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2251)
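/*
 * dd->buf is a bounce buffer for data the engine cannot consume in place
 * (e.g. misaligned scatterlists); its usable length is rounded down to a
 * whole number of AES blocks.
 */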
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2252) static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2253) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2254) dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2255) dd->buflen = ATMEL_AES_BUFFER_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2256) dd->buflen &= ~(AES_BLOCK_SIZE - 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2257)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2258) if (!dd->buf) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2259) dev_err(dd->dev, "unable to alloc pages.\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2260) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2261) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2262)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2263) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2264) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2265)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2266) static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2267) {
	/* Free the whole allocation, matching the order used in buff_init(). */
	free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2269) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2270)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2271) static int atmel_aes_dma_init(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2272) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2273) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2274)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2275) /* Try to grab 2 DMA channels */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2276) dd->src.chan = dma_request_chan(dd->dev, "tx");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2277) if (IS_ERR(dd->src.chan)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2278) ret = PTR_ERR(dd->src.chan);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2279) goto err_dma_in;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2280) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2281)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2282) dd->dst.chan = dma_request_chan(dd->dev, "rx");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2283) if (IS_ERR(dd->dst.chan)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2284) ret = PTR_ERR(dd->dst.chan);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2285) goto err_dma_out;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2286) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2287)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2288) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2289)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2290) err_dma_out:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2291) dma_release_channel(dd->src.chan);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2292) err_dma_in:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2293) dev_err(dd->dev, "no DMA channel available\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2294) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2295) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2296)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2297) static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2298) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2299) dma_release_channel(dd->dst.chan);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2300) dma_release_channel(dd->src.chan);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2301) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2302)
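/*
 * Bottom halves: the queue tasklet dequeues the next pending request once
 * the device becomes idle, while the done tasklet resumes the suspended
 * state machine (dd->resume) after the interrupt handler has masked the
 * triggering interrupt sources.
 */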
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2303) static void atmel_aes_queue_task(unsigned long data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2304) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2305) struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2306)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2307) atmel_aes_handle_queue(dd, NULL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2308) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2309)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2310) static void atmel_aes_done_task(unsigned long data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2311) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2312) struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2313)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2314) dd->is_async = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2315) (void)dd->resume(dd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2316) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2317)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2318) static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2319) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2320) struct atmel_aes_dev *aes_dd = dev_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2321) u32 reg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2322)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2323) reg = atmel_aes_read(aes_dd, AES_ISR);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2324) if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2325) atmel_aes_write(aes_dd, AES_IDR, reg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2326) if (AES_FLAGS_BUSY & aes_dd->flags)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2327) tasklet_schedule(&aes_dd->done_task);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2328) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2329) dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2330) return IRQ_HANDLED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2331) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2332)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2333) return IRQ_NONE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2334) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2335)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2336) static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2337) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2338) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2339)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2340) #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2341) if (dd->caps.has_authenc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2342) for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2343) crypto_unregister_aead(&aes_authenc_algs[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2344) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2345)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2346) if (dd->caps.has_xts)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2347) crypto_unregister_skcipher(&aes_xts_alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2348)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2349) if (dd->caps.has_gcm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2350) crypto_unregister_aead(&aes_gcm_alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2351)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2352) if (dd->caps.has_cfb64)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2353) crypto_unregister_skcipher(&aes_cfb64_alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2354)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2355) for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2356) crypto_unregister_skcipher(&aes_algs[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2357) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2358)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2359) static void atmel_aes_crypto_alg_init(struct crypto_alg *alg)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2360) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2361) alg->cra_flags = CRYPTO_ALG_ASYNC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2362) alg->cra_alignmask = 0xf;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2363) alg->cra_priority = ATMEL_AES_PRIORITY;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2364) alg->cra_module = THIS_MODULE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2365) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2366)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2367) static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2368) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2369) int err, i, j;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2370)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2371) for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2372) atmel_aes_crypto_alg_init(&aes_algs[i].base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2373)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2374) err = crypto_register_skcipher(&aes_algs[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2375) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2376) goto err_aes_algs;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2377) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2378)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2379) if (dd->caps.has_cfb64) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2380) atmel_aes_crypto_alg_init(&aes_cfb64_alg.base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2381)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2382) err = crypto_register_skcipher(&aes_cfb64_alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2383) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2384) goto err_aes_cfb64_alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2385) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2386)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2387) if (dd->caps.has_gcm) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2388) atmel_aes_crypto_alg_init(&aes_gcm_alg.base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2389)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2390) err = crypto_register_aead(&aes_gcm_alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2391) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2392) goto err_aes_gcm_alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2393) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2394)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2395) if (dd->caps.has_xts) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2396) atmel_aes_crypto_alg_init(&aes_xts_alg.base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2397)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2398) err = crypto_register_skcipher(&aes_xts_alg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2399) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2400) goto err_aes_xts_alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2401) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2402)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2403) #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2404) if (dd->caps.has_authenc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2405) for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2406) atmel_aes_crypto_alg_init(&aes_authenc_algs[i].base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2407)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2408) err = crypto_register_aead(&aes_authenc_algs[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2409) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2410) goto err_aes_authenc_alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2411) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2412) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2413) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2414)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2415) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2416)
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	/* i = ARRAY_SIZE(aes_authenc_algs); */
err_aes_authenc_alg:
	for (j = 0; j < i; j++)
		crypto_unregister_aead(&aes_authenc_algs[j]);
	crypto_unregister_skcipher(&aes_xts_alg);
#endif
err_aes_xts_alg:
	crypto_unregister_aead(&aes_gcm_alg);
err_aes_gcm_alg:
	crypto_unregister_skcipher(&aes_cfb64_alg);
err_aes_cfb64_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_skcipher(&aes_algs[j]);

	return err;
}

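/*
 * Once registered, these transforms are reached through the generic
 * crypto API rather than called directly; the driver's registration
 * priority makes the hardware implementation preferred over the
 * software fallbacks. A minimal in-kernel consumer sketch follows
 * (illustrative only, not part of this driver):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	// ...set the key and issue requests via skcipher_request_*()...
 *	crypto_free_skcipher(tfm);
 */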
static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	dd->caps.has_dualbuff = 0;
	dd->caps.has_cfb64 = 0;
	dd->caps.has_gcm = 0;
	dd->caps.has_xts = 0;
	dd->caps.has_authenc = 0;
	dd->caps.max_burst_size = 1;

	/* keep only major version number */
	switch (dd->hw_version & 0xff0) {
	case 0x500:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.has_xts = 1;
		dd->caps.has_authenc = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x200:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x120:
		break;
	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}
}

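/*
 * Example device tree node matched by the table below. The property
 * values here are illustrative only; actual reg, interrupt and clock
 * specifiers are SoC- and board-specific (consult the binding for the
 * target SoC). Only the compatible string and the "aes_clk" clock name
 * are fixed by this driver:
 *
 *	aes@f8038000 {
 *		compatible = "atmel,at91sam9g46-aes";
 *		reg = <0xf8038000 0x100>;
 *		interrupts = <43 IRQ_TYPE_LEVEL_HIGH 0>;
 *		clocks = <&pmc PMC_TYPE_PERIPHERAL 43>;
 *		clock-names = "aes_clk";
 *	};
 */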
#if defined(CONFIG_OF)
static const struct of_device_id atmel_aes_dt_ids[] = {
	{ .compatible = "atmel,at91sam9g46-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);
#endif

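/*
 * Probe sequence: allocate per-device state, set up the tasklets and
 * request queue, map the MMIO region and IRQ, acquire and prepare (but
 * not yet enable) the peripheral clock, read the hardware version to
 * derive the capability flags, allocate the bounce buffer and DMA
 * channels, and only then publish the device on the global list and
 * register its algorithms. The error labels at the bottom unwind these
 * steps in reverse.
 */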
static int atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	int err;

	aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
	if (!aes_dd)
		return -ENOMEM;

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);
	spin_lock_init(&aes_dd->lock);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
		     (unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
		     (unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	/* Get the base address */
	aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!aes_res) {
		dev_err(dev, "no MEM resource info\n");
		err = -ENODEV;
		goto err_tasklet_kill;
	}
	aes_dd->phys_base = aes_res->start;

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		err = aes_dd->irq;
		goto err_tasklet_kill;
	}

	err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
			       IRQF_SHARED, "atmel-aes", aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto err_tasklet_kill;
	}

	/* Initialize the clock */
	aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk");
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto err_tasklet_kill;
	}

	aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res);
	if (IS_ERR(aes_dd->io_base)) {
		dev_err(dev, "can't ioremap\n");
		err = PTR_ERR(aes_dd->io_base);
		goto err_tasklet_kill;
	}

	err = clk_prepare(aes_dd->iclk);
	if (err)
		goto err_tasklet_kill;

	err = atmel_aes_hw_version_init(aes_dd);
	if (err)
		goto err_iclk_unprepare;

	atmel_aes_get_cap(aes_dd);

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
		err = -EPROBE_DEFER;
		goto err_iclk_unprepare;
	}
#endif

	err = atmel_aes_buff_init(aes_dd);
	if (err)
		goto err_iclk_unprepare;

	err = atmel_aes_dma_init(aes_dd);
	if (err)
		goto err_buff_cleanup;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
		 dma_chan_name(aes_dd->src.chan),
		 dma_chan_name(aes_dd->dst.chan));

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_buff_cleanup:
	atmel_aes_buff_cleanup(aes_dd);
err_iclk_unprepare:
	clk_unprepare(aes_dd->iclk);
err_tasklet_kill:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	return err;
}

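/*
 * Teardown mirrors the tail of probe in reverse: unpublish the device
 * from the global list, unregister its algorithms, kill the tasklets,
 * then release the DMA channels, the bounce buffer and the clock.
 * devm-managed resources (MMIO mapping, IRQ) are released
 * automatically by the driver core.
 */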
static int atmel_aes_remove(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);
	if (!aes_dd)
		return -ENODEV;
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);
	atmel_aes_buff_cleanup(aes_dd);

	clk_unprepare(aes_dd->iclk);

	return 0;
}

static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= atmel_aes_remove,
	.driver		= {
		.name	= "atmel_aes",
		.of_match_table = of_match_ptr(atmel_aes_dt_ids),
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");